1. Packages
  2. Google Cloud (GCP) Classic
  3. API Docs
  4. dataplex
  5. Datascan
Google Cloud v8.26.0 published on Thursday, Apr 10, 2025 by Pulumi

gcp.dataplex.Datascan

Explore with Pulumi AI

Represents a user-visible job which provides the insights for the related data source.

To get more information about Datascan, see the Google Cloud Dataplex DataScan REST API reference and the Dataplex documentation on data profiling and auto data quality.

Example Usage

Dataplex Datascan Basic Profile

// Basic data-profile scan of the public BigQuery `samples.shakespeare`
// table, triggered on demand.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const basicProfile = new gcp.dataplex.Datascan("basic_profile", {
    location: "us-central1",
    dataScanId: "dataprofile-basic",
    // Full resource name of the BigQuery table to scan.
    data: {
        resource: "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
    },
    // Run only when explicitly requested (no schedule).
    executionSpec: {
        trigger: {
            onDemand: {},
        },
    },
    // Empty spec: a profile scan with default settings.
    dataProfileSpec: {},
    project: "my-project-name",
});
Copy
import pulumi
import pulumi_gcp as gcp

# Basic data-profile scan of the public BigQuery `samples.shakespeare`
# table, triggered on demand.
scan_data = {
    "resource": "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
}
scan_execution = {
    "trigger": {
        "on_demand": {},
    },
}
basic_profile = gcp.dataplex.Datascan(
    "basic_profile",
    location="us-central1",
    data_scan_id="dataprofile-basic",
    data=scan_data,
    execution_spec=scan_execution,
    # Empty spec: a profile scan with default settings.
    data_profile_spec={},
    project="my-project-name",
)
Copy
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Basic data-profile scan of the public BigQuery
		// `samples.shakespeare` table, triggered on demand.
		_, err := dataplex.NewDatascan(ctx, "basic_profile", &dataplex.DatascanArgs{
			Location:   pulumi.String("us-central1"),
			DataScanId: pulumi.String("dataprofile-basic"),
			// Full resource name of the BigQuery table to scan.
			Data: &dataplex.DatascanDataArgs{
				Resource: pulumi.String("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare"),
			},
			// Run only when explicitly requested (no schedule).
			ExecutionSpec: &dataplex.DatascanExecutionSpecArgs{
				Trigger: &dataplex.DatascanExecutionSpecTriggerArgs{
					OnDemand: &dataplex.DatascanExecutionSpecTriggerOnDemandArgs{},
				},
			},
			// Empty spec: a profile scan with default settings.
			DataProfileSpec: &dataplex.DatascanDataProfileSpecArgs{},
			Project:         pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Basic data-profile scan of the public BigQuery
    // `samples.shakespeare` table, triggered on demand.
    var basicProfile = new Gcp.DataPlex.Datascan("basic_profile", new()
    {
        Location = "us-central1",
        DataScanId = "dataprofile-basic",
        // Full resource name of the BigQuery table to scan.
        Data = new Gcp.DataPlex.Inputs.DatascanDataArgs
        {
            Resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.DatascanExecutionSpecArgs
        {
            Trigger = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerArgs
            {
                // null stands in for the empty on-demand trigger object
                // (mirrors `onDemand: {}` in the other language examples).
                OnDemand = null,
            },
        },
        // null stands in for the empty profile spec with default settings.
        DataProfileSpec = null,
        Project = "my-project-name",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataplex.Datascan;
import com.pulumi.gcp.dataplex.DatascanArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerOnDemandArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Basic data-profile scan of the public BigQuery
        // `samples.shakespeare` table, triggered on demand.
        var basicProfile = new Datascan("basicProfile", DatascanArgs.builder()
            .location("us-central1")
            .dataScanId("dataprofile-basic")
            // Full resource name of the BigQuery table to scan.
            .data(DatascanDataArgs.builder()
                .resource("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare")
                .build())
            // Run only when explicitly requested (no schedule).
            .executionSpec(DatascanExecutionSpecArgs.builder()
                .trigger(DatascanExecutionSpecTriggerArgs.builder()
                    .onDemand(DatascanExecutionSpecTriggerOnDemandArgs.builder()
                        .build())
                    .build())
                .build())
            // Empty spec: a profile scan with default settings.
            .dataProfileSpec(DatascanDataProfileSpecArgs.builder()
                .build())
            .project("my-project-name")
            .build());

    }
}
Copy
resources:
  # Basic data-profile scan of the public BigQuery `samples.shakespeare`
  # table, triggered on demand.
  basicProfile:
    type: gcp:dataplex:Datascan
    name: basic_profile
    properties:
      location: us-central1
      dataScanId: dataprofile-basic
      # Full resource name of the BigQuery table to scan.
      data:
        resource: //bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare
      # Run only when explicitly requested (no schedule).
      executionSpec:
        trigger:
          onDemand: {}
      # Empty spec: a profile scan with default settings.
      dataProfileSpec: {}
      project: my-project-name
Copy

Dataplex Datascan Full Profile

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// BigQuery dataset that receives the exported profile results.
const source = new gcp.bigquery.Dataset("source", {
    datasetId: "dataplex_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    deleteContentsOnDestroy: true,
});
// Scheduled (daily cron) profile scan with sampling, a row filter,
// include/exclude field lists, and BigQuery export of the results.
const fullProfile = new gcp.dataplex.Datascan("full_profile", {
    location: "us-central1",
    displayName: "Full Datascan Profile",
    dataScanId: "dataprofile-full",
    description: "Example resource - Full Datascan Profile",
    labels: {
        author: "billing",
    },
    data: {
        resource: "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
    },
    executionSpec: {
        trigger: {
            schedule: {
                cron: "TZ=America/New_York 1 1 * * *",
            },
        },
    },
    dataProfileSpec: {
        // Profile 80% of the rows that pass the row filter.
        samplingPercent: 80,
        rowFilter: "word_count > 10",
        includeFields: {
            fieldNames: ["word_count"],
        },
        excludeFields: {
            fieldNames: ["property_type"],
        },
        postScanActions: {
            bigqueryExport: {
                resultsTable: "//bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export",
            },
        },
    },
    project: "my-project-name",
}, {
    // Ensure the export dataset exists before the scan is created.
    dependsOn: [source],
});
Copy
import pulumi
import pulumi_gcp as gcp

# BigQuery dataset that receives the exported profile results.
source = gcp.bigquery.Dataset(
    "source",
    dataset_id="dataplex_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    delete_contents_on_destroy=True,
)

# Scheduled (daily cron) profile scan with sampling, a row filter,
# include/exclude field lists, and BigQuery export of the results.
full_profile = gcp.dataplex.Datascan(
    "full_profile",
    location="us-central1",
    display_name="Full Datascan Profile",
    data_scan_id="dataprofile-full",
    description="Example resource - Full Datascan Profile",
    labels={"author": "billing"},
    data={
        "resource": "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
    },
    execution_spec={
        "trigger": {
            "schedule": {
                "cron": "TZ=America/New_York 1 1 * * *",
            },
        },
    },
    data_profile_spec={
        "sampling_percent": 80,
        "row_filter": "word_count > 10",
        "include_fields": {"field_names": ["word_count"]},
        "exclude_fields": {"field_names": ["property_type"]},
        "post_scan_actions": {
            "bigquery_export": {
                "results_table": "//bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export",
            },
        },
    },
    project="my-project-name",
    # Ensure the export dataset exists before the scan is created.
    opts=pulumi.ResourceOptions(depends_on=[source]),
)
Copy
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// BigQuery dataset that receives the exported profile results.
		source, err := bigquery.NewDataset(ctx, "source", &bigquery.DatasetArgs{
			DatasetId:               pulumi.String("dataplex_dataset"),
			FriendlyName:            pulumi.String("test"),
			Description:             pulumi.String("This is a test description"),
			Location:                pulumi.String("US"),
			DeleteContentsOnDestroy: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		// Scheduled (daily cron) profile scan with sampling, a row filter,
		// include/exclude field lists, and BigQuery export of the results.
		_, err = dataplex.NewDatascan(ctx, "full_profile", &dataplex.DatascanArgs{
			Location:    pulumi.String("us-central1"),
			DisplayName: pulumi.String("Full Datascan Profile"),
			DataScanId:  pulumi.String("dataprofile-full"),
			Description: pulumi.String("Example resource - Full Datascan Profile"),
			Labels: pulumi.StringMap{
				"author": pulumi.String("billing"),
			},
			Data: &dataplex.DatascanDataArgs{
				Resource: pulumi.String("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare"),
			},
			ExecutionSpec: &dataplex.DatascanExecutionSpecArgs{
				Trigger: &dataplex.DatascanExecutionSpecTriggerArgs{
					Schedule: &dataplex.DatascanExecutionSpecTriggerScheduleArgs{
						Cron: pulumi.String("TZ=America/New_York 1 1 * * *"),
					},
				},
			},
			DataProfileSpec: &dataplex.DatascanDataProfileSpecArgs{
				// Profile 80% of the rows that pass the row filter.
				SamplingPercent: pulumi.Float64(80),
				RowFilter:       pulumi.String("word_count > 10"),
				IncludeFields: &dataplex.DatascanDataProfileSpecIncludeFieldsArgs{
					FieldNames: pulumi.StringArray{
						pulumi.String("word_count"),
					},
				},
				ExcludeFields: &dataplex.DatascanDataProfileSpecExcludeFieldsArgs{
					FieldNames: pulumi.StringArray{
						pulumi.String("property_type"),
					},
				},
				PostScanActions: &dataplex.DatascanDataProfileSpecPostScanActionsArgs{
					BigqueryExport: &dataplex.DatascanDataProfileSpecPostScanActionsBigqueryExportArgs{
						ResultsTable: pulumi.String("//bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export"),
					},
				},
			},
			Project: pulumi.String("my-project-name"),
		}, pulumi.DependsOn([]pulumi.Resource{
			// Ensure the export dataset exists before the scan is created.
			source,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // BigQuery dataset that receives the exported profile results.
    var source = new Gcp.BigQuery.Dataset("source", new()
    {
        DatasetId = "dataplex_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        DeleteContentsOnDestroy = true,
    });

    // Scheduled (daily cron) profile scan with sampling, a row filter,
    // include/exclude field lists, and BigQuery export of the results.
    var fullProfile = new Gcp.DataPlex.Datascan("full_profile", new()
    {
        Location = "us-central1",
        DisplayName = "Full Datascan Profile",
        DataScanId = "dataprofile-full",
        Description = "Example resource - Full Datascan Profile",
        Labels = 
        {
            { "author", "billing" },
        },
        Data = new Gcp.DataPlex.Inputs.DatascanDataArgs
        {
            Resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.DatascanExecutionSpecArgs
        {
            Trigger = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerArgs
            {
                Schedule = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerScheduleArgs
                {
                    Cron = "TZ=America/New_York 1 1 * * *",
                },
            },
        },
        DataProfileSpec = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecArgs
        {
            // Profile 80% of the rows that pass the row filter.
            SamplingPercent = 80,
            RowFilter = "word_count > 10",
            IncludeFields = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecIncludeFieldsArgs
            {
                FieldNames = new[]
                {
                    "word_count",
                },
            },
            ExcludeFields = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecExcludeFieldsArgs
            {
                FieldNames = new[]
                {
                    "property_type",
                },
            },
            PostScanActions = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecPostScanActionsArgs
            {
                BigqueryExport = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecPostScanActionsBigqueryExportArgs
                {
                    ResultsTable = "//bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export",
                },
            },
        },
        Project = "my-project-name",
    }, new CustomResourceOptions
    {
        // Ensure the export dataset exists before the scan is created.
        DependsOn =
        {
            source,
        },
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.dataplex.Datascan;
import com.pulumi.gcp.dataplex.DatascanArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerScheduleArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecIncludeFieldsArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecExcludeFieldsArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecPostScanActionsArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataProfileSpecPostScanActionsBigqueryExportArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // BigQuery dataset that receives the exported profile results.
        var source = new Dataset("source", DatasetArgs.builder()
            .datasetId("dataplex_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            .deleteContentsOnDestroy(true)
            .build());

        // Scheduled (daily cron) profile scan with sampling, a row filter,
        // include/exclude field lists, and BigQuery export of the results.
        var fullProfile = new Datascan("fullProfile", DatascanArgs.builder()
            .location("us-central1")
            .displayName("Full Datascan Profile")
            .dataScanId("dataprofile-full")
            .description("Example resource - Full Datascan Profile")
            .labels(Map.of("author", "billing"))
            .data(DatascanDataArgs.builder()
                .resource("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare")
                .build())
            .executionSpec(DatascanExecutionSpecArgs.builder()
                .trigger(DatascanExecutionSpecTriggerArgs.builder()
                    .schedule(DatascanExecutionSpecTriggerScheduleArgs.builder()
                        .cron("TZ=America/New_York 1 1 * * *")
                        .build())
                    .build())
                .build())
            .dataProfileSpec(DatascanDataProfileSpecArgs.builder()
                // Profile 80% of the rows that pass the row filter.
                .samplingPercent(80.0)
                .rowFilter("word_count > 10")
                .includeFields(DatascanDataProfileSpecIncludeFieldsArgs.builder()
                    .fieldNames("word_count")
                    .build())
                .excludeFields(DatascanDataProfileSpecExcludeFieldsArgs.builder()
                    .fieldNames("property_type")
                    .build())
                .postScanActions(DatascanDataProfileSpecPostScanActionsArgs.builder()
                    .bigqueryExport(DatascanDataProfileSpecPostScanActionsBigqueryExportArgs.builder()
                        .resultsTable("//bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export")
                        .build())
                    .build())
                .build())
            .project("my-project-name")
            // Ensure the export dataset exists before the scan is created.
            .build(), CustomResourceOptions.builder()
                .dependsOn(source)
                .build());

    }
}
Copy
resources:
  # Scheduled (daily cron) profile scan with sampling, a row filter,
  # include/exclude field lists, and BigQuery export of the results.
  fullProfile:
    type: gcp:dataplex:Datascan
    name: full_profile
    properties:
      location: us-central1
      displayName: Full Datascan Profile
      dataScanId: dataprofile-full
      description: Example resource - Full Datascan Profile
      labels:
        author: billing
      data:
        resource: //bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare
      executionSpec:
        trigger:
          schedule:
            cron: TZ=America/New_York 1 1 * * *
      dataProfileSpec:
        # Profile 80% of the rows that pass the row filter.
        samplingPercent: 80
        rowFilter: word_count > 10
        includeFields:
          fieldNames:
            - word_count
        excludeFields:
          fieldNames:
            - property_type
        postScanActions:
          bigqueryExport:
            resultsTable: //bigquery.googleapis.com/projects/my-project-name/datasets/dataplex_dataset/tables/profile_export
      project: my-project-name
    options:
      # Ensure the export dataset exists before the scan is created.
      dependsOn:
        - ${source}
  # BigQuery dataset that receives the exported profile results.
  source:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: dataplex_dataset
      friendlyName: test
      description: This is a test description
      location: US
      deleteContentsOnDestroy: true
Copy

Dataplex Datascan Basic Quality

// Basic data-quality scan: a single table-level VALIDITY rule asserting
// the table is non-empty, triggered on demand.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const basicQuality = new gcp.dataplex.Datascan("basic_quality", {
    location: "us-central1",
    dataScanId: "dataquality-basic",
    data: {
        resource: "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
    },
    executionSpec: {
        trigger: {
            onDemand: {},
        },
    },
    dataQualitySpec: {
        rules: [{
            dimension: "VALIDITY",
            name: "rule1",
            description: "rule 1 for validity dimension",
            // Table-level SQL condition evaluated against the whole table.
            tableConditionExpectation: {
                sqlExpression: "COUNT(*) > 0",
            },
        }],
    },
    project: "my-project-name",
});
Copy
import pulumi
import pulumi_gcp as gcp

# Basic data-quality scan: a single table-level VALIDITY rule asserting
# the table is non-empty, triggered on demand.
quality_rules = [{
    "dimension": "VALIDITY",
    "name": "rule1",
    "description": "rule 1 for validity dimension",
    "table_condition_expectation": {
        "sql_expression": "COUNT(*) > 0",
    },
}]
basic_quality = gcp.dataplex.Datascan(
    "basic_quality",
    location="us-central1",
    data_scan_id="dataquality-basic",
    data={
        "resource": "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
    },
    execution_spec={
        "trigger": {
            "on_demand": {},
        },
    },
    data_quality_spec={"rules": quality_rules},
    project="my-project-name",
)
Copy
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Basic data-quality scan: a single table-level VALIDITY rule
		// asserting the table is non-empty, triggered on demand.
		_, err := dataplex.NewDatascan(ctx, "basic_quality", &dataplex.DatascanArgs{
			Location:   pulumi.String("us-central1"),
			DataScanId: pulumi.String("dataquality-basic"),
			Data: &dataplex.DatascanDataArgs{
				Resource: pulumi.String("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare"),
			},
			ExecutionSpec: &dataplex.DatascanExecutionSpecArgs{
				Trigger: &dataplex.DatascanExecutionSpecTriggerArgs{
					OnDemand: &dataplex.DatascanExecutionSpecTriggerOnDemandArgs{},
				},
			},
			DataQualitySpec: &dataplex.DatascanDataQualitySpecArgs{
				Rules: dataplex.DatascanDataQualitySpecRuleArray{
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Dimension:   pulumi.String("VALIDITY"),
						Name:        pulumi.String("rule1"),
						Description: pulumi.String("rule 1 for validity dimension"),
						// Table-level SQL condition evaluated against the whole table.
						TableConditionExpectation: &dataplex.DatascanDataQualitySpecRuleTableConditionExpectationArgs{
							SqlExpression: pulumi.String("COUNT(*) > 0"),
						},
					},
				},
			},
			Project: pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Basic data-quality scan: a single table-level VALIDITY rule
    // asserting the table is non-empty, triggered on demand.
    var basicQuality = new Gcp.DataPlex.Datascan("basic_quality", new()
    {
        Location = "us-central1",
        DataScanId = "dataquality-basic",
        Data = new Gcp.DataPlex.Inputs.DatascanDataArgs
        {
            Resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.DatascanExecutionSpecArgs
        {
            Trigger = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerArgs
            {
                // null stands in for the empty on-demand trigger object
                // (mirrors `onDemand: {}` in the other language examples).
                OnDemand = null,
            },
        },
        DataQualitySpec = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecArgs
        {
            Rules = new[]
            {
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Dimension = "VALIDITY",
                    Name = "rule1",
                    Description = "rule 1 for validity dimension",
                    // Table-level SQL condition evaluated against the whole table.
                    TableConditionExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleTableConditionExpectationArgs
                    {
                        SqlExpression = "COUNT(*) > 0",
                    },
                },
            },
        },
        Project = "my-project-name",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataplex.Datascan;
import com.pulumi.gcp.dataplex.DatascanArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerOnDemandArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecArgs;
// Fix: the two rule builder classes below are referenced in stack() but were
// missing from the original import list, so the example did not compile.
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleTableConditionExpectationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Basic data-quality scan: a single table-level VALIDITY rule
        // asserting the table is non-empty, triggered on demand.
        var basicQuality = new Datascan("basicQuality", DatascanArgs.builder()
            .location("us-central1")
            .dataScanId("dataquality-basic")
            .data(DatascanDataArgs.builder()
                .resource("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare")
                .build())
            .executionSpec(DatascanExecutionSpecArgs.builder()
                .trigger(DatascanExecutionSpecTriggerArgs.builder()
                    .onDemand(DatascanExecutionSpecTriggerOnDemandArgs.builder()
                        .build())
                    .build())
                .build())
            .dataQualitySpec(DatascanDataQualitySpecArgs.builder()
                .rules(DatascanDataQualitySpecRuleArgs.builder()
                    .dimension("VALIDITY")
                    .name("rule1")
                    .description("rule 1 for validity dimension")
                    // Table-level SQL condition evaluated against the whole table.
                    .tableConditionExpectation(DatascanDataQualitySpecRuleTableConditionExpectationArgs.builder()
                        .sqlExpression("COUNT(*) > 0")
                        .build())
                    .build())
                .build())
            .project("my-project-name")
            .build());

    }
}
Copy
resources:
  # Basic data-quality scan: a single table-level VALIDITY rule asserting
  # the table is non-empty, triggered on demand.
  basicQuality:
    type: gcp:dataplex:Datascan
    name: basic_quality
    properties:
      location: us-central1
      dataScanId: dataquality-basic
      data:
        resource: //bigquery.googleapis.com/projects/bigquery-public-data/datasets/samples/tables/shakespeare
      executionSpec:
        trigger:
          onDemand: {}
      dataQualitySpec:
        rules:
          - dimension: VALIDITY
            name: rule1
            description: rule 1 for validity dimension
            # Table-level SQL condition evaluated against the whole table.
            tableConditionExpectation:
              sqlExpression: COUNT(*) > 0
      project: my-project-name
Copy

Dataplex Datascan Full Quality

// Full data-quality scan of the public Austin bikeshare stations table:
// scheduled daily, sampled, row-filtered, with one rule per expectation type.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const fullQuality = new gcp.dataplex.Datascan("full_quality", {
    location: "us-central1",
    displayName: "Full Datascan Quality",
    dataScanId: "dataquality-full",
    description: "Example resource - Full Datascan Quality",
    labels: {
        author: "billing",
    },
    data: {
        resource: "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations",
    },
    executionSpec: {
        trigger: {
            schedule: {
                cron: "TZ=America/New_York 1 1 * * *",
            },
        },
        // Incremental field used by the execution spec.
        field: "modified_date",
    },
    dataQualitySpec: {
        samplingPercent: 5,
        rowFilter: "station_id > 1000",
        rules: [
            // Non-null check on `address` with a 99% passing threshold.
            {
                column: "address",
                dimension: "VALIDITY",
                threshold: 0.99,
                nonNullExpectation: {},
            },
            // Range check: 1 < council_district <= 10, nulls ignored.
            {
                column: "council_district",
                dimension: "VALIDITY",
                ignoreNull: true,
                threshold: 0.9,
                rangeExpectation: {
                    minValue: "1",
                    maxValue: "10",
                    strictMinEnabled: true,
                    strictMaxEnabled: false,
                },
            },
            // Regex match on `power_type`.
            {
                column: "power_type",
                dimension: "VALIDITY",
                ignoreNull: false,
                regexExpectation: {
                    regex: ".*solar.*",
                },
            },
            // Membership in a fixed value set.
            {
                column: "property_type",
                dimension: "VALIDITY",
                ignoreNull: false,
                setExpectation: {
                    values: [
                        "sidewalk",
                        "parkland",
                    ],
                },
            },
            // Uniqueness check on `address`.
            {
                column: "address",
                dimension: "UNIQUENESS",
                uniquenessExpectation: {},
            },
            // Aggregate statistic check: 5 < MEAN(number_of_docks) < 15.
            {
                column: "number_of_docks",
                dimension: "VALIDITY",
                statisticRangeExpectation: {
                    statistic: "MEAN",
                    minValue: "5",
                    maxValue: "15",
                    strictMinEnabled: true,
                    strictMaxEnabled: true,
                },
            },
            // Per-row SQL condition.
            {
                column: "footprint_length",
                dimension: "VALIDITY",
                rowConditionExpectation: {
                    sqlExpression: "footprint_length > 0 AND footprint_length <= 10",
                },
            },
            // Table-level SQL condition.
            {
                dimension: "VALIDITY",
                tableConditionExpectation: {
                    sqlExpression: "COUNT(*) > 0",
                },
            },
            // SQL assertion: the statement must return no rows to pass.
            {
                dimension: "VALIDITY",
                sqlAssertion: {
                    sqlStatement: "select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null",
                },
            },
        ],
    },
    project: "my-project-name",
});
Copy
import pulumi
import pulumi_gcp as gcp

# Full data-quality scan of the public Austin bikeshare stations table:
# scheduled daily, sampled, row-filtered, with one rule per expectation type.
full_quality = gcp.dataplex.Datascan("full_quality",
    location="us-central1",
    display_name="Full Datascan Quality",
    data_scan_id="dataquality-full",
    description="Example resource - Full Datascan Quality",
    labels={
        "author": "billing",
    },
    data={
        "resource": "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations",
    },
    execution_spec={
        "trigger": {
            "schedule": {
                "cron": "TZ=America/New_York 1 1 * * *",
            },
        },
        # Incremental field used by the execution spec.
        "field": "modified_date",
    },
    data_quality_spec={
        "sampling_percent": 5,
        "row_filter": "station_id > 1000",
        "rules": [
            # Non-null check on `address` with a 99% passing threshold.
            {
                "column": "address",
                "dimension": "VALIDITY",
                "threshold": 0.99,
                "non_null_expectation": {},
            },
            # Range check: 1 < council_district <= 10, nulls ignored.
            {
                "column": "council_district",
                "dimension": "VALIDITY",
                "ignore_null": True,
                "threshold": 0.9,
                "range_expectation": {
                    "min_value": "1",
                    "max_value": "10",
                    "strict_min_enabled": True,
                    "strict_max_enabled": False,
                },
            },
            # Regex match on `power_type`.
            {
                "column": "power_type",
                "dimension": "VALIDITY",
                "ignore_null": False,
                "regex_expectation": {
                    "regex": ".*solar.*",
                },
            },
            # Membership in a fixed value set.
            {
                "column": "property_type",
                "dimension": "VALIDITY",
                "ignore_null": False,
                "set_expectation": {
                    "values": [
                        "sidewalk",
                        "parkland",
                    ],
                },
            },
            # Uniqueness check on `address`.
            {
                "column": "address",
                "dimension": "UNIQUENESS",
                "uniqueness_expectation": {},
            },
            # Aggregate statistic check: 5 < MEAN(number_of_docks) < 15.
            {
                "column": "number_of_docks",
                "dimension": "VALIDITY",
                "statistic_range_expectation": {
                    "statistic": "MEAN",
                    "min_value": "5",
                    "max_value": "15",
                    "strict_min_enabled": True,
                    "strict_max_enabled": True,
                },
            },
            # Per-row SQL condition.
            {
                "column": "footprint_length",
                "dimension": "VALIDITY",
                "row_condition_expectation": {
                    "sql_expression": "footprint_length > 0 AND footprint_length <= 10",
                },
            },
            # Table-level SQL condition.
            {
                "dimension": "VALIDITY",
                "table_condition_expectation": {
                    "sql_expression": "COUNT(*) > 0",
                },
            },
            # SQL assertion: the statement must return no rows to pass.
            {
                "dimension": "VALIDITY",
                "sql_assertion": {
                    "sql_statement": "select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null",
                },
            },
        ],
    },
    project="my-project-name")
Copy
package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataplex"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataplex.NewDatascan(ctx, "full_quality", &dataplex.DatascanArgs{
			Location:    pulumi.String("us-central1"),
			DisplayName: pulumi.String("Full Datascan Quality"),
			DataScanId:  pulumi.String("dataquality-full"),
			Description: pulumi.String("Example resource - Full Datascan Quality"),
			Labels: pulumi.StringMap{
				"author": pulumi.String("billing"),
			},
			Data: &dataplex.DatascanDataArgs{
				Resource: pulumi.String("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations"),
			},
			ExecutionSpec: &dataplex.DatascanExecutionSpecArgs{
				Trigger: &dataplex.DatascanExecutionSpecTriggerArgs{
					Schedule: &dataplex.DatascanExecutionSpecTriggerScheduleArgs{
						Cron: pulumi.String("TZ=America/New_York 1 1 * * *"),
					},
				},
				Field: pulumi.String("modified_date"),
			},
			DataQualitySpec: &dataplex.DatascanDataQualitySpecArgs{
				SamplingPercent: pulumi.Float64(5),
				RowFilter:       pulumi.String("station_id > 1000"),
				Rules: dataplex.DatascanDataQualitySpecRuleArray{
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:             pulumi.String("address"),
						Dimension:          pulumi.String("VALIDITY"),
						Threshold:          pulumi.Float64(0.99),
						NonNullExpectation: &dataplex.DatascanDataQualitySpecRuleNonNullExpectationArgs{},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:     pulumi.String("council_district"),
						Dimension:  pulumi.String("VALIDITY"),
						IgnoreNull: pulumi.Bool(true),
						Threshold:  pulumi.Float64(0.9),
						RangeExpectation: &dataplex.DatascanDataQualitySpecRuleRangeExpectationArgs{
							MinValue:         pulumi.String("1"),
							MaxValue:         pulumi.String("10"),
							StrictMinEnabled: pulumi.Bool(true),
							StrictMaxEnabled: pulumi.Bool(false),
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:     pulumi.String("power_type"),
						Dimension:  pulumi.String("VALIDITY"),
						IgnoreNull: pulumi.Bool(false),
						RegexExpectation: &dataplex.DatascanDataQualitySpecRuleRegexExpectationArgs{
							Regex: pulumi.String(".*solar.*"),
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:     pulumi.String("property_type"),
						Dimension:  pulumi.String("VALIDITY"),
						IgnoreNull: pulumi.Bool(false),
						SetExpectation: &dataplex.DatascanDataQualitySpecRuleSetExpectationArgs{
							Values: pulumi.StringArray{
								pulumi.String("sidewalk"),
								pulumi.String("parkland"),
							},
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:                pulumi.String("address"),
						Dimension:             pulumi.String("UNIQUENESS"),
						UniquenessExpectation: &dataplex.DatascanDataQualitySpecRuleUniquenessExpectationArgs{},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:    pulumi.String("number_of_docks"),
						Dimension: pulumi.String("VALIDITY"),
						StatisticRangeExpectation: &dataplex.DatascanDataQualitySpecRuleStatisticRangeExpectationArgs{
							Statistic:        pulumi.String("MEAN"),
							MinValue:         pulumi.String("5"),
							MaxValue:         pulumi.String("15"),
							StrictMinEnabled: pulumi.Bool(true),
							StrictMaxEnabled: pulumi.Bool(true),
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Column:    pulumi.String("footprint_length"),
						Dimension: pulumi.String("VALIDITY"),
						RowConditionExpectation: &dataplex.DatascanDataQualitySpecRuleRowConditionExpectationArgs{
							SqlExpression: pulumi.String("footprint_length > 0 AND footprint_length <= 10"),
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Dimension: pulumi.String("VALIDITY"),
						TableConditionExpectation: &dataplex.DatascanDataQualitySpecRuleTableConditionExpectationArgs{
							SqlExpression: pulumi.String("COUNT(*) > 0"),
						},
					},
					&dataplex.DatascanDataQualitySpecRuleArgs{
						Dimension: pulumi.String("VALIDITY"),
						SqlAssertion: &dataplex.DatascanDataQualitySpecRuleSqlAssertionArgs{
							SqlStatement: pulumi.String("select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null"),
						},
					},
				},
			},
			Project: pulumi.String("my-project-name"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    // Full data-quality scan over the public Austin bikeshare stations table.
    // NOTE: parameterless expectations (non-null, uniqueness) must be set to an
    // instantiated empty args object; assigning null leaves the property unset,
    // which is not equivalent to the empty `{}` used in the other language examples.
    var fullQuality = new Gcp.DataPlex.Datascan("full_quality", new()
    {
        Location = "us-central1",
        DisplayName = "Full Datascan Quality",
        DataScanId = "dataquality-full",
        Description = "Example resource - Full Datascan Quality",
        Labels = 
        {
            { "author", "billing" },
        },
        Data = new Gcp.DataPlex.Inputs.DatascanDataArgs
        {
            Resource = "//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations",
        },
        ExecutionSpec = new Gcp.DataPlex.Inputs.DatascanExecutionSpecArgs
        {
            Trigger = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerArgs
            {
                Schedule = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerScheduleArgs
                {
                    Cron = "TZ=America/New_York 1 1 * * *",
                },
            },
            Field = "modified_date",
        },
        DataQualitySpec = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecArgs
        {
            SamplingPercent = 5,
            RowFilter = "station_id > 1000",
            Rules = new[]
            {
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "address",
                    Dimension = "VALIDITY",
                    Threshold = 0.99,
                    // Instantiate (not null) so the expectation is actually sent.
                    NonNullExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleNonNullExpectationArgs(),
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "council_district",
                    Dimension = "VALIDITY",
                    IgnoreNull = true,
                    Threshold = 0.9,
                    RangeExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRangeExpectationArgs
                    {
                        MinValue = "1",
                        MaxValue = "10",
                        StrictMinEnabled = true,
                        StrictMaxEnabled = false,
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "power_type",
                    Dimension = "VALIDITY",
                    IgnoreNull = false,
                    RegexExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRegexExpectationArgs
                    {
                        Regex = ".*solar.*",
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "property_type",
                    Dimension = "VALIDITY",
                    IgnoreNull = false,
                    SetExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleSetExpectationArgs
                    {
                        Values = new[]
                        {
                            "sidewalk",
                            "parkland",
                        },
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "address",
                    Dimension = "UNIQUENESS",
                    // Instantiate (not null) so the expectation is actually sent.
                    UniquenessExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleUniquenessExpectationArgs(),
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "number_of_docks",
                    Dimension = "VALIDITY",
                    StatisticRangeExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleStatisticRangeExpectationArgs
                    {
                        Statistic = "MEAN",
                        MinValue = "5",
                        MaxValue = "15",
                        StrictMinEnabled = true,
                        StrictMaxEnabled = true,
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Column = "footprint_length",
                    Dimension = "VALIDITY",
                    RowConditionExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRowConditionExpectationArgs
                    {
                        SqlExpression = "footprint_length > 0 AND footprint_length <= 10",
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Dimension = "VALIDITY",
                    TableConditionExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleTableConditionExpectationArgs
                    {
                        SqlExpression = "COUNT(*) > 0",
                    },
                },
                new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
                {
                    Dimension = "VALIDITY",
                    SqlAssertion = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleSqlAssertionArgs
                    {
                        SqlStatement = "select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null",
                    },
                },
            },
        },
        Project = "my-project-name",
    });

});
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataplex.Datascan;
import com.pulumi.gcp.dataplex.DatascanArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanExecutionSpecTriggerScheduleArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecArgs;
// Rule and expectation input types used in the example below; without these
// imports the example does not compile.
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleNonNullExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleRangeExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleRegexExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleSetExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleUniquenessExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleStatisticRangeExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleRowConditionExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleTableConditionExpectationArgs;
import com.pulumi.gcp.dataplex.inputs.DatascanDataQualitySpecRuleSqlAssertionArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    // Declares a full data-quality Datascan over the public Austin bikeshare
    // stations table, with one rule per supported expectation type.
    public static void stack(Context ctx) {
        var fullQuality = new Datascan("fullQuality", DatascanArgs.builder()
            .location("us-central1")
            .displayName("Full Datascan Quality")
            .dataScanId("dataquality-full")
            .description("Example resource - Full Datascan Quality")
            .labels(Map.of("author", "billing"))
            .data(DatascanDataArgs.builder()
                .resource("//bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations")
                .build())
            .executionSpec(DatascanExecutionSpecArgs.builder()
                .trigger(DatascanExecutionSpecTriggerArgs.builder()
                    .schedule(DatascanExecutionSpecTriggerScheduleArgs.builder()
                        .cron("TZ=America/New_York 1 1 * * *")
                        .build())
                    .build())
                .field("modified_date")
                .build())
            .dataQualitySpec(DatascanDataQualitySpecArgs.builder()
                .samplingPercent(5.0)
                .rowFilter("station_id > 1000")
                .rules(                
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("address")
                        .dimension("VALIDITY")
                        .threshold(0.99)
                        .nonNullExpectation(DatascanDataQualitySpecRuleNonNullExpectationArgs.builder()
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("council_district")
                        .dimension("VALIDITY")
                        .ignoreNull(true)
                        .threshold(0.9)
                        .rangeExpectation(DatascanDataQualitySpecRuleRangeExpectationArgs.builder()
                            .minValue("1")
                            .maxValue("10")
                            .strictMinEnabled(true)
                            .strictMaxEnabled(false)
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("power_type")
                        .dimension("VALIDITY")
                        .ignoreNull(false)
                        .regexExpectation(DatascanDataQualitySpecRuleRegexExpectationArgs.builder()
                            .regex(".*solar.*")
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("property_type")
                        .dimension("VALIDITY")
                        .ignoreNull(false)
                        .setExpectation(DatascanDataQualitySpecRuleSetExpectationArgs.builder()
                            .values(                            
                                "sidewalk",
                                "parkland")
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("address")
                        .dimension("UNIQUENESS")
                        .uniquenessExpectation(DatascanDataQualitySpecRuleUniquenessExpectationArgs.builder()
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("number_of_docks")
                        .dimension("VALIDITY")
                        .statisticRangeExpectation(DatascanDataQualitySpecRuleStatisticRangeExpectationArgs.builder()
                            .statistic("MEAN")
                            .minValue("5")
                            .maxValue("15")
                            .strictMinEnabled(true)
                            .strictMaxEnabled(true)
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .column("footprint_length")
                        .dimension("VALIDITY")
                        .rowConditionExpectation(DatascanDataQualitySpecRuleRowConditionExpectationArgs.builder()
                            .sqlExpression("footprint_length > 0 AND footprint_length <= 10")
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .dimension("VALIDITY")
                        .tableConditionExpectation(DatascanDataQualitySpecRuleTableConditionExpectationArgs.builder()
                            .sqlExpression("COUNT(*) > 0")
                            .build())
                        .build(),
                    DatascanDataQualitySpecRuleArgs.builder()
                        .dimension("VALIDITY")
                        .sqlAssertion(DatascanDataQualitySpecRuleSqlAssertionArgs.builder()
                            .sqlStatement("select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null")
                            .build())
                        .build())
                .build())
            .project("my-project-name")
            .build());

    }
}
Copy
# Full data-quality Datascan over the public Austin bikeshare stations table,
# triggered on a cron schedule and sampling 5% of rows matching the row filter.
resources:
  fullQuality:
    type: gcp:dataplex:Datascan
    name: full_quality
    properties:
      location: us-central1
      displayName: Full Datascan Quality
      dataScanId: dataquality-full
      description: Example resource - Full Datascan Quality
      labels:
        author: billing
      # The BigQuery table the scan reads from (full resource path form).
      data:
        resource: //bigquery.googleapis.com/projects/bigquery-public-data/datasets/austin_bikeshare/tables/bikeshare_stations
      executionSpec:
        trigger:
          schedule:
            cron: TZ=America/New_York 1 1 * * *
        field: modified_date
      dataQualitySpec:
        samplingPercent: 5
        rowFilter: station_id > 1000
        # One rule per supported expectation type.
        rules:
          # address must be non-null, with a 0.99 passing threshold.
          - column: address
            dimension: VALIDITY
            threshold: 0.99
            nonNullExpectation: {}
          # council_district range check (strict min, inclusive max); nulls ignored.
          - column: council_district
            dimension: VALIDITY
            ignoreNull: true
            threshold: 0.9
            rangeExpectation:
              minValue: 1
              maxValue: 10
              strictMinEnabled: true
              strictMaxEnabled: false
          # power_type must match a regular expression.
          - column: power_type
            dimension: VALIDITY
            ignoreNull: false
            regexExpectation:
              regex: .*solar.*
          # property_type must belong to a fixed value set.
          - column: property_type
            dimension: VALIDITY
            ignoreNull: false
            setExpectation:
              values:
                - sidewalk
                - parkland
          # address values must be unique.
          - column: address
            dimension: UNIQUENESS
            uniquenessExpectation: {}
          # MEAN of number_of_docks is range-checked with strict bounds.
          - column: number_of_docks
            dimension: VALIDITY
            statisticRangeExpectation:
              statistic: MEAN
              minValue: 5
              maxValue: 15
              strictMinEnabled: true
              strictMaxEnabled: true
          # Row-level SQL predicate.
          - column: footprint_length
            dimension: VALIDITY
            rowConditionExpectation:
              sqlExpression: footprint_length > 0 AND footprint_length <= 10
          # Table-level SQL predicate (no column attached).
          - dimension: VALIDITY
            tableConditionExpectation:
              sqlExpression: COUNT(*) > 0
          # Free-form SQL assertion statement.
          - dimension: VALIDITY
            sqlAssertion:
              sqlStatement: select * from bigquery-public-data.austin_bikeshare.bikeshare_stations where station_id is null
      project: my-project-name
Copy

Create Datascan Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new Datascan(name: string, args: DatascanArgs, opts?: CustomResourceOptions);
@overload
def Datascan(resource_name: str,
             args: DatascanArgs,
             opts: Optional[ResourceOptions] = None)

@overload
def Datascan(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             data: Optional[DatascanDataArgs] = None,
             data_scan_id: Optional[str] = None,
             execution_spec: Optional[DatascanExecutionSpecArgs] = None,
             location: Optional[str] = None,
             data_profile_spec: Optional[DatascanDataProfileSpecArgs] = None,
             data_quality_spec: Optional[DatascanDataQualitySpecArgs] = None,
             description: Optional[str] = None,
             display_name: Optional[str] = None,
             labels: Optional[Mapping[str, str]] = None,
             project: Optional[str] = None)
func NewDatascan(ctx *Context, name string, args DatascanArgs, opts ...ResourceOption) (*Datascan, error)
public Datascan(string name, DatascanArgs args, CustomResourceOptions? opts = null)
public Datascan(String name, DatascanArgs args)
public Datascan(String name, DatascanArgs args, CustomResourceOptions options)
type: gcp:dataplex:Datascan
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. DatascanArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. DatascanArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. DatascanArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. DatascanArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. DatascanArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

// Reference-only constructor example: every input property is shown with a
// placeholder value ("string", 0, false) to illustrate the complete input
// shape of gcp.dataplex.Datascan; it is not a runnable configuration.
var datascanResource = new Gcp.DataPlex.Datascan("datascanResource", new()
{
    Data = new Gcp.DataPlex.Inputs.DatascanDataArgs
    {
        Entity = "string",
        Resource = "string",
    },
    DataScanId = "string",
    ExecutionSpec = new Gcp.DataPlex.Inputs.DatascanExecutionSpecArgs
    {
        Trigger = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerArgs
        {
            OnDemand = null,
            Schedule = new Gcp.DataPlex.Inputs.DatascanExecutionSpecTriggerScheduleArgs
            {
                Cron = "string",
            },
        },
        Field = "string",
    },
    Location = "string",
    DataProfileSpec = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecArgs
    {
        ExcludeFields = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecExcludeFieldsArgs
        {
            FieldNames = new[]
            {
                "string",
            },
        },
        IncludeFields = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecIncludeFieldsArgs
        {
            FieldNames = new[]
            {
                "string",
            },
        },
        PostScanActions = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecPostScanActionsArgs
        {
            BigqueryExport = new Gcp.DataPlex.Inputs.DatascanDataProfileSpecPostScanActionsBigqueryExportArgs
            {
                ResultsTable = "string",
            },
        },
        RowFilter = "string",
        SamplingPercent = 0,
    },
    DataQualitySpec = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecArgs
    {
        PostScanActions = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecPostScanActionsArgs
        {
            BigqueryExport = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecPostScanActionsBigqueryExportArgs
            {
                ResultsTable = "string",
            },
        },
        RowFilter = "string",
        Rules = new[]
        {
            // A single rule showing all expectation properties side by side;
            // a real rule sets exactly one expectation.
            new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleArgs
            {
                Dimension = "string",
                RangeExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRangeExpectationArgs
                {
                    MaxValue = "string",
                    MinValue = "string",
                    StrictMaxEnabled = false,
                    StrictMinEnabled = false,
                },
                RowConditionExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRowConditionExpectationArgs
                {
                    SqlExpression = "string",
                },
                IgnoreNull = false,
                Name = "string",
                NonNullExpectation = null,
                Column = "string",
                RegexExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleRegexExpectationArgs
                {
                    Regex = "string",
                },
                Description = "string",
                SetExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleSetExpectationArgs
                {
                    Values = new[]
                    {
                        "string",
                    },
                },
                SqlAssertion = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleSqlAssertionArgs
                {
                    SqlStatement = "string",
                },
                StatisticRangeExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleStatisticRangeExpectationArgs
                {
                    Statistic = "string",
                    MaxValue = "string",
                    MinValue = "string",
                    StrictMaxEnabled = false,
                    StrictMinEnabled = false,
                },
                TableConditionExpectation = new Gcp.DataPlex.Inputs.DatascanDataQualitySpecRuleTableConditionExpectationArgs
                {
                    SqlExpression = "string",
                },
                Threshold = 0,
                UniquenessExpectation = null,
            },
        },
        SamplingPercent = 0,
    },
    Description = "string",
    DisplayName = "string",
    Labels = 
    {
        { "string", "string" },
    },
    Project = "string",
});
Copy
// Reference-only constructor example: every input property is shown with a
// placeholder value ("string", 0, false) to illustrate the complete input
// shape of gcp.dataplex.Datascan; it is not a runnable configuration.
example, err := dataplex.NewDatascan(ctx, "datascanResource", &dataplex.DatascanArgs{
	Data: &dataplex.DatascanDataArgs{
		Entity:   pulumi.String("string"),
		Resource: pulumi.String("string"),
	},
	DataScanId: pulumi.String("string"),
	ExecutionSpec: &dataplex.DatascanExecutionSpecArgs{
		Trigger: &dataplex.DatascanExecutionSpecTriggerArgs{
			OnDemand: &dataplex.DatascanExecutionSpecTriggerOnDemandArgs{},
			Schedule: &dataplex.DatascanExecutionSpecTriggerScheduleArgs{
				Cron: pulumi.String("string"),
			},
		},
		Field: pulumi.String("string"),
	},
	Location: pulumi.String("string"),
	DataProfileSpec: &dataplex.DatascanDataProfileSpecArgs{
		ExcludeFields: &dataplex.DatascanDataProfileSpecExcludeFieldsArgs{
			FieldNames: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
		IncludeFields: &dataplex.DatascanDataProfileSpecIncludeFieldsArgs{
			FieldNames: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
		PostScanActions: &dataplex.DatascanDataProfileSpecPostScanActionsArgs{
			BigqueryExport: &dataplex.DatascanDataProfileSpecPostScanActionsBigqueryExportArgs{
				ResultsTable: pulumi.String("string"),
			},
		},
		RowFilter:       pulumi.String("string"),
		SamplingPercent: pulumi.Float64(0),
	},
	DataQualitySpec: &dataplex.DatascanDataQualitySpecArgs{
		PostScanActions: &dataplex.DatascanDataQualitySpecPostScanActionsArgs{
			BigqueryExport: &dataplex.DatascanDataQualitySpecPostScanActionsBigqueryExportArgs{
				ResultsTable: pulumi.String("string"),
			},
		},
		RowFilter: pulumi.String("string"),
		Rules: dataplex.DatascanDataQualitySpecRuleArray{
			// A single rule showing all expectation properties side by side;
			// a real rule sets exactly one expectation.
			&dataplex.DatascanDataQualitySpecRuleArgs{
				Dimension: pulumi.String("string"),
				RangeExpectation: &dataplex.DatascanDataQualitySpecRuleRangeExpectationArgs{
					MaxValue:         pulumi.String("string"),
					MinValue:         pulumi.String("string"),
					StrictMaxEnabled: pulumi.Bool(false),
					StrictMinEnabled: pulumi.Bool(false),
				},
				RowConditionExpectation: &dataplex.DatascanDataQualitySpecRuleRowConditionExpectationArgs{
					SqlExpression: pulumi.String("string"),
				},
				IgnoreNull:         pulumi.Bool(false),
				Name:               pulumi.String("string"),
				NonNullExpectation: &dataplex.DatascanDataQualitySpecRuleNonNullExpectationArgs{},
				Column:             pulumi.String("string"),
				RegexExpectation: &dataplex.DatascanDataQualitySpecRuleRegexExpectationArgs{
					Regex: pulumi.String("string"),
				},
				Description: pulumi.String("string"),
				SetExpectation: &dataplex.DatascanDataQualitySpecRuleSetExpectationArgs{
					Values: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
				SqlAssertion: &dataplex.DatascanDataQualitySpecRuleSqlAssertionArgs{
					SqlStatement: pulumi.String("string"),
				},
				StatisticRangeExpectation: &dataplex.DatascanDataQualitySpecRuleStatisticRangeExpectationArgs{
					Statistic:        pulumi.String("string"),
					MaxValue:         pulumi.String("string"),
					MinValue:         pulumi.String("string"),
					StrictMaxEnabled: pulumi.Bool(false),
					StrictMinEnabled: pulumi.Bool(false),
				},
				TableConditionExpectation: &dataplex.DatascanDataQualitySpecRuleTableConditionExpectationArgs{
					SqlExpression: pulumi.String("string"),
				},
				Threshold:             pulumi.Float64(0),
				UniquenessExpectation: &dataplex.DatascanDataQualitySpecRuleUniquenessExpectationArgs{},
			},
		},
		SamplingPercent: pulumi.Float64(0),
	},
	Description: pulumi.String("string"),
	DisplayName: pulumi.String("string"),
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Project: pulumi.String("string"),
})
Copy
var datascanResource = new Datascan("datascanResource", DatascanArgs.builder()
    .data(DatascanDataArgs.builder()
        .entity("string")
        .resource("string")
        .build())
    .dataScanId("string")
    .executionSpec(DatascanExecutionSpecArgs.builder()
        .trigger(DatascanExecutionSpecTriggerArgs.builder()
            .onDemand()
            .schedule(DatascanExecutionSpecTriggerScheduleArgs.builder()
                .cron("string")
                .build())
            .build())
        .field("string")
        .build())
    .location("string")
    .dataProfileSpec(DatascanDataProfileSpecArgs.builder()
        .excludeFields(DatascanDataProfileSpecExcludeFieldsArgs.builder()
            .fieldNames("string")
            .build())
        .includeFields(DatascanDataProfileSpecIncludeFieldsArgs.builder()
            .fieldNames("string")
            .build())
        .postScanActions(DatascanDataProfileSpecPostScanActionsArgs.builder()
            .bigqueryExport(DatascanDataProfileSpecPostScanActionsBigqueryExportArgs.builder()
                .resultsTable("string")
                .build())
            .build())
        .rowFilter("string")
        .samplingPercent(0)
        .build())
    .dataQualitySpec(DatascanDataQualitySpecArgs.builder()
        .postScanActions(DatascanDataQualitySpecPostScanActionsArgs.builder()
            .bigqueryExport(DatascanDataQualitySpecPostScanActionsBigqueryExportArgs.builder()
                .resultsTable("string")
                .build())
            .build())
        .rowFilter("string")
        .rules(DatascanDataQualitySpecRuleArgs.builder()
            .dimension("string")
            .rangeExpectation(DatascanDataQualitySpecRuleRangeExpectationArgs.builder()
                .maxValue("string")
                .minValue("string")
                .strictMaxEnabled(false)
                .strictMinEnabled(false)
                .build())
            .rowConditionExpectation(DatascanDataQualitySpecRuleRowConditionExpectationArgs.builder()
                .sqlExpression("string")
                .build())
            .ignoreNull(false)
            .name("string")
            .nonNullExpectation()
            .column("string")
            .regexExpectation(DatascanDataQualitySpecRuleRegexExpectationArgs.builder()
                .regex("string")
                .build())
            .description("string")
            .setExpectation(DatascanDataQualitySpecRuleSetExpectationArgs.builder()
                .values("string")
                .build())
            .sqlAssertion(DatascanDataQualitySpecRuleSqlAssertionArgs.builder()
                .sqlStatement("string")
                .build())
            .statisticRangeExpectation(DatascanDataQualitySpecRuleStatisticRangeExpectationArgs.builder()
                .statistic("string")
                .maxValue("string")
                .minValue("string")
                .strictMaxEnabled(false)
                .strictMinEnabled(false)
                .build())
            .tableConditionExpectation(DatascanDataQualitySpecRuleTableConditionExpectationArgs.builder()
                .sqlExpression("string")
                .build())
            .threshold(0)
            .uniquenessExpectation()
            .build())
        .samplingPercent(0)
        .build())
    .description("string")
    .displayName("string")
    .labels(Map.of("string", "string"))
    .project("string")
    .build());
Copy
datascan_resource = gcp.dataplex.Datascan("datascanResource",
    data={
        "entity": "string",
        "resource": "string",
    },
    data_scan_id="string",
    execution_spec={
        "trigger": {
            "on_demand": {},
            "schedule": {
                "cron": "string",
            },
        },
        "field": "string",
    },
    location="string",
    data_profile_spec={
        "exclude_fields": {
            "field_names": ["string"],
        },
        "include_fields": {
            "field_names": ["string"],
        },
        "post_scan_actions": {
            "bigquery_export": {
                "results_table": "string",
            },
        },
        "row_filter": "string",
        "sampling_percent": 0,
    },
    data_quality_spec={
        "post_scan_actions": {
            "bigquery_export": {
                "results_table": "string",
            },
        },
        "row_filter": "string",
        "rules": [{
            "dimension": "string",
            "range_expectation": {
                "max_value": "string",
                "min_value": "string",
                "strict_max_enabled": False,
                "strict_min_enabled": False,
            },
            "row_condition_expectation": {
                "sql_expression": "string",
            },
            "ignore_null": False,
            "name": "string",
            "non_null_expectation": {},
            "column": "string",
            "regex_expectation": {
                "regex": "string",
            },
            "description": "string",
            "set_expectation": {
                "values": ["string"],
            },
            "sql_assertion": {
                "sql_statement": "string",
            },
            "statistic_range_expectation": {
                "statistic": "string",
                "max_value": "string",
                "min_value": "string",
                "strict_max_enabled": False,
                "strict_min_enabled": False,
            },
            "table_condition_expectation": {
                "sql_expression": "string",
            },
            "threshold": 0,
            "uniqueness_expectation": {},
        }],
        "sampling_percent": 0,
    },
    description="string",
    display_name="string",
    labels={
        "string": "string",
    },
    project="string")
Copy
const datascanResource = new gcp.dataplex.Datascan("datascanResource", {
    data: {
        entity: "string",
        resource: "string",
    },
    dataScanId: "string",
    executionSpec: {
        trigger: {
            onDemand: {},
            schedule: {
                cron: "string",
            },
        },
        field: "string",
    },
    location: "string",
    dataProfileSpec: {
        excludeFields: {
            fieldNames: ["string"],
        },
        includeFields: {
            fieldNames: ["string"],
        },
        postScanActions: {
            bigqueryExport: {
                resultsTable: "string",
            },
        },
        rowFilter: "string",
        samplingPercent: 0,
    },
    dataQualitySpec: {
        postScanActions: {
            bigqueryExport: {
                resultsTable: "string",
            },
        },
        rowFilter: "string",
        rules: [{
            dimension: "string",
            rangeExpectation: {
                maxValue: "string",
                minValue: "string",
                strictMaxEnabled: false,
                strictMinEnabled: false,
            },
            rowConditionExpectation: {
                sqlExpression: "string",
            },
            ignoreNull: false,
            name: "string",
            nonNullExpectation: {},
            column: "string",
            regexExpectation: {
                regex: "string",
            },
            description: "string",
            setExpectation: {
                values: ["string"],
            },
            sqlAssertion: {
                sqlStatement: "string",
            },
            statisticRangeExpectation: {
                statistic: "string",
                maxValue: "string",
                minValue: "string",
                strictMaxEnabled: false,
                strictMinEnabled: false,
            },
            tableConditionExpectation: {
                sqlExpression: "string",
            },
            threshold: 0,
            uniquenessExpectation: {},
        }],
        samplingPercent: 0,
    },
    description: "string",
    displayName: "string",
    labels: {
        string: "string",
    },
    project: "string",
});
Copy
type: gcp:dataplex:Datascan
properties:
    data:
        entity: string
        resource: string
    dataProfileSpec:
        excludeFields:
            fieldNames:
                - string
        includeFields:
            fieldNames:
                - string
        postScanActions:
            bigqueryExport:
                resultsTable: string
        rowFilter: string
        samplingPercent: 0
    dataQualitySpec:
        postScanActions:
            bigqueryExport:
                resultsTable: string
        rowFilter: string
        rules:
            - column: string
              description: string
              dimension: string
              ignoreNull: false
              name: string
              nonNullExpectation: {}
              rangeExpectation:
                maxValue: string
                minValue: string
                strictMaxEnabled: false
                strictMinEnabled: false
              regexExpectation:
                regex: string
              rowConditionExpectation:
                sqlExpression: string
              setExpectation:
                values:
                    - string
              sqlAssertion:
                sqlStatement: string
              statisticRangeExpectation:
                maxValue: string
                minValue: string
                statistic: string
                strictMaxEnabled: false
                strictMinEnabled: false
              tableConditionExpectation:
                sqlExpression: string
              threshold: 0
              uniquenessExpectation: {}
        samplingPercent: 0
    dataScanId: string
    description: string
    displayName: string
    executionSpec:
        field: string
        trigger:
            onDemand: {}
            schedule:
                cron: string
    labels:
        string: string
    location: string
    project: string
Copy

Datascan Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The Datascan resource accepts the following input properties:

Data
This property is required.
Changes to this property will trigger replacement.
DatascanData
The data source for DataScan. Structure is documented below.
DataScanId
This property is required.
Changes to this property will trigger replacement.
string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
ExecutionSpec This property is required. DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
Location
This property is required.
Changes to this property will trigger replacement.
string
The location where the data scan should reside.
DataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
DataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
Description string
Description of the scan.
DisplayName string
User friendly display name.
Labels Dictionary<string, string>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Project Changes to this property will trigger replacement. string
Data
This property is required.
Changes to this property will trigger replacement.
DatascanDataArgs
The data source for DataScan. Structure is documented below.
DataScanId
This property is required.
Changes to this property will trigger replacement.
string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
ExecutionSpec This property is required. DatascanExecutionSpecArgs
DataScan execution settings. Structure is documented below.
Location
This property is required.
Changes to this property will trigger replacement.
string
The location where the data scan should reside.
DataProfileSpec DatascanDataProfileSpecArgs
DataProfileScan related setting.
DataQualitySpec DatascanDataQualitySpecArgs
DataQualityScan related setting.
Description string
Description of the scan.
DisplayName string
User friendly display name.
Labels map[string]string
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Project Changes to this property will trigger replacement. string
data
This property is required.
Changes to this property will trigger replacement.
DatascanData
The data source for DataScan. Structure is documented below.
dataScanId
This property is required.
Changes to this property will trigger replacement.
String
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
executionSpec This property is required. DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
location
This property is required.
Changes to this property will trigger replacement.
String
The location where the data scan should reside.
dataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
dataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
description String
Description of the scan.
displayName String
User friendly display name.
labels Map<String,String>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
project Changes to this property will trigger replacement. String
data
This property is required.
Changes to this property will trigger replacement.
DatascanData
The data source for DataScan. Structure is documented below.
dataScanId
This property is required.
Changes to this property will trigger replacement.
string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
executionSpec This property is required. DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
location
This property is required.
Changes to this property will trigger replacement.
string
The location where the data scan should reside.
dataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
dataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
description string
Description of the scan.
displayName string
User friendly display name.
labels {[key: string]: string}
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
project Changes to this property will trigger replacement. string
data
This property is required.
Changes to this property will trigger replacement.
DatascanDataArgs
The data source for DataScan. Structure is documented below.
data_scan_id
This property is required.
Changes to this property will trigger replacement.
str
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
execution_spec This property is required. DatascanExecutionSpecArgs
DataScan execution settings. Structure is documented below.
location
This property is required.
Changes to this property will trigger replacement.
str
The location where the data scan should reside.
data_profile_spec DatascanDataProfileSpecArgs
DataProfileScan related setting.
data_quality_spec DatascanDataQualitySpecArgs
DataQualityScan related setting.
description str
Description of the scan.
display_name str
User friendly display name.
labels Mapping[str, str]
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
project Changes to this property will trigger replacement. str
data
This property is required.
Changes to this property will trigger replacement.
Property Map
The data source for DataScan. Structure is documented below.
dataScanId
This property is required.
Changes to this property will trigger replacement.
String
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
executionSpec This property is required. Property Map
DataScan execution settings. Structure is documented below.
location
This property is required.
Changes to this property will trigger replacement.
String
The location where the data scan should reside.
dataProfileSpec Property Map
DataProfileScan related setting.
dataQualitySpec Property Map
DataQualityScan related setting.
description String
Description of the scan.
displayName String
User friendly display name.
labels Map<String>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
project Changes to this property will trigger replacement. String

Outputs

All input properties are implicitly available as output properties. Additionally, the Datascan resource produces the following output properties:

CreateTime string
The time when the scan was created.
EffectiveLabels Dictionary<string, string>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
ExecutionStatuses List<DatascanExecutionStatus>
Status of the data scan execution. Structure is documented below.
Id string
The provider-assigned unique ID for this managed resource.
Name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
Current state of the DataScan.
Type string
The type of DataScan.
Uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
UpdateTime string
The time when the scan was last updated.
CreateTime string
The time when the scan was created.
EffectiveLabels map[string]string
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
ExecutionStatuses []DatascanExecutionStatus
Status of the data scan execution. Structure is documented below.
Id string
The provider-assigned unique ID for this managed resource.
Name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
Current state of the DataScan.
Type string
The type of DataScan.
Uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
UpdateTime string
The time when the scan was last updated.
createTime String
The time when the scan was created.
effectiveLabels Map<String,String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionStatuses List<DatascanExecutionStatus>
Status of the data scan execution. Structure is documented below.
id String
The provider-assigned unique ID for this managed resource.
name String
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
Current state of the DataScan.
type String
The type of DataScan.
uid String
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime String
The time when the scan was last updated.
createTime string
The time when the scan was created.
effectiveLabels {[key: string]: string}
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionStatuses DatascanExecutionStatus[]
Status of the data scan execution. Structure is documented below.
id string
The provider-assigned unique ID for this managed resource.
name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
state string
Current state of the DataScan.
type string
The type of DataScan.
uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime string
The time when the scan was last updated.
create_time str
The time when the scan was created.
effective_labels Mapping[str, str]
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
execution_statuses Sequence[DatascanExecutionStatus]
Status of the data scan execution. Structure is documented below.
id str
The provider-assigned unique ID for this managed resource.
name str
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
state str
Current state of the DataScan.
type str
The type of DataScan.
uid str
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
update_time str
The time when the scan was last updated.
createTime String
The time when the scan was created.
effectiveLabels Map<String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionStatuses List<Property Map>
Status of the data scan execution. Structure is documented below.
id String
The provider-assigned unique ID for this managed resource.
name String
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
Current state of the DataScan.
type String
The type of DataScan.
uid String
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime String
The time when the scan was last updated.

Look up Existing Datascan Resource

Get an existing Datascan resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: DatascanState, opts?: CustomResourceOptions): Datascan
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        create_time: Optional[str] = None,
        data: Optional[DatascanDataArgs] = None,
        data_profile_spec: Optional[DatascanDataProfileSpecArgs] = None,
        data_quality_spec: Optional[DatascanDataQualitySpecArgs] = None,
        data_scan_id: Optional[str] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        execution_spec: Optional[DatascanExecutionSpecArgs] = None,
        execution_statuses: Optional[Sequence[DatascanExecutionStatusArgs]] = None,
        labels: Optional[Mapping[str, str]] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        state: Optional[str] = None,
        type: Optional[str] = None,
        uid: Optional[str] = None,
        update_time: Optional[str] = None) -> Datascan
func GetDatascan(ctx *Context, name string, id IDInput, state *DatascanState, opts ...ResourceOption) (*Datascan, error)
public static Datascan Get(string name, Input<string> id, DatascanState? state, CustomResourceOptions? opts = null)
public static Datascan get(String name, Output<String> id, DatascanState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataplex:Datascan
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
CreateTime string
The time when the scan was created.
Data Changes to this property will trigger replacement. DatascanData
The data source for DataScan. Structure is documented below.
DataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
DataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
DataScanId Changes to this property will trigger replacement. string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
Description string
Description of the scan.
DisplayName string
User friendly display name.
EffectiveLabels Dictionary<string, string>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
ExecutionSpec DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
ExecutionStatuses List<DatascanExecutionStatus>
Status of the data scan execution. Structure is documented below.
Labels Dictionary<string, string>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Location Changes to this property will trigger replacement. string
The location where the data scan should reside.
Name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
Project Changes to this property will trigger replacement. string
PulumiLabels Dictionary<string, string>
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
Current state of the DataScan.
Type string
The type of DataScan.
Uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
UpdateTime string
The time when the scan was last updated.
CreateTime string
The time when the scan was created.
Data Changes to this property will trigger replacement. DatascanDataArgs
The data source for DataScan. Structure is documented below.
DataProfileSpec DatascanDataProfileSpecArgs
DataProfileScan related setting.
DataQualitySpec DatascanDataQualitySpecArgs
DataQualityScan related setting.
DataScanId Changes to this property will trigger replacement. string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
Description string
Description of the scan.
DisplayName string
User friendly display name.
EffectiveLabels map[string]string
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
ExecutionSpec DatascanExecutionSpecArgs
DataScan execution settings. Structure is documented below.
ExecutionStatuses []DatascanExecutionStatusArgs
Status of the data scan execution. Structure is documented below.
Labels map[string]string
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
Location Changes to this property will trigger replacement. string
The location where the data scan should reside.
Name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
Project Changes to this property will trigger replacement. string
PulumiLabels map[string]string
The combination of labels configured directly on the resource and default labels configured on the provider.
State string
Current state of the DataScan.
Type string
The type of DataScan.
Uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
UpdateTime string
The time when the scan was last updated.
createTime String
The time when the scan was created.
data Changes to this property will trigger replacement. DatascanData
The data source for DataScan. Structure is documented below.
dataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
dataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
dataScanId Changes to this property will trigger replacement. String
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
description String
Description of the scan.
displayName String
User friendly display name.
effectiveLabels Map<String,String>
All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionSpec DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
executionStatuses List<DatascanExecutionStatus>
Status of the data scan execution. Structure is documented below.
labels Map<String,String>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
location Changes to this property will trigger replacement. String
The location where the data scan should reside.
name String
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
project Changes to this property will trigger replacement. String
pulumiLabels Map<String,String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
Current state of the DataScan.
type String
The type of DataScan.
uid String
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime String
The time when the scan was last updated.
createTime string
The time when the scan was created.
data Changes to this property will trigger replacement. DatascanData
The data source for DataScan. Structure is documented below.
dataProfileSpec DatascanDataProfileSpec
DataProfileScan related setting.
dataQualitySpec DatascanDataQualitySpec
DataQualityScan related setting.
dataScanId Changes to this property will trigger replacement. string
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
description string
Description of the scan.
displayName string
User friendly display name.
effectiveLabels {[key: string]: string}
All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionSpec DatascanExecutionSpec
DataScan execution settings. Structure is documented below.
executionStatuses DatascanExecutionStatus[]
Status of the data scan execution. Structure is documented below.
labels {[key: string]: string}
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
location Changes to this property will trigger replacement. string
The location where the data scan should reside.
name string
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
project Changes to this property will trigger replacement. string
pulumiLabels {[key: string]: string}
The combination of labels configured directly on the resource and default labels configured on the provider.
state string
Current state of the DataScan.
type string
The type of DataScan.
uid string
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime string
The time when the scan was last updated.
create_time str
The time when the scan was created.
data Changes to this property will trigger replacement. DatascanDataArgs
The data source for DataScan. Structure is documented below.
data_profile_spec DatascanDataProfileSpecArgs
DataProfileScan related setting.
data_quality_spec DatascanDataQualitySpecArgs
DataQualityScan related setting.
data_scan_id Changes to this property will trigger replacement. str
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
description str
Description of the scan.
display_name str
User friendly display name.
effective_labels Mapping[str, str]
All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
execution_spec DatascanExecutionSpecArgs
DataScan execution settings. Structure is documented below.
execution_statuses Sequence[DatascanExecutionStatusArgs]
Status of the data scan execution. Structure is documented below.
labels Mapping[str, str]
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
location Changes to this property will trigger replacement. str
The location where the data scan should reside.
name str
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
project Changes to this property will trigger replacement. str
pulumi_labels Mapping[str, str]
The combination of labels configured directly on the resource and default labels configured on the provider.
state str
Current state of the DataScan.
type str
The type of DataScan.
uid str
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
update_time str
The time when the scan was last updated.
createTime String
The time when the scan was created.
data Changes to this property will trigger replacement. Property Map
The data source for DataScan. Structure is documented below.
dataProfileSpec Property Map
DataProfileScan related setting.
dataQualitySpec Property Map
DataQualityScan related setting.
dataScanId Changes to this property will trigger replacement. String
DataScan identifier. Must contain only lowercase letters, numbers and hyphens. Must start with a letter. Must end with a number or a letter.
description String
Description of the scan.
displayName String
User friendly display name.
effectiveLabels Map<String>
All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
executionSpec Property Map
DataScan execution settings. Structure is documented below.
executionStatuses List<Property Map>
Status of the data scan execution. Structure is documented below.
labels Map<String>
User-defined labels for the scan. A list of key->value pairs. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
location Changes to this property will trigger replacement. String
The location where the data scan should reside.
name String
The relative resource name of the scan, of the form: projects/{project}/locations/{locationId}/dataScans/{datascan_id}, where project refers to a project_id or project_number and locationId refers to a GCP region.
project Changes to this property will trigger replacement. String
pulumiLabels Map<String>
The combination of labels configured directly on the resource and default labels configured on the provider.
state String
Current state of the DataScan.
type String
The type of DataScan.
uid String
System generated globally unique ID for the scan. This ID will be different if the scan is deleted and re-created with the same name.
updateTime String
The time when the scan was last updated.

Supporting Types

DatascanData
, DatascanDataArgs

Entity Changes to this property will trigger replacement. string
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
Resource Changes to this property will trigger replacement. string
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).
Entity Changes to this property will trigger replacement. string
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
Resource Changes to this property will trigger replacement. string
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).
entity Changes to this property will trigger replacement. String
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
resource Changes to this property will trigger replacement. String
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).
entity Changes to this property will trigger replacement. string
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
resource Changes to this property will trigger replacement. string
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).
entity Changes to this property will trigger replacement. str
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
resource Changes to this property will trigger replacement. str
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).
entity Changes to this property will trigger replacement. String
The Dataplex entity that represents the data source (e.g. a BigQuery table) for the DataScan.
resource Changes to this property will trigger replacement. String
The service-qualified full resource name of the cloud resource for a DataScan job to scan against. The field could be: a Cloud Storage bucket (for DataDiscoveryScan) or a BigQuery table of type "TABLE" (for DataProfileScan/DataQualityScan).

DatascanDataProfileSpec
, DatascanDataProfileSpecArgs

ExcludeFields DatascanDataProfileSpecExcludeFields
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
IncludeFields DatascanDataProfileSpecIncludeFields
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
PostScanActions DatascanDataProfileSpecPostScanActions
Actions to take upon job completion. Structure is documented below.
RowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
SamplingPercent double
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
ExcludeFields DatascanDataProfileSpecExcludeFields
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
IncludeFields DatascanDataProfileSpecIncludeFields
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
PostScanActions DatascanDataProfileSpecPostScanActions
Actions to take upon job completion. Structure is documented below.
RowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
SamplingPercent float64
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
excludeFields DatascanDataProfileSpecExcludeFields
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
includeFields DatascanDataProfileSpecIncludeFields
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
postScanActions DatascanDataProfileSpecPostScanActions
Actions to take upon job completion. Structure is documented below.
rowFilter String
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
samplingPercent Double
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
excludeFields DatascanDataProfileSpecExcludeFields
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
includeFields DatascanDataProfileSpecIncludeFields
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
postScanActions DatascanDataProfileSpecPostScanActions
Actions to take upon job completion. Structure is documented below.
rowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
samplingPercent number
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
exclude_fields DatascanDataProfileSpecExcludeFields
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
include_fields DatascanDataProfileSpecIncludeFields
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
post_scan_actions DatascanDataProfileSpecPostScanActions
Actions to take upon job completion. Structure is documented below.
row_filter str
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
sampling_percent float
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
excludeFields Property Map
The fields to exclude from data profile. If specified, the fields will be excluded from data profile, regardless of include_fields value. Structure is documented below.
includeFields Property Map
The fields to include in data profile. If not specified, all fields at the time of profile scan job execution are included, except for ones listed in exclude_fields. Structure is documented below.
postScanActions Property Map
Actions to take upon job completion. Structure is documented below.
rowFilter String
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
samplingPercent Number
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.

DatascanDataProfileSpecExcludeFields
, DatascanDataProfileSpecExcludeFieldsArgs

FieldNames List<string>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
FieldNames []string
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames List<String>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames string[]
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
field_names Sequence[str]
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames List<String>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.

DatascanDataProfileSpecIncludeFields
, DatascanDataProfileSpecIncludeFieldsArgs

FieldNames List<string>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
FieldNames []string
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames List<String>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames string[]
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
field_names Sequence[str]
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.
fieldNames List<String>
Expected input is a list of fully qualified names of fields as in the schema. Only top-level field names for nested fields are supported. For instance, if 'x' is of nested field type, listing 'x' is supported but 'x.y.z' is not supported. Here 'y' and 'y.z' are nested fields of 'x'.

DatascanDataProfileSpecPostScanActions
, DatascanDataProfileSpecPostScanActionsArgs

BigqueryExport DatascanDataProfileSpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
BigqueryExport DatascanDataProfileSpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport DatascanDataProfileSpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport DatascanDataProfileSpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigquery_export DatascanDataProfileSpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport Property Map
If set, results will be exported to the provided BigQuery table. Structure is documented below.

DatascanDataProfileSpecPostScanActionsBigqueryExport
, DatascanDataProfileSpecPostScanActionsBigqueryExportArgs

ResultsTable string
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
ResultsTable string
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable String
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable string
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
results_table str
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable String
The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID

DatascanDataQualitySpec
, DatascanDataQualitySpecArgs

PostScanActions DatascanDataQualitySpecPostScanActions
Actions to take upon job completion. Structure is documented below.
RowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
Rules List<DatascanDataQualitySpecRule>
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
SamplingPercent double
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
PostScanActions DatascanDataQualitySpecPostScanActions
Actions to take upon job completion. Structure is documented below.
RowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
Rules []DatascanDataQualitySpecRule
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
SamplingPercent float64
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
postScanActions DatascanDataQualitySpecPostScanActions
Actions to take upon job completion. Structure is documented below.
rowFilter String
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
rules List<DatascanDataQualitySpecRule>
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
samplingPercent Double
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
postScanActions DatascanDataQualitySpecPostScanActions
Actions to take upon job completion. Structure is documented below.
rowFilter string
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
rules DatascanDataQualitySpecRule[]
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
samplingPercent number
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
post_scan_actions DatascanDataQualitySpecPostScanActions
Actions to take upon job completion. Structure is documented below.
row_filter str
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
rules Sequence[DatascanDataQualitySpecRule]
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
sampling_percent float
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.
postScanActions Property Map
Actions to take upon job completion. Structure is documented below.
rowFilter String
A filter applied to all rows in a single DataScan job. The filter needs to be a valid SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10
rules List<Property Map>
The list of rules to evaluate against a data source. At least one rule is required. Structure is documented below.
samplingPercent Number
The percentage of the records to be selected from the dataset for DataScan. Value can range between 0.0 and 100.0 with up to 3 significant decimal digits. Sampling is not applied if sampling_percent is not specified, 0 or 100.

DatascanDataQualitySpecPostScanActions
, DatascanDataQualitySpecPostScanActionsArgs

BigqueryExport DatascanDataQualitySpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
BigqueryExport DatascanDataQualitySpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport DatascanDataQualitySpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport DatascanDataQualitySpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigquery_export DatascanDataQualitySpecPostScanActionsBigqueryExport
If set, results will be exported to the provided BigQuery table. Structure is documented below.
bigqueryExport Property Map
If set, results will be exported to the provided BigQuery table. Structure is documented below.

DatascanDataQualitySpecPostScanActionsBigqueryExport
, DatascanDataQualitySpecPostScanActionsBigqueryExportArgs

ResultsTable string
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
ResultsTable string
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable String
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable string
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
results_table str
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID
resultsTable String
The BigQuery table to export DataQualityScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID

DatascanDataQualitySpecRule
, DatascanDataQualitySpecRuleArgs

Dimension This property is required. string
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
Column string
The unnested column which this rule is evaluated against.
Description string
Description of the rule. The maximum length is 1,024 characters.
IgnoreNull bool
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
Name string
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
NonNullExpectation DatascanDataQualitySpecRuleNonNullExpectation
ColumnMap rule which evaluates whether each column value is null.
RangeExpectation DatascanDataQualitySpecRuleRangeExpectation
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
RegexExpectation DatascanDataQualitySpecRuleRegexExpectation
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
RowConditionExpectation DatascanDataQualitySpecRuleRowConditionExpectation
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
SetExpectation DatascanDataQualitySpecRuleSetExpectation
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
SqlAssertion DatascanDataQualitySpecRuleSqlAssertion
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
StatisticRangeExpectation DatascanDataQualitySpecRuleStatisticRangeExpectation
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
TableConditionExpectation DatascanDataQualitySpecRuleTableConditionExpectation
Table rule which evaluates whether the provided expression is true. Structure is documented below.
Threshold double
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
UniquenessExpectation DatascanDataQualitySpecRuleUniquenessExpectation
Row-level rule which evaluates whether each column value is unique.
Dimension This property is required. string
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
Column string
The unnested column which this rule is evaluated against.
Description string
Description of the rule. The maximum length is 1,024 characters.
IgnoreNull bool
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
Name string
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
NonNullExpectation DatascanDataQualitySpecRuleNonNullExpectation
ColumnMap rule which evaluates whether each column value is null.
RangeExpectation DatascanDataQualitySpecRuleRangeExpectation
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
RegexExpectation DatascanDataQualitySpecRuleRegexExpectation
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
RowConditionExpectation DatascanDataQualitySpecRuleRowConditionExpectation
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
SetExpectation DatascanDataQualitySpecRuleSetExpectation
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
SqlAssertion DatascanDataQualitySpecRuleSqlAssertion
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
StatisticRangeExpectation DatascanDataQualitySpecRuleStatisticRangeExpectation
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
TableConditionExpectation DatascanDataQualitySpecRuleTableConditionExpectation
Table rule which evaluates whether the provided expression is true. Structure is documented below.
Threshold float64
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
UniquenessExpectation DatascanDataQualitySpecRuleUniquenessExpectation
Row-level rule which evaluates whether each column value is unique.
dimension This property is required. String
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
column String
The unnested column which this rule is evaluated against.
description String
Description of the rule. The maximum length is 1,024 characters.
ignoreNull Boolean
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
name String
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
nonNullExpectation DatascanDataQualitySpecRuleNonNullExpectation
ColumnMap rule which evaluates whether each column value is null.
rangeExpectation DatascanDataQualitySpecRuleRangeExpectation
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
regexExpectation DatascanDataQualitySpecRuleRegexExpectation
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
rowConditionExpectation DatascanDataQualitySpecRuleRowConditionExpectation
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
setExpectation DatascanDataQualitySpecRuleSetExpectation
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
sqlAssertion DatascanDataQualitySpecRuleSqlAssertion
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
statisticRangeExpectation DatascanDataQualitySpecRuleStatisticRangeExpectation
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
tableConditionExpectation DatascanDataQualitySpecRuleTableConditionExpectation
Table rule which evaluates whether the provided expression is true. Structure is documented below.
threshold Double
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
uniquenessExpectation DatascanDataQualitySpecRuleUniquenessExpectation
Row-level rule which evaluates whether each column value is unique.
dimension This property is required. string
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
column string
The unnested column which this rule is evaluated against.
description string
Description of the rule. The maximum length is 1,024 characters.
ignoreNull boolean
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
name string
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
nonNullExpectation DatascanDataQualitySpecRuleNonNullExpectation
ColumnMap rule which evaluates whether each column value is null.
rangeExpectation DatascanDataQualitySpecRuleRangeExpectation
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
regexExpectation DatascanDataQualitySpecRuleRegexExpectation
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
rowConditionExpectation DatascanDataQualitySpecRuleRowConditionExpectation
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
setExpectation DatascanDataQualitySpecRuleSetExpectation
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
sqlAssertion DatascanDataQualitySpecRuleSqlAssertion
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
statisticRangeExpectation DatascanDataQualitySpecRuleStatisticRangeExpectation
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
tableConditionExpectation DatascanDataQualitySpecRuleTableConditionExpectation
Table rule which evaluates whether the provided expression is true. Structure is documented below.
threshold number
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
uniquenessExpectation DatascanDataQualitySpecRuleUniquenessExpectation
Row-level rule which evaluates whether each column value is unique.
dimension This property is required. str
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
column str
The unnested column which this rule is evaluated against.
description str
Description of the rule. The maximum length is 1,024 characters.
ignore_null bool
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
name str
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
non_null_expectation DatascanDataQualitySpecRuleNonNullExpectation
ColumnMap rule which evaluates whether each column value is null.
range_expectation DatascanDataQualitySpecRuleRangeExpectation
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
regex_expectation DatascanDataQualitySpecRuleRegexExpectation
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
row_condition_expectation DatascanDataQualitySpecRuleRowConditionExpectation
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
set_expectation DatascanDataQualitySpecRuleSetExpectation
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
sql_assertion DatascanDataQualitySpecRuleSqlAssertion
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
statistic_range_expectation DatascanDataQualitySpecRuleStatisticRangeExpectation
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
table_condition_expectation DatascanDataQualitySpecRuleTableConditionExpectation
Table rule which evaluates whether the provided expression is true. Structure is documented below.
threshold float
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
uniqueness_expectation DatascanDataQualitySpecRuleUniquenessExpectation
Row-level rule which evaluates whether each column value is unique.
dimension This property is required. String
The dimension a rule belongs to. Results are also aggregated at the dimension level. Supported dimensions are ["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "INTEGRITY"]
column String
The unnested column which this rule is evaluated against.
description String
Description of the rule. The maximum length is 1,024 characters.
ignoreNull Boolean
Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing. Only applicable to ColumnMap rules.
name String
A mutable name for the rule. The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-). The maximum length is 63 characters. Must start with a letter. Must end with a number or a letter.
nonNullExpectation Property Map
ColumnMap rule which evaluates whether each column value is null.
rangeExpectation Property Map
ColumnMap rule which evaluates whether each column value lies between a specified range. Structure is documented below.
regexExpectation Property Map
ColumnMap rule which evaluates whether each column value matches a specified regex. Structure is documented below.
rowConditionExpectation Property Map
Table rule which evaluates whether each row passes the specified condition. Structure is documented below.
setExpectation Property Map
ColumnMap rule which evaluates whether each column value is contained by a specified set. Structure is documented below.
sqlAssertion Property Map
Table rule which evaluates whether any row matches invalid state. Structure is documented below.
statisticRangeExpectation Property Map
ColumnAggregate rule which evaluates whether the column aggregate statistic lies between a specified range. Structure is documented below.
tableConditionExpectation Property Map
Table rule which evaluates whether the provided expression is true. Structure is documented below.
threshold Number
The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0).
uniquenessExpectation Property Map
Row-level rule which evaluates whether each column value is unique.

DatascanDataQualitySpecRuleRangeExpectation
, DatascanDataQualitySpecRuleRangeExpectationArgs

MaxValue string
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
MinValue string
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
StrictMaxEnabled bool
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
StrictMinEnabled bool
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
MaxValue string
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
MinValue string
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
StrictMaxEnabled bool
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
StrictMinEnabled bool
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
maxValue String
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue String
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled Boolean
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled Boolean
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
maxValue string
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue string
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled boolean
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled boolean
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
max_value str
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
min_value str
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strict_max_enabled bool
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strict_min_enabled bool
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
maxValue String
The maximum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue String
The minimum column value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled Boolean
Whether each value needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled Boolean
Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.

DatascanDataQualitySpecRuleRegexExpectation
, DatascanDataQualitySpecRuleRegexExpectationArgs

Regex This property is required. string
A regular expression the column value is expected to match.
Regex This property is required. string
A regular expression the column value is expected to match.
regex This property is required. String
A regular expression the column value is expected to match.
regex This property is required. string
A regular expression the column value is expected to match.
regex This property is required. str
A regular expression the column value is expected to match.
regex This property is required. String
A regular expression the column value is expected to match.

DatascanDataQualitySpecRuleRowConditionExpectation
, DatascanDataQualitySpecRuleRowConditionExpectationArgs

SqlExpression This property is required. string
The SQL expression.
SqlExpression This property is required. string
The SQL expression.
sqlExpression This property is required. String
The SQL expression.
sqlExpression This property is required. string
The SQL expression.
sql_expression This property is required. str
The SQL expression.
sqlExpression This property is required. String
The SQL expression.

DatascanDataQualitySpecRuleSetExpectation
, DatascanDataQualitySpecRuleSetExpectationArgs

Values This property is required. List<string>
Expected values for the column value.
Values This property is required. []string
Expected values for the column value.
values This property is required. List<String>
Expected values for the column value.
values This property is required. string[]
Expected values for the column value.
values This property is required. Sequence[str]
Expected values for the column value.
values This property is required. List<String>
Expected values for the column value.

DatascanDataQualitySpecRuleSqlAssertion
, DatascanDataQualitySpecRuleSqlAssertionArgs

SqlStatement This property is required. string
The SQL statement.
SqlStatement This property is required. string
The SQL statement.
sqlStatement This property is required. String
The SQL statement.
sqlStatement This property is required. string
The SQL statement.
sql_statement This property is required. str
The SQL statement.
sqlStatement This property is required. String
The SQL statement.

DatascanDataQualitySpecRuleStatisticRangeExpectation
, DatascanDataQualitySpecRuleStatisticRangeExpectationArgs

Statistic This property is required. string
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
MaxValue string
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
MinValue string
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
StrictMaxEnabled bool
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
StrictMinEnabled bool
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
Statistic This property is required. string
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
MaxValue string
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
MinValue string
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
StrictMaxEnabled bool
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
StrictMinEnabled bool
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
statistic This property is required. String
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
maxValue String
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue String
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled Boolean
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled Boolean
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
statistic This property is required. string
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
maxValue string
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue string
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled boolean
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled boolean
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
statistic This property is required. str
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
max_value str
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
min_value str
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strict_max_enabled bool
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strict_min_enabled bool
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.
statistic This property is required. String
The column statistic to evaluate against the range. Possible values are: STATISTIC_UNDEFINED, MEAN, MIN, MAX.
maxValue String
The maximum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
minValue String
The minimum column statistic value allowed for a row to pass this validation. At least one of minValue and maxValue need to be provided.
strictMaxEnabled Boolean
Whether the column statistic needs to be strictly less than ('<') the maximum, or if equality is allowed. Only relevant if a maxValue has been defined. Default = false.
strictMinEnabled Boolean
Whether column statistic needs to be strictly greater than ('>') the minimum, or if equality is allowed. Only relevant if a minValue has been defined. Default = false.

DatascanDataQualitySpecRuleTableConditionExpectation
, DatascanDataQualitySpecRuleTableConditionExpectationArgs

SqlExpression This property is required. string
The SQL expression.
SqlExpression This property is required. string
The SQL expression.
sqlExpression This property is required. String
The SQL expression.
sqlExpression This property is required. string
The SQL expression.
sql_expression This property is required. str
The SQL expression.
sqlExpression This property is required. String
The SQL expression.

DatascanExecutionSpec
, DatascanExecutionSpecArgs

Trigger This property is required. DatascanExecutionSpecTrigger
Spec related to how often and when a scan should be triggered. Structure is documented below.
Field Changes to this property will trigger replacement. string
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.
Trigger This property is required. DatascanExecutionSpecTrigger
Spec related to how often and when a scan should be triggered. Structure is documented below.
Field Changes to this property will trigger replacement. string
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.
trigger This property is required. DatascanExecutionSpecTrigger
Spec related to how often and when a scan should be triggered. Structure is documented below.
field Changes to this property will trigger replacement. String
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.
trigger This property is required. DatascanExecutionSpecTrigger
Spec related to how often and when a scan should be triggered. Structure is documented below.
field Changes to this property will trigger replacement. string
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.
trigger This property is required. DatascanExecutionSpecTrigger
Spec related to how often and when a scan should be triggered. Structure is documented below.
field Changes to this property will trigger replacement. str
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.
trigger This property is required. Property Map
Spec related to how often and when a scan should be triggered. Structure is documented below.
field Changes to this property will trigger replacement. String
The unnested field (of type Date or Timestamp) that contains values which monotonically increase over time. If not specified, a data scan will run for all data in the table.

DatascanExecutionSpecTrigger
, DatascanExecutionSpecTriggerArgs

OnDemand DatascanExecutionSpecTriggerOnDemand
The scan runs once via dataScans.run API.
Schedule DatascanExecutionSpecTriggerSchedule
The scan is scheduled to run periodically. Structure is documented below.
OnDemand DatascanExecutionSpecTriggerOnDemand
The scan runs once via dataScans.run API.
Schedule DatascanExecutionSpecTriggerSchedule
The scan is scheduled to run periodically. Structure is documented below.
onDemand DatascanExecutionSpecTriggerOnDemand
The scan runs once via dataScans.run API.
schedule DatascanExecutionSpecTriggerSchedule
The scan is scheduled to run periodically. Structure is documented below.
onDemand DatascanExecutionSpecTriggerOnDemand
The scan runs once via dataScans.run API.
schedule DatascanExecutionSpecTriggerSchedule
The scan is scheduled to run periodically. Structure is documented below.
on_demand DatascanExecutionSpecTriggerOnDemand
The scan runs once via dataScans.run API.
schedule DatascanExecutionSpecTriggerSchedule
The scan is scheduled to run periodically. Structure is documented below.
onDemand Property Map
The scan runs once via dataScans.run API.
schedule Property Map
The scan is scheduled to run periodically. Structure is documented below.

DatascanExecutionSpecTriggerSchedule
, DatascanExecutionSpecTriggerScheduleArgs

Cron This property is required. string
Cron schedule for running scans periodically. This field is required for Schedule scans.


Cron This property is required. string
Cron schedule for running scans periodically. This field is required for Schedule scans.


cron This property is required. String
Cron schedule for running scans periodically. This field is required for Schedule scans.


cron This property is required. string
Cron schedule for running scans periodically. This field is required for Schedule scans.


cron This property is required. str
Cron schedule for running scans periodically. This field is required for Schedule scans.


cron This property is required. String
Cron schedule for running scans periodically. This field is required for Schedule scans.


DatascanExecutionStatus
, DatascanExecutionStatusArgs

LatestJobEndTime string
(Output) The time when the latest DataScanJob ended.
LatestJobStartTime string
(Output) The time when the latest DataScanJob started.
LatestJobEndTime string
(Output) The time when the latest DataScanJob ended.
LatestJobStartTime string
(Output) The time when the latest DataScanJob started.
latestJobEndTime String
(Output) The time when the latest DataScanJob ended.
latestJobStartTime String
(Output) The time when the latest DataScanJob started.
latestJobEndTime string
(Output) The time when the latest DataScanJob ended.
latestJobStartTime string
(Output) The time when the latest DataScanJob started.
latest_job_end_time str
(Output) The time when the latest DataScanJob ended.
latest_job_start_time str
(Output) The time when the latest DataScanJob started.
latestJobEndTime String
(Output) The time when the latest DataScanJob ended.
latestJobStartTime String
(Output) The time when the latest DataScanJob started.

Import

Datascan can be imported using any of these accepted formats:

  • projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}

  • {{project}}/{{location}}/{{data_scan_id}}

  • {{location}}/{{data_scan_id}}

  • {{data_scan_id}}

When using the pulumi import command, Datascan can be imported using one of the formats above. For example:

$ pulumi import gcp:dataplex/datascan:Datascan default projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}
Copy
$ pulumi import gcp:dataplex/datascan:Datascan default {{project}}/{{location}}/{{data_scan_id}}
Copy
$ pulumi import gcp:dataplex/datascan:Datascan default {{location}}/{{data_scan_id}}
Copy
$ pulumi import gcp:dataplex/datascan:Datascan default {{data_scan_id}}
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.