montecarlo#
Monte Carlo’s CLI.
montecarlo [OPTIONS] COMMAND [ARGS]...
Options
- --profile <profile>#
Specify an MCD profile name. Uses default otherwise.
- --config-path <config_path>#
Specify path where to look for config file. Uses /root/.mcd otherwise.
- --version#
Show the version and exit.
agents#
Manage a Monte Carlo Agent.
montecarlo agents [OPTIONS] COMMAND [ARGS]...
deregister#
Deregister an Agent.
montecarlo agents deregister [OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent to deregister.
get-aws-template#
Displays the current CloudFormation template in use by an AWS Agent, in YAML format.
montecarlo agents get-aws-template
[OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent.
get-aws-template-parameters#
Displays the current CloudFormation template parameters in use by an AWS Agent. For Terraform it displays the current value for MemorySize and ConcurrentExecutions.
montecarlo agents get-aws-template-parameters
[OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent.
get-aws-upgrade-logs#
Returns the upgrade logs for a remote AWS Agent.
montecarlo agents get-aws-upgrade-logs
[OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent.
- --limit <limit>#
Maximum number of log events to return, defaults to 100
- --start-time <start_time>#
Optional start time, for example: 2023-12-02T13:40:25Z. Defaults to 12 hours ago.
get-operation-logs#
Returns the operation logs for a remote agent.
montecarlo agents get-operation-logs
[OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent.
- --start-time <start_time>#
Optional start time, for example: 2023-12-02T13:40:25Z. Defaults to 10 minutes ago.
- --end-time <end_time>#
Optional end time, for example: 2023-12-02T13:45:25Z. Defaults to now.
- --limit <limit>#
Maximum number of log events to return, defaults to 1,000
- --connection-type <connection_type>#
Optional connection type to filter logs, for example snowflake, redshift, etc.
health#
Perform a health check of the Agent.
montecarlo agents health [OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent.
list#
List all agents in account.
montecarlo agents list [OPTIONS]
Options
- --show-inactive#
Also include inactive agents in the listing (by default only active agents are shown).
- Default:
False
register-aws-agent#
Register a Remote AWS Agent.
montecarlo agents register-aws-agent
[OPTIONS]
Options
- --assumable-role <assumable_role>#
Required ARN of AWS assumable role.
- --lambda-arn <lambda_arn>#
Required ARN of AWS Lambda function.
- --external-id <external_id>#
AWS External ID.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-aws-proxied-agent#
Register a Remote AWS Proxied Agent.
montecarlo agents register-aws-proxied-agent
[OPTIONS]
Options
- --authentication <authentication>#
Authentication type.
- Options:
oauth2_client_credentials | oauth2_password
- --proxy-endpoint <proxy_endpoint>#
Required HTTP/HTTPS Proxy URL.
- --auth-url <auth_url>#
OAuth2 Client Credentials - Authentication URL, used to get the access token.
- --client-id <client_id>#
OAuth2 Client Credentials - Client ID.
- --client-secret <client_secret>#
OAuth2 Client Credentials - Client Secret. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘auth-url’ and ‘client-id’.
- --username <username>#
OAuth2 Username/Password - Username. This option requires setting ‘auth-url’, ‘authentication’, and ‘password’.
- --password <password>#
OAuth2 Username/Password - Password. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘auth-url’, ‘authentication’, and ‘username’.
- --deployment-type <deployment_type>#
Specify if this AWS Agent is deployed as ‘lambda’ or ‘generic’ (e.g. ECS, EKS, EC2)
- Options:
lambda | generic
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --client-certificate <client_certificate>#
Path to the file that contains a client certificate for agent connectivity.
- --private-key <private_key>#
Path to the file that contains a private key for the client certificate for agent connectivity. This option requires setting ‘client-certificate’.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-azure-agent#
Register a Remote Azure Agent.
montecarlo agents register-azure-agent
[OPTIONS]
Options
- --app-key <app_key>#
Required App key from the Azure Function to use for authentication. If you prefer a prompt (with hidden input) enter -1.
- --url <url>#
Required URL for accessing agent.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --client-certificate <client_certificate>#
Path to the file that contains a client certificate for agent connectivity.
- --private-key <private_key>#
Path to the file that contains a private key for the client certificate for agent connectivity. This option requires setting ‘client-certificate’.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-azure-blob-store#
Register a Data Store Agent with remote Azure Blob storage container.
montecarlo agents register-azure-blob-store
[OPTIONS]
Options
- --connection-string <connection_string>#
Required A connection string to an Azure Storage account. If you prefer a prompt (with hidden input) enter -1.
- --container-name <container_name>#
Required Name of Azure Storage container for data store.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-gcp-agent#
Register a Remote GCP Agent.
montecarlo agents register-gcp-agent
[OPTIONS]
Options
- --key-file <key_file>#
Required JSON Key file if auth type is GCP JSON service account key.
- --url <url>#
Required URL for accessing agent.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --client-certificate <client_certificate>#
Path to the file that contains a client certificate for agent connectivity.
- --private-key <private_key>#
Path to the file that contains a private key for the client certificate for agent connectivity. This option requires setting ‘client-certificate’.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-gcs-store#
Register a Data Store Agent with Google Cloud Storage.
montecarlo agents register-gcs-store
[OPTIONS]
Options
- --key-file <key_file>#
Required JSON Key file if auth type is GCP JSON service account key.
- --bucket-name <bucket_name>#
Required Name of GCS bucket for data store.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --agent-id <agent_id>#
UUID of existing Agent to update.
register-s3-store#
Register a Data Store Agent with remote S3 bucket.
montecarlo agents register-s3-store
[OPTIONS]
Options
- --assumable-role <assumable_role>#
Required ARN of AWS assumable role.
- --bucket-name <bucket_name>#
Required Name of S3 bucket for data store.
- --external-id <external_id>#
AWS External ID.
- --dry-run#
Dry run (validates credentials but doesn’t create agent).
- Default:
False
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors.
- --agent-id <agent_id>#
UUID of existing Agent to update.
test-tcp-open#
Tests if a destination exists and accepts requests. Opens a TCP Socket to a specific port from the agent.
montecarlo agents test-tcp-open
[OPTIONS]
Options
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --host <host>#
Required Host to check.
- --port <port>#
Required Port to check.
- --timeout <timeout>#
Timeout in seconds.
- Default:
5
test-telnet#
Checks if telnet connection is usable from the agent.
montecarlo agents test-telnet [OPTIONS]
Options
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --host <host>#
Required Host to check.
- --port <port>#
Required Port to check.
- --timeout <timeout>#
Timeout in seconds.
- Default:
5
upgrade#
Upgrades the running version of an Agent.
montecarlo agents upgrade [OPTIONS]
Options
- --agent-id <agent_id>#
Required UUID of Agent to upgrade.
- --image-tag <image_tag>#
Image version to upgrade to.
- --params <parameters>#
Parameters key,value pairs as JSON. If a key is not specified the existing (or default) value is used.
E.g. --params '{"MemorySize": "1024", "ConcurrentExecutions": "25"}'
collectors#
Manage a data collector.
montecarlo collectors [OPTIONS] COMMAND [ARGS]...
add#
Add a collector record to the account. Prompts to open a browser to the CF console. If declined (or skipped), you can use deploy, get-template or open-link with the generated ID.
montecarlo collectors add [OPTIONS]
Options
- --no-prompt#
Skip prompt for launching browser (Auto no).
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
deploy#
Deploy a data collector stack.
montecarlo collectors deploy [OPTIONS]
Options
- --aws-profile <aws_profile>#
Required AWS profile.
- --params <params>#
Parameters key,value pairs as JSON. If a key is not specified the existing (or default) value is used.
E.g. --params '{"CreateEventInfra": "True"}'
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --aws-region <aws_region>#
AWS region where the collector is deployed or intended to be deployed.
- Default:
us-east-1
- --stack-name <stack_name>#
Required The name that is associated with the CloudFormation stack. Must be unique in the region.
- --enable-termination-protection, --no-enable-termination-protection#
Enable termination protection for this stack.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
get-template#
Get link to the latest template. For initial deployment or manually upgrading.
montecarlo collectors get-template
[OPTIONS]
Options
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --aws-region <aws_region>#
AWS region where the collector is deployed or intended to be deployed.
- Default:
us-east-1
- --update-infra, --no-update-infra#
Update the collector infrastructure. Otherwise, only the lambda code will be updated.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
list#
List all collector records.
montecarlo collectors list [OPTIONS]
Options
- --active-only#
Only list active collectors.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
open-link#
Opens browser to CF console with a quick create link. For initial deployment.
montecarlo collectors open-link
[OPTIONS]
Options
- --dry#
Echos quick create link.
- Default:
False
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --aws-region <aws_region>#
AWS region where the collector is deployed or intended to be deployed.
- Default:
us-east-1
- --option-file <option_file>#
Read configuration from FILE.
ping#
Validate whether a collector is operational.
montecarlo collectors ping [OPTIONS]
Options
- --trace-id <trace_id>#
Optional custom UUID for tracking and correlating the ping response. By default we generate a random UUID to be sent with the ping request and validate that the ping response contains the same exact value.
- --collector-id <dc_id>#
Required ID for the data collector. To disambiguate accounts with multiple collectors.
- --option-file <option_file>#
Read configuration from FILE.
run-validations#
Runs all validations for all active integrations in a given collector.
montecarlo collectors run-validations
[OPTIONS]
Options
- --only-periodic, --all-validations#
Whether all validations or only those marked as ‘periodic’ will be executed. Running only periodic validations will require less time and run the same validations that are executed periodically.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
test-tcp-open#
Tests if a destination exists and accepts requests. Opens a TCP Socket to a specific port from the collector.
montecarlo collectors test-tcp-open
[OPTIONS]
Options
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --host <host>#
Required Host to check.
- --port <port>#
Required Port to check.
- --timeout <timeout>#
Timeout in seconds.
- Default:
5
- --option-file <option_file>#
Read configuration from FILE.
test-telnet#
Checks if telnet connection is usable from the collector.
montecarlo collectors test-telnet
[OPTIONS]
Options
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --host <host>#
Required Host to check.
- --port <port>#
Required Port to check.
- --timeout <timeout>#
Timeout in seconds.
- Default:
5
- --option-file <option_file>#
Read configuration from FILE.
upgrade#
Upgrade to the latest version.
montecarlo collectors upgrade [OPTIONS]
Options
- --update-infra, --no-update-infra#
Update the collector infrastructure. Otherwise, only the lambda code will be updated.
- Default:
False
- --aws-profile <aws_profile>#
Required AWS profile.
- --params <params>#
Parameters key,value pairs as JSON. If a key is not specified the existing (or default) value is used.
E.g. --params '{"CreateEventInfra": "True"}'
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --aws-region <aws_region>#
AWS region where the collector is deployed or intended to be deployed.
- Default:
us-east-1
- --option-file <option_file>#
Read configuration from FILE.
configure#
Configure the CLI.
montecarlo configure [OPTIONS]
Options
- --profile-name <profile_name>#
Specify a profile name for configuration.
- --config-path <config_path>#
Specify path where to look for config file.
- --mcd-id <mcd_id>#
Monte Carlo token user ID.
- --mcd-token <mcd_token>#
Monte Carlo token value.
discovery#
Display information about resources.
montecarlo discovery [OPTIONS] COMMAND [ARGS]...
athena-policy-gen#
Generate an IAM policy for Athena. After review, output of this command can be redirected into montecarlo integrations create-role or montecarlo discovery cf-role-gen if you prefer IaC.
montecarlo discovery athena-policy-gen
[OPTIONS]
Options
- --database-name <database_names>#
Required Glue/Athena database name to generate a policy from. Enter ‘*’ to give Monte Carlo access to all databases. This option can be passed multiple times for more than one database.
- --data-bucket-name <bucket_names>#
Name of a S3 bucket storing the data for your Glue/Athena tables. If this option is not specified the bucket names are derived (looked up) from the tables in your databases. This option can be passed multiple times for more than one bucket. Enter ‘*’ to give Monte Carlo access to all buckets.
- --workgroup-name <workgroup_name>#
Required Athena workgroup for Monte Carlo to use when performing queries. The “primary” workgroup for the region is used if one is not specified.
- Default:
primary
- --resource-aws-region <resource_aws_region>#
Override the AWS region where the resource is located. Defaults to the region where the collector is hosted.
- --resource-aws-profile <resource_aws_profile>#
Required AWS profile for the resource.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
cf-role-gen#
Generate a CloudFormation template to create a resource access IAM role. After review, this template can be deployed using CloudFormation. The Role ARN and External ID for onboarding can be found in the stack outputs.
montecarlo discovery cf-role-gen
[OPTIONS]
Options
- --policy-file <policy_files>#
Required File containing an IAM policy to generate an IAM role from. This option can be passed multiple times for more than one policy.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
glue-policy-gen#
Generate an IAM policy for Glue. After review, output of this command can be redirected into montecarlo integrations create-role or montecarlo discovery cf-role-gen if you prefer IaC.
montecarlo discovery glue-policy-gen
[OPTIONS]
Options
- --database-name <database_names>#
Required Glue/Athena database name to generate a policy from. Enter ‘*’ to give Monte Carlo access to all databases. This option can be passed multiple times for more than one database.
- --data-bucket-name <bucket_names>#
Name of a S3 bucket storing the data for your Glue/Athena tables. If this option is not specified the bucket names are derived (looked up) from the tables in your databases. This option can be passed multiple times for more than one bucket. Enter ‘*’ to give Monte Carlo access to all buckets.
- --resource-aws-region <resource_aws_region>#
Override the AWS region where the resource is located. Defaults to the region where the collector is hosted.
- --resource-aws-profile <resource_aws_profile>#
Required AWS profile for the resource.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
list-emr-clusters#
List details about EMR clusters in a region.
montecarlo discovery list-emr-clusters
[OPTIONS]
Options
- --aws-profile <aws_profile>#
Required AWS profile.
- --aws-region <aws_region>#
Required AWS region.
- --only-log-locations#
Display only unique log locations
- --created-after <created_after>#
Display clusters created after date (e.g. 2017-07-04T00:01:30)
- --state <state>#
Cluster states
- Options:
active | terminated | failed
- --no-grid#
Do not display as grid and print as results are available, useful when the cluster list is large
- --option-file <option_file>#
Read configuration from FILE.
msk-policy-gen#
Generate an IAM policy for MSK. After review, output of this command can be redirected into montecarlo integrations create-role or montecarlo discovery cf-role-gen if you prefer IaC.
montecarlo discovery msk-policy-gen
[OPTIONS]
Options
- --resource-aws-region <resource_aws_region>#
Override the AWS region where the resource is located. Defaults to the region where the collector is hosted.
- --resource-aws-profile <resource_aws_profile>#
Required AWS profile for the resource.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
network-recommender#
Alpha network recommender. Attempts to analyze and makes recommendations on how to connect a resource with the Data Collector.
montecarlo discovery network-recommender
[OPTIONS]
Options
- --resource-identifier <resource_identifier>#
Required Identifier for the AWS resource you want to connect the Collector with (e.g. Redshift cluster ID).
- --resource-type <resource_type>#
Required Type of AWS resource.
- Options:
redshift
- --collector-aws-profile <collector_aws_profile>#
Required AWS profile for the Collector.
- --resource-aws-region <resource_aws_region>#
Override the AWS region where the resource is located. Defaults to the region where the collector is hosted.
- --resource-aws-profile <resource_aws_profile>#
Required AWS profile for the resource.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
export#
Export your data.
montecarlo export [OPTIONS] COMMAND [ARGS]...
get#
Export Monte Carlo objects.
DESTINATION is the path where the data will be written to.
Supported schemes:
‘file://’ - save Data Export locally.
‘s3://’ - save Data Export to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo export get [OPTIONS]
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the Data Export and quit.
- --name <name>#
Required Type of objects to export. Names can be found via the list command (e.g. MONITORS from “Monitors (MONITORS)”)
- --destination <destination>#
Required Destination location to save the data.
get-alerts#
Export alerts data.
DESTINATION is the path where the data will be written to.
Supported schemes:
‘file://’ - save Data Export locally.
‘s3://’ - save Data Export to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo export get-alerts [OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the Data Export and quit.
Arguments
- DESTINATION#
Required argument
get-assets#
Export assets data.
DESTINATION is the path where the data will be written to.
Supported schemes:
‘file://’ - save Data Export locally.
‘s3://’ - save Data Export to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo export get-assets [OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the Data Export and quit.
Arguments
- DESTINATION#
Required argument
get-events#
Export events data.
DESTINATION is the path where the data will be written to.
Supported schemes:
‘file://’ - save Data Export locally.
‘s3://’ - save Data Export to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo export get-events [OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the Data Export and quit.
Arguments
- DESTINATION#
Required argument
get-monitors#
Export monitors data.
DESTINATION is the path where the data will be written to.
Supported schemes:
‘file://’ - save Data Export locally.
‘s3://’ - save Data Export to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo export get-monitors
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the Data Export and quit.
Arguments
- DESTINATION#
Required argument
list#
List Data Export details and availability.
montecarlo export list [OPTIONS]
help#
Echo all help text.
montecarlo help [OPTIONS]
import#
Import data.
montecarlo import [OPTIONS] COMMAND [ARGS]...
dbt-run#
Import dbt run artifacts.
montecarlo import dbt-run [OPTIONS]
Options
- --project-name <project_name>#
Project name (perhaps a logical group of dbt models, analogous to a project in dbt Cloud)
- Default:
default-project
- --job-name <job_name>#
Job name (perhaps a logical sequence of dbt executions, analogous to a job in dbt Cloud)
- Default:
default-job
- --manifest <manifest>#
Required Path to the dbt manifest file (manifest.json)
- --run-results <run_results>#
Required Path to the dbt run results file (run_results.json)
- --logs <logs>#
Path to a file containing dbt run logs
- --connection-id <connection_id>#
Identifier of warehouse or lake connection to use to resolve dbt models to tables. Required if you have more than one warehouse or lake connection.
insights#
Aggregated insights on your tables.
montecarlo insights [OPTIONS] COMMAND [ARGS]...
get#
Get an arbitrary insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get [OPTIONS]
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
- --name <name>#
Required Name of the insight to retrieve. Insight names can be found via the list command (e.g. key_assets from “Key Assets (key_assets)”)
- --destination <destination>#
Required Destination location to save insight.
get-cleanup-suggestions#
Get cleanup suggestions insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-cleanup-suggestions
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-coverage-overview#
Get coverage overview (monitors) insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-coverage-overview
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-deteriorating-queries#
Get deteriorating queries insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-deteriorating-queries
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-events#
Get events insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-events
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-incident-queries#
Get incident query changes insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-incident-queries
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-key-assets#
Get key assets insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-key-assets
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-rule-results#
Get rule and SLI results insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-rule-results
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
get-table-activity#
Get table read/write activity insight.
DESTINATION is the path where the insight will be written to.
Supported schemes:
‘file://’ - save insight locally.
‘s3://’ - save insight to S3.
Notice - Will overwrite a file if it exists in the path and create any missing directories or prefixes.
montecarlo insights get-table-activity
[OPTIONS] DESTINATION
Options
- --aws-profile <aws_profile>#
AWS profile to be used when uploading to S3.
- --dry#
Echo temporary presigned URL for the insight and quit.
Arguments
- DESTINATION#
Required argument
list#
List insights details and availability.
montecarlo insights list [OPTIONS]
integrations#
Set up or manage an integration with Monte Carlo.
montecarlo integrations [OPTIONS] COMMAND
[ARGS]...
add-airflow#
Setup an Airflow integration to receive events from Airflow callbacks.
montecarlo integrations add-airflow
[OPTIONS]
Options
- --host <host>#
Required Hostname.
- --name <name>#
Friendly name for the etl connection.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-athena#
Setup an Athena integration. For query logs and health queries.
montecarlo integrations add-athena
[OPTIONS]
Options
- --catalog <catalog>#
Glue data catalog. If not specified the AwsDataCatalog is used.
- --workgroup <workgroup>#
Workgroup for running queries and retrieving logs. If not specified the primary is used.
- --region <region>#
Athena cluster region. If not specified the region the collector is deployed in is used.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --role <role>#
Required Assumable role ARN to use for accessing AWS resources.
- --external-id <external_id>#
An external id, per assumable role conditions.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-azure-data-factory#
Setup an Azure Data Factory integration.
montecarlo integrations add-azure-data-factory
[OPTIONS]
Options
- --tenant-id <tenant_id>#
Required Azure Tenant ID.
- --client-id <client_id>#
Required Azure Client ID.
- --client-secret <client_secret>#
Required Azure Client Secret. If you prefer a prompt (with hidden input) enter -1.
- --subscription-id <subscription_id>#
Required Azure Subscription ID.
- --resource-group-name <resource_group_name>#
Required Azure Resource Group Name.
- --factory-name <factory_name>#
Required Azure Data Factory Name.
- --name <name>#
Required Friendly name for the etl connection.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-azure-dedicated-sql-pool#
Setup an Azure Dedicated SQL Pool integration. For metadata, and custom SQL monitors.
montecarlo integrations add-azure-dedicated-sql-pool
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
1433
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-azure-sql-database#
Setup an Azure SQL Database integration. For metadata, and custom SQL monitors.
montecarlo integrations add-azure-sql-database
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
1433
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-bigquery#
Setup a BigQuery integration. For metadata, query logs and metrics.
montecarlo integrations add-bigquery
[OPTIONS]
Options
- --key-file <key_file>#
Required JSON Key file.
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-databricks-delta#
Setup a Databricks Delta integration. For metadata queries on delta tables when using an external metastore in databricks.
montecarlo integrations add-databricks-delta
[OPTIONS]
Options
- --databricks-workspace-url <databricks_workspace_url>#
Required Databricks workspace URL.
- --databricks-token <databricks_token>#
Databricks access token. If you prefer a prompt (with hidden input) enter -1.
- --databricks-workspace-id <databricks_workspace_id>#
Required Databricks workspace ID.
- --databricks-cluster-id <databricks_cluster_id>#
Required Databricks cluster ID.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --skip-secret-creation#
Skip secret creation. Warning: Advanced use-case only.
- Default:
False
- --databricks-secret-key <databricks_secret_key>#
Databricks secret key. This is a secret used by Databricks to communicate back to Monte Carlo.
- Default:
monte-carlo-collector-gateway-secret
- --databricks-secret-scope <databricks_secret_scope>#
Databricks secret scope. This is a secret used by Databricks to communicate back to Monte Carlo.
- Default:
monte-carlo-collector-gateway-scope
- --skip-notebook-creation#
Skip notebook creation. This option requires setting ‘databricks-job-id’, ‘databricks-job-name’, and ‘databricks-notebook-path’.
- Default:
False
- --databricks-job-id <databricks_job_id>#
Databricks job id, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-job-name <databricks_job_name>#
Databricks job name, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-notebook-path <databricks_notebook_path>#
Databricks notebook path, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-notebook-source <databricks_notebook_source>#
Databricks notebook source, required if notebook creation is skipped. (e.g. “resources/databricks/notebook/v1/collection.py”) This option requires setting ‘skip-notebook-creation’.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-databricks-metastore#
Setup a Databricks metastore integration. For metadata and health queries.
montecarlo integrations add-databricks-metastore
[OPTIONS]
Options
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --databricks-workspace-url <databricks_workspace_url>#
Required Databricks workspace URL.
- --databricks-token <databricks_token>#
Databricks access token. If you prefer a prompt (with hidden input) enter -1.
- --databricks-workspace-id <databricks_workspace_id>#
Required Databricks workspace ID.
- --databricks-cluster-id <databricks_cluster_id>#
Required Databricks cluster ID.
- --skip-secret-creation#
Skip secret creation. Warning: Advanced use-case only.
- Default:
False
- --databricks-secret-key <databricks_secret_key>#
Databricks secret key. This is a secret used by Databricks to communicate back to Monte Carlo.
- Default:
monte-carlo-collector-gateway-secret
- --databricks-secret-scope <databricks_secret_scope>#
Databricks secret scope. This is a secret used by Databricks to communicate back to Monte Carlo.
- Default:
monte-carlo-collector-gateway-scope
- --skip-notebook-creation#
Skip notebook creation. This option requires setting ‘databricks-job-id’, ‘databricks-job-name’, and ‘databricks-notebook-path’.
- Default:
False
- --databricks-job-id <databricks_job_id>#
Databricks job id, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-job-name <databricks_job_name>#
Databricks job name, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-notebook-path <databricks_notebook_path>#
Databricks notebook path, required if notebook creation is skipped. This option requires setting ‘skip-notebook-creation’.
- --databricks-notebook-source <databricks_notebook_source>#
Databricks notebook source, required if notebook creation is skipped. (e.g. “resources/databricks/notebook/v1/collection.py”) This option requires setting ‘skip-notebook-creation’.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-databricks-metastore-sql-warehouse#
Setup a Databricks metastore sql warehouse integration. For metadata.
montecarlo integrations add-databricks-metastore-sql-warehouse
[OPTIONS]
Options
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --databricks-workspace-url <databricks_workspace_url>#
Required Databricks workspace URL.
- --databricks-token <databricks_token>#
Databricks access token. If you prefer a prompt (with hidden input) enter -1.
- --databricks-warehouse-id <databricks_warehouse_id>#
Required Databricks warehouse ID.
- --databricks-client-id <databricks_client_id>#
Databricks OAuth Client ID. This option cannot be used with ‘databricks-token’. This option requires setting ‘databricks-client-secret’.
- --databricks-client-secret <databricks_client_secret>#
Databricks OAuth Client Secret. If you prefer a prompt (with hidden input) enter -1. This option cannot be used with ‘databricks-token’. This option requires setting ‘databricks-client-id’.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --databricks-workspace-id <databricks_workspace_id>#
Required Databricks workspace ID.
- --option-file <option_file>#
Read configuration from FILE.
add-databricks-sql-warehouse#
Setup a Databricks SQL Warehouse integration. For health queries.
montecarlo integrations add-databricks-sql-warehouse
[OPTIONS]
Options
- --databricks-workspace-url <databricks_workspace_url>#
Required Databricks workspace URL.
- --databricks-token <databricks_token>#
Databricks access token. If you prefer a prompt (with hidden input) enter -1.
- --databricks-warehouse-id <databricks_warehouse_id>#
Required Databricks warehouse ID.
- --databricks-client-id <databricks_client_id>#
Databricks OAuth Client ID. This option cannot be used with ‘databricks-token’. This option requires setting ‘databricks-client-secret’.
- --databricks-client-secret <databricks_client_secret>#
Databricks OAuth Client Secret. If you prefer a prompt (with hidden input) enter -1. This option cannot be used with ‘databricks-token’. This option requires setting ‘databricks-client-id’.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-dbt-cloud#
Setup a dbt Cloud integration.
montecarlo integrations add-dbt-cloud
[OPTIONS]
Options
- --dbt-cloud-api-token <dbt_cloud_api_token>#
Required dbt Cloud API token. If you prefer a prompt (with hidden input) enter -1.
- --dbt-cloud-account-id <dbt_cloud_account_id>#
Required dbt Cloud Account ID.
- --dbt-cloud-base-url <dbt_cloud_base_url>#
dbt Cloud Base URL.
- --webhook-hmac-secret <webhook_hmac_secret>#
Required The secret token hmac_secret provided by dbt after a webhook is successfully created in dbt.
- --webhook-id <webhook_id>#
Required The webhook id provided by dbt after a webhook is successfully created in dbt.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-dremio#
Setup a Dremio integration. For metadata, and custom SQL monitors.
montecarlo integrations add-dremio
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --token <token>#
Required Token for authentication.
- --host <host>#
Required Hostname of coordinator node or data.dremio.cloud if using Dremio cloud.
- --port <port>#
Required Dremio’s Arrow Flight server port. 443 if using Dremio Cloud.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
- --tls#
Use TLS for connection. Required for Dremio cloud.
add-events#
Setup complete S3 event notifications for a lake.
montecarlo integrations add-events
[OPTIONS]
Options
- --bucket <bucket_name>#
Name of bucket to enable events for.
- --prefix <prefix>#
Limit the notifications to objects starting with a prefix (e.g. ‘data/’).
- --suffix <suffix>#
Limit notifications to objects ending with a suffix (e.g. ‘.csv’).
- --topic-arn <topic_arn>#
Use an existing SNS topic (same region as the bucket). Creates a topic if one is not specified or if an MCD topic does not already exist in the region.
- --buckets-filename <buckets_filename>#
Filename that contains bucket config to enable events for. This option cannot be used with ‘bucket-name’.
- --event-type <event_type>#
Required Type of event to setup.
- Default:
metadata
- Options:
metadata | query-logs
- --collector-aws-profile <collector_aws_profile>#
Required The AWS profile to use for operations on SQS/Collector.
- --resource-aws-profile <bucket_aws_profile>#
Required The AWS profile to use for operations on S3/SNS.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
add-fivetran#
Setup a Fivetran integration.
montecarlo integrations add-fivetran
[OPTIONS]
Options
- --fivetran-api-key <fivetran_api_key>#
Required Fivetran API Key. If you prefer a prompt (with hidden input) enter -1.
- --fivetran-api-password <fivetran_api_password>#
Required Fivetran API Password. If you prefer a prompt (with hidden input) enter -1.
- --fivetran-base-url <fivetran_base_url>#
Fivetran Base URL.
- --name <name>#
Friendly name for the etl connection.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-glue#
Setup a Glue integration. For metadata.
montecarlo integrations add-glue
[OPTIONS]
Options
- --region <region>#
Glue catalog region. If not specified the region the collector is deployed in is used.
- --role <role>#
Required Assumable role ARN to use for accessing AWS resources.
- --external-id <external_id>#
An external id, per assumable role conditions.
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-hive#
Setup a Hive SQL integration. For health queries.
montecarlo integrations add-hive
[OPTIONS]
Options
- --host <host>#
Required Hostname.
- --database <database>#
Name of database.
- --port <port>#
HTTP port.
- Default:
10000
- --user <user>#
Required Username with access to hive.
- --auth-mode <auth_mode>#
Hive authentication mode.
- Default:
SASL
- Options:
SASL | NOSASL
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-hive-logs#
Setup a Hive EMR logs integration (S3). For query logs.
montecarlo integrations add-hive-logs
[OPTIONS]
Options
- --bucket <bucket>#
Required S3 Bucket where query logs are contained.
- --prefix <prefix>#
Required Path to query logs.
- --role <role>#
Assumable role ARN to use for accessing AWS resources.
- --external-id <external_id>#
An external id, per assumable role conditions.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-hive-metastore#
Setup a Hive metastore integration (MySQL). For metadata.
montecarlo integrations add-hive-metastore
[OPTIONS]
Options
- --port <port>#
HTTP port.
- Default:
3306
- --use-ssl#
Use SSL to connect (using AWS RDS certificates).
- Default:
False
- --catalog <catalog>#
Presto catalog name. For using multiple hive clusters with Presto. Uses ‘hive’ if not specified.
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --create-warehouse <create_warehouse>#
Create a new warehouse with this connection
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-informatica#
Setup an Informatica integration.
montecarlo integrations add-informatica
[OPTIONS]
Options
- --username <username>#
Required Informatica username. If you prefer a prompt (with hidden input) enter -1.
- --password <password>#
Required Informatica password. If you prefer a prompt (with hidden input) enter -1.
- --name <name>#
Required Friendly name for the etl connection.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-looker#
Setup a Looker metadata integration. For reports.
montecarlo integrations add-looker
[OPTIONS]
Options
- --host-url <base_url>#
Required Looker host url.
- --client-id <client_id>#
Required Looker client id.
- --client-secret <client_secret>#
Required Looker client secret (API key). If you prefer a prompt (with hidden input) enter -1.
- --name <name>#
Friendly name for the Looker integration.
- --connection-name <connection_name>#
Friendly name for Looker connection.
- --verify-ssl, --no-verify-ssl#
Whether to verify the SSL connection (uncheck for self-signed certs).
- Default:
True
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-looker-git#
Setup a Looker ML (git) integration. For reports.
montecarlo integrations add-looker-git
[OPTIONS]
Options
- --ssh-key <ssh_key>#
The ssh key for git ssh integrations. This option cannot be used with ‘token’, and ‘username’.
- --repo-url <repo_url>#
Required Repository URL as ssh://[user@]server/project.git or the shorter form [user@]server:project.git for ssh. For https, use https://server/project.git.
- --token <token>#
Git Access Token to be used for Https instead of ssh key. This option cannot be used with ‘ssh-key’.
- --username <username>#
Git username to be used in conjunction with the access token. This is only required for BitBucket integrations. This option cannot be used with ‘ssh-key’.
- --connection-name <connection_name>#
Friendly name for Looker Git connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-mariadb#
Setup a MariaDB integration. For metadata, and custom SQL monitors.
montecarlo integrations add-mariadb
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
3306
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-motherduck-database#
Setup a Motherduck integration. For metadata, and custom SQL monitors.
montecarlo integrations add-motherduck-database
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --token <token>#
Required Token for authentication.
- --database <database>#
Required Name of default database.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-mysql#
Setup a MySQL integration. For metadata, and custom SQL monitors.
montecarlo integrations add-mysql
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
3306
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --ssl-ca <ssl_ca>#
Path to the file that contains a PEM-formatted CA certificate. This option cannot be used with ‘ssl-disabled’.
- --ssl-cert <ssl_cert>#
Path to the file that contains a PEM-formatted client certificate. This option cannot be used with ‘ssl-disabled’.
- --ssl-key <ssl_key>#
Path to the file that contains a PEM-formatted private key for the client certificate. This option cannot be used with ‘ssl-disabled’. This option requires setting ‘ssl-cert’.
- --ssl-disabled <ssl_disabled>#
A boolean value that disables usage of TLS. This option cannot be used with ‘ssl-key’, ‘ssl-ca’, and ‘ssl_cert’.
- --ssl-key-password <ssl_key_password>#
The password for the client certificate private key. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘ssl-cert’.
- --ssl-verify-cert <ssl_verify_cert>#
Set to true to check the server certificate’s validity. This option requires setting ‘ssl-cert’.
- --ssl-verify-identity <ssl_verify_identity>#
Set to true to check the server’s identity. This option requires setting ‘ssl-ca’.
- --skip-cert-verification#
Skip SSL certificate verification. This option cannot be used with ‘ssl-verify-cert’, and ‘ssl_verify_identity’.
- --option-file <option_file>#
Read configuration from FILE.
add-oracle#
Setup an Oracle integration. For metadata, and custom SQL monitors.
montecarlo integrations add-oracle
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
1521
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-pinecone#
Setup a Pinecone integration.
montecarlo integrations add-pinecone
[OPTIONS]
Options
- --environment <environment>#
Required Pinecone environment (e.g. us-east-1-aws).
- --project-id <project_id>#
Required Pinecone project id.
- --api-key <api_key>#
Required API key for Pinecone project.
- --name <name>#
Friendly name for the integration (defaults to environment:project-id).
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
add-postgres#
Setup a Postgres integration. For metadata, and custom SQL monitors.
montecarlo integrations add-postgres
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
5432
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
- --rds-proxy#
Use if connecting to Postgres through a RDS proxy.
add-power-bi#
Setup a Power BI integration. For reports.
montecarlo integrations add-power-bi
[OPTIONS]
Options
- --tenant-id <tenant_id>#
Required The tenant ID from the Azure Active Directory.
- --auth-mode <auth_mode>#
Required Authentication Mode. We support two auth types: service principal and primary user.
- Options:
SERVICE_PRINCIPAL | PRIMARY_USER
- --client-id <client_id>#
Required App registration application ID for accessing Power BI.
- --client-secret <client_secret>#
Secret for the application to access the Power BI. Set only when auth-mode is SERVICE_PRINCIPAL. This option cannot be used with ‘username’, and ‘password’.
- --username <username>#
Username for accessing the Power BI. Set only when auth-mode is PRIMARY_USER. This option cannot be used with ‘client-secret’.
- --password <password>#
Password for accessing the Power BI. Set only when auth-mode is PRIMARY_USER. This option cannot be used with ‘client-secret’.
- --name <name>#
Friendly name for the Power BI integration.
- --connection-name <connection_name>#
Friendly name for Power BI connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
add-presto#
Setup a Presto SQL integration. For health queries.
montecarlo integrations add-presto
[OPTIONS]
Options
- --host <host>#
Required Hostname.
- --port <port>#
HTTP port.
- Default:
8889
- --user <user>#
Username with access to catalog/schema.
- --password <password>#
User’s password. If you prefer a prompt (with hidden input) enter -1
- --catalog <catalog>#
Mount point to access data source.
- --schema <schema>#
Schema to access.
- --http-scheme <http_scheme>#
Required Scheme for authentication.
- Options:
http | https
- --cert-file <cert_file>#
Local SSL certificate file to upload to collector. This option cannot be used with ‘cert-s3’.
- --aws-profile <aws_profile>#
AWS profile to be used when uploading cert file. This option requires setting ‘cert-file’.
- --aws-region <aws_region>#
AWS region to be used when uploading cert file. This option requires setting ‘cert-file’.
- --cert-s3 <cert_s3>#
Object path (key) to a certificate already uploaded to the collector. This option cannot be used with ‘cert-file’.
- --skip-cert-verification#
Skip SSL certificate verification.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-redshift#
Setup a Redshift integration. For metadata, query logs and metrics.
montecarlo integrations add-redshift
[OPTIONS]
Options
- --port <port>#
HTTP port.
- Default:
5439
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --test-network-only#
Skip any permission tests. Only validates network connection between the collector and resource can be established. This option cannot be used with ‘skip-validation’.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-redshift-consumer-connection#
Setup a Redshift consumer integration. For metadata, query logs and metrics.
montecarlo integrations add-redshift-consumer-connection
[OPTIONS]
Options
- --port <port>#
HTTP port.
- Default:
5439
- --producer-resource-id <producer_resource_id>#
Required UUID of Producer warehouse
- --connection-name <connection_name>#
Friendly name for the consumer connection.
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --test-network-only#
Skip any permission tests. Only validates network connection between the collector and resource can be established. This option cannot be used with ‘skip-validation’.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-sap-hana-database#
Setup a SAP HANA integration. For metadata, and custom SQL monitors.
montecarlo integrations add-sap-hana-database
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
39015
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --database <database>#
Required Name of database/site.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-self-hosted-credentials#
Setup an integration that uses self-hosted credentials.
montecarlo integrations add-self-hosted-credentials
[OPTIONS]
Options
- --connection-type <connection_type>#
Required Type of connection. This option requires setting ‘name’ when it is set to one of these values: {‘snowflake’, ‘redshift’, ‘bigquery’}.
- Options:
athena | bigquery | databricks-delta | databricks-metastore | databricks-metastore-sql-warehouse | dbt-cloud-webhook | dbt-core | fivetran | glue | hive-mysql | hive-s3 | hive | looker-git-clone | looker-git-ssh | looker | power-bi | presto | redshift | snowflake | spark | tableau | transactional-db
- --mechanism <mechanism>#
Required Credential self-hosting mechanism.
- Options:
secretsmanager
- --key <key>#
Required Identifier for credentials within self-hosting mechanism.
- --name <name>#
Friendly name for the warehouse.
- --role <role>#
Assumable role ARN to use for accessing AWS resources.
- --external-id <external_id>#
An external id, per assumable role conditions.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-self-hosted-credentials-v2#
Setup an integration that uses self-hosted credentials (v2).
montecarlo integrations add-self-hosted-credentials-v2
[OPTIONS]
Options
- --connection-type <connection_type>#
Required Connection type to test credentials for (e.g. ‘snowflake’).
- Options:
athena | bigquery | databricks-delta | databricks-metastore | databricks-metastore-sql-warehouse | dbt-cloud-webhook | dbt-core | fivetran | glue | hive-mysql | hive-s3 | hive | looker-git-clone | looker-git-ssh | looker | power-bi | presto | redshift | snowflake | spark | tableau | transactional-db
- --self-hosted-credentials-type <self_hosted_credentials_type>#
Required Self-hosted credentials type (e.g. ‘env_var’, ‘aws_secrets_manager’).
- Options:
ENV_VAR | AWS_SECRETS_MANAGER
- --decryption-service-type <decryption_service_type>#
Optional type of service used to decrypt environment variable credentials (e.g. ‘kms’). This option requires setting ‘env-var-name’.
- Options:
KMS
- --env-var-name <env_var_name>#
Name of environment variable containing credentials. Must use prefix ‘MCD_’. This option cannot be used with ‘aws-secret’. This option requires setting ‘self-hosted-credentials-type’.
- --kms-key-id <kms_key_id>#
Optional KMS key id for decrypting environment variable credentials. This option requires setting ‘decryption-service-type’.
- --aws-secret <aws_secret>#
ARN or name of AWS Secret Manager secret containing credentials. This option cannot be used with ‘env-var-name’. This option requires setting ‘self-hosted-credentials-type’.
- --aws-region <aws_region>#
Optional AWS region where secret manager secret is stored.
- --assumable-role <assumable_role>#
Optional ARN of AWS role to assume when accessing secret manager secret.
- --external-id <external_id>#
Optional external id for AWS role. This option requires setting ‘assumable-role’.
- --name <name>#
Friendly name for the warehouse.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-snowflake#
Setup a Snowflake integration. For metadata, query logs and metrics.
montecarlo integrations add-snowflake
[OPTIONS]
Options
- --user <user>#
Required User with access to snowflake.
- --account <account>#
Required Snowflake account name.
- --warehouse <warehouse>#
Required Name of the warehouse for the user.
- --private-key <private_key>#
User’s private key file for key pair auth. This option cannot be used with ‘use-oauth’.
- --private-key-passphrase <private_key_passphrase>#
User’s private key passphrase. This argument is only needed when the private key is encrypted. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘private-key’.
- --use-oauth#
Use OAuth for Auth in this integration.
- Default:
False
- --oauth-client-id <oauth_client_id>#
OAuth Client ID. This option requires setting ‘use-oauth’.
- --oauth-client-secret <oauth_client_secret>#
OAuth Client Secret. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘use-oauth’.
- --oauth-access-token-endpoint <oauth_access_token_endpoint>#
Endpoint used to acquire access tokens. This option requires setting ‘use-oauth’.
- --oauth-grant-type <oauth_grant_type>#
OAuth Grant type. This option requires setting ‘use-oauth’.
- Options:
client_credentials | password
- --oauth-scope <oauth_scope>#
OAuth Scope
- --oauth-username <oauth_username>#
OAuth Username for oauth password grant flow
- --oauth-password <oauth_password>#
OAuth Password for oauth password grant flow. If you prefer a prompt (with hidden input) enter -1. This option requires setting ‘oauth-username’.
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-spark-binary-mode#
Setup a thrift binary Spark integration. For health queries.
montecarlo integrations add-spark-binary-mode
[OPTIONS]
Options
- --host <host>#
Required Hostname.
- --database <database>#
Required Name of database.
- --port <port>#
Port.
- Default:
10000
- --user <user>#
Required Username with access to spark.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-spark-databricks#
Setup a Spark integration for Databricks. For health queries.
montecarlo integrations add-spark-databricks
[OPTIONS]
Options
- --databricks-workspace-url <databricks_workspace_url>#
Required Databricks workspace URL.
- --databricks-token <databricks_token>#
Databricks access token. If you prefer a prompt (with hidden input) enter -1.
- --databricks-workspace-id <databricks_workspace_id>#
Required Databricks workspace ID.
- --databricks-cluster-id <databricks_cluster_id>#
Required Databricks cluster ID.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-spark-http-mode#
Setup a thrift HTTP Spark integration. For health queries.
montecarlo integrations add-spark-http-mode
[OPTIONS]
Options
- --url <url>#
Required HTTP URL.
- --user <user>#
Required Username with access to spark.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-sql-server#
Setup a SQL Server integration. For metadata, and custom SQL monitors.
montecarlo integrations add-sql-server
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
1433
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-streaming-cluster-connection#
Setup a streaming cluster connection.
montecarlo integrations add-streaming-cluster-connection
[OPTIONS]
Options
- --connection-type <connection_type>#
Required Streaming Connection Type to create. Currently we only support [‘confluent-kafka’, ‘confluent-kafka-connect’, ‘msk-kafka’, ‘msk-kafka-connect’, ‘self-hosted-kafka’, ‘self-hosted-kafka-connect’]
- Options:
confluent-kafka | confluent-kafka-connect | msk-kafka | msk-kafka-connect | self-hosted-kafka | self-hosted-kafka-connect
- --key <key>#
Credentials key. You can obtain it by calling test-confluent-kafka-credentials for connection type confluent-kafka, or by calling test-confluent-kafka-connect-credentials for connection type confluent-kafka-connect. It’s ok to leave it blank, but in that case fill the [url, api-key, secret] fields for a confluent-kafka connection, or the [api-key, secret, confluent-env] fields for a confluent-kafka-connect connection. This option cannot be used with ‘api-key’, ‘confluent-env’, ‘secret’, and ‘url’.
- --url <url>#
This is required only when the key is not specified and the connection type is confluent-kafka. This option cannot be used with ‘key’.
- --confluent-env <confluent_env>#
This is required only when the key is not specified and the connection type is confluent-kafka-connect. This option cannot be used with ‘key’.
- --api-key <api_key>#
API key for accessing the cluster. This is required only when the credential key is not specified. This option cannot be used with ‘key’.
- --secret <secret>#
This is required only when the key is not specified. This option cannot be used with ‘key’.
- --auth-type <auth_type>#
This is required only when the key is not specified and the cluster is self-hosted. This option cannot be used with ‘key’.
- Options:
NO_AUTH | BASIC | BEARER
- --auth-token <auth_token>#
This is required only when the key is not specified and the cluster is self-hosted. This option cannot be used with ‘key’.
- --cluster-arn <cluster_arn>#
This is required only when the key is not specified and the cluster type is MSK Connect. This option cannot be used with ‘key’.
- --iam-role-arn <iam_role_arn>#
This is required only when the key is not specified and the cluster type is MSK Connect. This option cannot be used with ‘key’.
- --external-id <external_id>#
This is required only when the key is not specified and the cluster type is MSK Connect. This option cannot be used with ‘key’.
- --streaming-system-id <streaming_system_id>#
Streaming system UUID. If we are adding a cluster to an existing streaming system, we should use the UUID here. When this is given, please leave new_streaming_system_name, new_streaming_system_type, dc_id empty. This option cannot be used with ‘dc-id’, ‘new-streaming-system-type’, and ‘new_streaming_system_name’.
- --new-streaming-system-name <new_streaming_system_name>#
Streaming System Name, if we are creating a new streaming system along the new cluster connection. This option cannot be used with ‘streaming-system-id’.
- --new-streaming-system-type <new_streaming_system_type>#
Streaming System Type, if we are creating a new streaming system along the new cluster connection. This option cannot be used with ‘streaming-system-id’.
- Options:
confluent-cloud | msk | self-hosted
- --dc-id <dc_id>#
Data Collector UUID. Only specify when there are more than one data collector in the system, and you are trying to create a new streaming system for the cluster connection. This option cannot be used with ‘streaming-system-id’.
- --mc-cluster-id <mc_cluster_id>#
Existing Streaming Cluster MC UUID. If we are only adding a connection to a specific cluster, we set this. This option cannot be used with ‘new-cluster-name’, ‘new-cluster-type’, and ‘new_cluster_id’.
- --new-cluster-id <new_cluster_id>#
Streaming cluster id in your streaming system, required when creating a new cluster This option cannot be used with ‘mc-cluster-id’.
- --new-cluster-name <new_cluster_name>#
New streaming cluster name at MC side. If not specified, will use cluster ID as the cluster name when creating a new cluster. This option cannot be used with ‘mc-cluster-id’.
add-streaming-system#
Setup a streaming system.
montecarlo integrations add-streaming-system
[OPTIONS]
Options
- --streaming-system-type <streaming_system_type>#
Required Streaming System type. Currently we support confluent-cloud, msk, and self-hosted.
- Options:
confluent-cloud | msk | self-hosted
- --streaming-system-name <streaming_system_name>#
Required Name that helps you identify the streaming system in MC.
- --dc-id <dc_id>#
The data collector UUID that you’d like to run jobs of this system in. This is needed when you have more than one active data collector.
add-tableau#
Setup a Tableau integration. For reports.
montecarlo integrations add-tableau
[OPTIONS]
Options
- --token-name <token_name>#
Name for the personal access token. This option cannot be used with ‘user’, ‘client-id’, ‘password’, ‘secret-value’, ‘username’, and ‘secret_id’. This option requires setting ‘token-value’.
- --token-value <token_value>#
Value for the personal access token. If you prefer a prompt (with hidden input) enter -1. This option cannot be used with ‘user’, ‘client-id’, ‘password’, ‘secret-value’, ‘username’, and ‘secret_id’. This option requires setting ‘token-name’.
- --password <password>#
Password for the service account. If you prefer a prompt (with hidden input) enter -1. This option cannot be used with ‘client-id’, ‘secret-value’, ‘token-value’, ‘username’, ‘secret-id’, and ‘token_name’. This option requires setting ‘user’.
- --user <user>#
Username for the service account. This option cannot be used with ‘client-id’, ‘secret-value’, ‘token-value’, ‘username’, ‘secret-id’, and ‘token_name’. This option requires setting ‘password’.
- --username <username>#
Username for the Connected App. This option cannot be used with ‘token-value’, ‘password’, ‘user’, and ‘token_name’. This option requires setting ‘client-id’, ‘secret-id’, and ‘secret_value’.
- --client-id <client_id>#
Client ID for the Connected App. This option cannot be used with ‘token-value’, ‘password’, ‘user’, and ‘token_name’. This option requires setting ‘username’, ‘secret-id’, and ‘secret_value’.
- --secret-id <secret_id>#
Secret ID for the Connected App. This option cannot be used with ‘token-value’, ‘password’, ‘user’, and ‘token_name’. This option requires setting ‘client-id’, ‘username’, and ‘secret_value’.
- --secret-value <secret_value>#
Value of the Connected App secret. If you prefer a prompt (with hidden input) enter -1 This option cannot be used with ‘token-value’, ‘password’, ‘user’, and ‘token_name’. This option requires setting ‘client-id’, ‘username’, and ‘secret_id’.
- --site-name <site_name>#
Required The Tableau site name.
- --server-name <server_name>#
Required The Tableau server name.
- --name <name>#
Friendly name for the Tableau integration.
- --connection-name <connection_name>#
Friendly name for Tableau connection.
- --verify-ssl, --no-verify-ssl#
Whether to verify the SSL connection (uncheck for self-signed certs).
- Default:
True
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
add-teradata#
Setup a Teradata integration. For metadata, and custom SQL monitors.
montecarlo integrations add-teradata
[OPTIONS]
Options
- --name <name>#
Required Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --port <port>#
HTTP port.
- Default:
1025
- --host <host>#
Required Hostname.
- --user <user>#
Required Username with access to the database.
- --password <password>#
Required User’s password. If you prefer a prompt (with hidden input) enter -1.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --sslmode <sslmode>#
SSL mode for connections to Teradata.
- Options:
ALLOW | PREFER | REQUIRE | VERIFY_CA | VERIFY_FULL
- --logmech <logmech>#
Logon mechanism for Teradata connection.
- Options:
TD2 | BROWSER | JWT | LDAP | KRB5 | TDNEGO
- --ssl-ca-directory <ssl_ca_directory>#
Path to directory of PEM files containing CA certs. This option cannot be used with ‘ssl-ca’.
- --ssl-ca <ssl_ca>#
Path to PEM file containing CA cert(s). This option cannot be used with ‘ssl-ca-directory’.
- --ssl-disabled <ssl_disabled>#
A boolean value that disables usage of TLS.
configure-metadata-events#
Configure S3 metadata events. For tracking data freshness and volume at scale. Requires s3 notifications to be configured first.
montecarlo integrations configure-metadata-events
[OPTIONS]
Options
- --connection-type <connection_type>#
Required Type of the integration. This option cannot be used with ‘connection-id’.
- Options:
databricks-metastore | glue | hive-mysql
- --name <name>#
Friendly name for the created integration (e.g. warehouse). Name must be unique.
- --connection-name <connection_name>#
Friendly name for the connection.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
configure-query-log-events#
Configure S3 query log events. For tracking data freshness and volume at scale. Requires s3 notifications to be configured first.
montecarlo integrations configure-query-log-events
[OPTIONS]
Options
- --connection-type <connection_type>#
Required Type of the integration.
- Options:
hive-s3 | presto-s3
- --name <name>#
Friendly name of the warehouse which the connection will belong to.
- --connection-name <connection_name>#
Friendly name for the connection.
- --role <role>#
Required Assumable role ARN to use for accessing AWS resources.
- --external-id <external_id>#
An external id, per assumable role conditions.
- --format-type <format_type>#
Required Query log format.
- Options:
hive-emr | hive-native | custom
- --source-format <source_format>#
Query log file format. Only required when “custom” is used.
- Options:
json | jsonl
- --mapping-file <mapping_file>#
Mapping of expected to existing query log fields. Only required if “custom” is used.
- --dc-id <dc_id>#
Collector UUID to enable events on. If not specified, collector used by the Warehouse will be used.
- --option-file <option_file>#
Read configuration from FILE.
create-bucket-side-event-infrastructure#
Setup Bucket Side S3 event infrastructure for a lake.
montecarlo integrations create-bucket-side-event-infrastructure
[OPTIONS]
Options
- --bucket <bucket_name>#
Name of bucket to enable events for.
- --prefix <prefix>#
Limit the notifications to objects starting with a prefix (e.g. ‘data/’).
- --suffix <suffix>#
Limit notifications to objects ending with a suffix (e.g. ‘.csv’).
- --topic-arn <topic_arn>#
Use an existing SNS topic (same region as the bucket). Creates a topic if one is not specified or if an MCD topic does not already exist in the region.
- --buckets-filename <buckets_filename>#
Filename that contains bucket config to enable events for. This option cannot be used with ‘bucket-name’.
- --event-type <event_type>#
Required Type of event to setup.
- Default:
metadata
- Options:
metadata | query-logs
- --resource-aws-profile <bucket_aws_profile>#
Required The AWS profile to use for operations on S3/SNS.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
create-databricks-webhook-key#
Create an integration key for a Databricks webhook
montecarlo integrations create-databricks-webhook-key
[OPTIONS]
Options
- --integration-name <integration_name>#
Name of associated Databricks metastore integration (required if you have more than one)
- --option-file <option_file>#
Read configuration from FILE.
create-event-topic#
Setup Event Topic for S3 event notifications in a lake.
montecarlo integrations create-event-topic
[OPTIONS]
Options
- --bucket <bucket_name>#
Name of bucket to enable events for.
- --prefix <prefix>#
Limit the notifications to objects starting with a prefix (e.g. ‘data/’).
- --suffix <suffix>#
Limit notifications to objects ending with a suffix (e.g. ‘.csv’).
- --topic-arn <topic_arn>#
Use an existing SNS topic (same region as the bucket). Creates a topic if one is not specified or if an MCD topic does not already exist in the region.
- --buckets-filename <buckets_filename>#
Filename that contains bucket config to enable events for. This option cannot be used with ‘bucket-name’.
- --event-type <event_type>#
Required Type of event to setup.
- Default:
metadata
- Options:
metadata | query-logs
- --resource-aws-profile <bucket_aws_profile>#
Required The AWS profile to use for operations on S3/SNS.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
- --option-file <option_file>#
Read configuration from FILE.
create-key#
Create an integration key. The resulting key id and secret will be printed to the console.
montecarlo integrations create-key
[OPTIONS]
Options
- --description <description>#
Required Key description.
- --scope <scope>#
Required Key scope (integration the key can be used for).
- Options:
Spark | DatabricksMetadata
- --option-file <option_file>#
Read configuration from FILE.
create-role#
Create an IAM role from a policy FILE. The returned role ARN and external id should be used for adding lake assets.
montecarlo integrations create-role
[OPTIONS] FILE
Options
- --aws-profile <aws_profile>#
Required The AWS profile indicating where the role is created.
- --agent-id <agent_id>#
ID for the agent. To disambiguate accounts with multiple agents. This option cannot be used with ‘dc-id’.
- --collector-id <dc_id>#
ID for the data collector. To disambiguate accounts with multiple collectors. This option cannot be used with ‘agent-id’.
Arguments
- FILE#
Required argument
delete-key#
Delete an integration key.
montecarlo integrations delete-key
[OPTIONS]
Options
- --key-id <key_id>#
Required Integration key id.
- --option-file <option_file>#
Read configuration from FILE.
disable-metadata-events#
Disable S3 metadata events.
montecarlo integrations disable-metadata-events
[OPTIONS]
Options
- --name <name>#
Resource name (only required if more than one exists)
- --dc-id <dc_id>#
Collector UUID to enable events on. If not specified, collector used by the Warehouse will be used.
- --option-file <option_file>#
Read configuration from FILE.
disable-query-log-events#
Disable S3 query log events.
montecarlo integrations disable-query-log-events
[OPTIONS]
Options
- --name <name>#
Resource name (only required if more than one exists)
- --option-file <option_file>#
Read configuration from FILE.
list#
List all active connections.
montecarlo integrations list [OPTIONS]
Options
- --option-file <option_file>#
Read configuration from FILE.
list-keys#
List all integration keys.
montecarlo integrations list-keys
[OPTIONS]
Options
- --option-file <option_file>#
Read configuration from FILE.
refresh-bi-to-warehouse-connections#
Create or refresh details of BI warehouse sources. Warehouse sources are warehouses connected to a BI container.
If only the bi-container-id parameter is supplied, then the behavior will be the following:
(1) All warehouse sources in the customer’s BI system will be matched against warehouses in Monte Carlo.
(2) If all of the customer’s BI warehouse sources have a match in Monte Carlo, then the warehouse source details will be saved.
(3) Otherwise, the response will contain the details of customer’s BI warehouse sources and Monte Carlo warehouses. The caller will then need to manually save the warehouse source details by specifying the warehouse-source-details parameter.
If the warehouse-source-details parameter is also supplied, then the warehouses will be validated and saved.
montecarlo integrations refresh-bi-to-warehouse-connections
[OPTIONS]
Options
- --bi-container-id <bi_container_id>#
Required ID of the BI container for which to refresh warehouse sources.
- --warehouse-source-details <warehouse_source_details>#
An optional JSON array of warehouse sources. If supplied, these warehouse sources will be created. If an entry with the same bi_container_id, warehouse_resource_id and warehouse_resource_type is found, its bi_warehouse_id will be updated.
Each JSON object in this array has the following fields:
bi_warehouse_id: The ID of the warehouse in the customer’s ID space.
warehouse_resource_id: The ID of the warehouse in Monte Carlo’s ID space.
warehouse_resource_type: The type of the warehouse.
E.g. –warehouse-source-details ‘[{“bi_warehouse_id”:”0dd5d40e-8749-71cb-9fa5-2e33b570ff43”, “warehouse_resource_id”: “11b1ad6f-e35c-4532-84d8-2fc88bd53660”, “warehouse_resource_type”: “snowflake”}]’
remove#
Remove an existing connection. Deletes any associated jobs, monitors, etc.
montecarlo integrations remove
[OPTIONS]
Options
- --connection-id <connection_id>#
Required ID for the connection.
- --no-prompt#
Don’t ask for confirmation.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
set-bi-connection-name#
Set the name of a BI Connection.
montecarlo integrations set-bi-connection-name
[OPTIONS]
Options
- --bi-connection-id <bi_connection_id>#
Required UUID of the existing BI Connection
- --new-name <new_name>#
Required Name to give the BI Connection
set-warehouse-name#
Set the name of a Warehouse.
montecarlo integrations set-warehouse-name
[OPTIONS]
Options
- --current-name <current_name>#
Required Current Name of the Warehouse
- --new-name <new_name>#
Required Name to give the Warehouse
show-current-notebook-version#
Get the most up to date databricks notebook version.
montecarlo integrations show-current-notebook-version
[OPTIONS]
show-databricks-metadata-job-info#
Get the Databricks job info for your connection.
montecarlo integrations show-databricks-metadata-job-info
[OPTIONS]
Options
- --connection-id <connection_id>#
Required ID for the connection.
test#
Retest an existing connection.
montecarlo integrations test [OPTIONS]
Options
- --connection-id <connection_id>#
Required ID for the connection.
- --option-file <option_file>#
Read configuration from FILE.
test-confluent-kafka-connect-credentials#
Test and generate credential key for Confluent Cloud accessing Kafka Connect.
montecarlo integrations test-confluent-kafka-connect-credentials
[OPTIONS]
Options
- --confluent-env <confluent_env>#
Required Environment ID in Confluent Cloud.
- --cluster <cluster>#
Required Cluster ID of the Kafka Connect in Confluent Cloud.
- --api-key <api_key>#
Required API Key.
- --secret <secret>#
Required Secret of the API Key.
- --url <url>#
Special URL for accessing Kafka Connect API in Confluent Cloud. By default, we use the cloud API URL.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
test-confluent-kafka-credentials#
Test and generate credential key for Confluent Kafka Connection.
montecarlo integrations test-confluent-kafka-credentials
[OPTIONS]
Options
- --cluster <cluster>#
Required Cluster ID in Confluent Cloud.
- --api-key <api_key>#
Required API Key.
- --secret <secret>#
Required Secret of the API Key.
- --url <url>#
Required URL for accessing the Kafka Cluster in Confluent Cloud.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
test-msk-kafka-connect-credentials#
Test and generate credential key for a MSK Kafka Connect Connection.
montecarlo integrations test-msk-kafka-connect-credentials
[OPTIONS]
Options
- --cluster-arn <cluster_arn>#
Required ARN of the MSK cluster the connectors are running against.
- --iam-role-arn <iam_role_arn>#
Required ARN of an assumable IAM role that will be used for collection.
- --external-id <external_id>#
Required External id, if required to assume the IAM role for collection.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
test-msk-kafka-credentials#
Test and generate credential key for a MSK Kafka Connection (via REST proxy).
montecarlo integrations test-msk-kafka-credentials
[OPTIONS]
Options
- --cluster <cluster>#
Required Cluster ID
- --url <url>#
Required URL for accessing the REST Proxy.
- --auth-type <auth_type>#
Required The type of auth used to connect to the server
- Options:
NO_AUTH | BASIC | BEARER
- --auth-token <auth_token>#
The auth token used to connect to the server. Used for basic and bearer auth types.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
test-self-hosted-kafka-connect-credentials#
Test and generate credential key for a Self Hosted Kafka Connect.
montecarlo integrations test-self-hosted-kafka-connect-credentials
[OPTIONS]
Options
- --cluster <cluster>#
Required Cluster ID of the Kafka Connect.
- --url <url>#
Required URL for accessing the self hosted Kafka Connect Rest Server.
- --auth-type <auth_type>#
Required The type of auth used to connect to the server
- Options:
NO_AUTH | BASIC | BEARER
- --auth-token <auth_token>#
The auth token used to connect to the server. Used for basic and bearer auth types.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
test-self-hosted-kafka-credentials#
Test and generate credential key for a Self Hosted Kafka Connection.
montecarlo integrations test-self-hosted-kafka-credentials
[OPTIONS]
Options
- --cluster <cluster>#
Required Cluster ID
- --url <url>#
Required URL for accessing the self hosted Kafka Rest Proxy.
- --auth-type <auth_type>#
Required The type of auth used to connect to the server
- Options:
NO_AUTH | BASIC | BEARER
- --auth-token <auth_token>#
The auth token used to connect to the server. Used for basic and bearer auth types.
- --dc-id <dc_id>#
Data Collector UUID, if we’d like to test the credentials against the specific dc, or you have multiple DC.
update#
Update credentials for a connection. Only replaces/inserts the keys in changes by default.
montecarlo integrations update
[OPTIONS]
Options
- --connection-id <connection_id>#
Required ID for the connection.
- --changes <changes>#
Required Credential key,value pairs as JSON.
E.g. –changes ‘{“user”:”Apollo”}’
- --skip-validation#
Skip validating credentials.
- Default:
False
- --replace-all#
Replace all credentials rather than just inserting/updating the keys in changes.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
update-databricks-notebook#
Update Databricks Notebook to the latest version.
montecarlo integrations update-databricks-notebook
[OPTIONS]
Options
- --connection-id <connection_id>#
Required ID for the connection.
update-teradata#
Update a Teradata integration. For metadata, and custom SQL monitors.
montecarlo integrations update-teradata
[OPTIONS]
Options
- --host <host>#
Hostname.
- --port <port>#
Port to use for connection.
- --user <user>#
Username with access to the database.
- --password <password>#
User’s password. If you prefer a prompt (with hidden input) enter -1.
- --sslmode <sslmode>#
SSL mode for connections to Teradata.
- Options:
ALLOW | PREFER | REQUIRE | VERIFY_CA | VERIFY_FULL
- --logmech <logmech>#
Logon mechanism for Teradata connection.
- Options:
TD2 | BROWSER | JWT | LDAP | KRB5 | TDNEGO
- --ssl-ca-directory <ssl_ca_directory>#
Path to directory of PEM files containing CA certs. This option cannot be used with ‘ssl-ca’.
- --ssl-ca <ssl_ca>#
Path to PEM file containing CA cert(s). This option cannot be used with ‘ssl-ca-directory’.
- --ssl-disabled <ssl_disabled>#
A boolean value that disables usage of TLS.
- --skip-validation#
Skip all connection tests. This option cannot be used with ‘validate-only’.
- --validate-only#
Run connection tests without adding. This option cannot be used with ‘skip-validation’.
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --connection-id <connection_id>#
Required ID for the connection.
keys#
Manage Integration Gateway Keys
montecarlo keys [OPTIONS] COMMAND [ARGS]...
add-airflow#
Add a new Airflow Integration Key
montecarlo keys add-airflow [OPTIONS]
Options
- --name <name>#
Name of the Airflow Connection, required to disambiguate if multiple Airflow connections are defined
- --description <description>#
Required Description for the key
delete-airflow#
Delete an existing Airflow Integration Key
montecarlo keys delete-airflow
[OPTIONS]
Options
- --key-id <key_id>#
Required Id of the key to delete
list-airflow#
List existing Airflow Integration Keys in the account
montecarlo keys list-airflow [OPTIONS]
Options
- --name <name>#
Show keys only for the Airflow Connection with this name
management#
Manage account settings.
montecarlo management [OPTIONS] COMMAND [ARGS]...
configure-pii-filtering#
Configure PII filtering preferences for the account.
montecarlo management configure-pii-filtering
[OPTIONS]
Options
- --enable, --disable#
Whether PII filtering should be active for the account.
- --fail-mode <fail_mode>#
Whether PII filter failures will allow (OPEN) or prevent (CLOSE) data flow for this account.
- Options:
CLOSE | OPEN
get-collection-block-list#
List entities blocked from collection on this account.
montecarlo management get-collection-block-list
[OPTIONS]
Options
- --resource-name <resource_name>#
Name of a specific resource to filter by. Shows all resources by default.
get-pii-preferences#
Get PII filtering preferences.
montecarlo management get-pii-preferences
[OPTIONS]
update-collection-block-list#
Update entities for which collection is blocked on this account.
montecarlo management update-collection-block-list
[OPTIONS]
Options
- --add, --remove#
Required Whether the entities being specified should be added or removed from the block list.
- --resource-name <resource_name>#
Name of a specific resource to apply collection block to. Only warehouse names are supported for now. This option cannot be used with ‘filename’. This option requires setting ‘project’.
- --project <project>#
Top-level object hierarchy e.g. database, catalog, etc. This option cannot be used with ‘filename’. This option requires setting ‘resource-name’.
- --dataset <dataset>#
Intermediate object hierarchy e.g. schema, database, etc. This option cannot be used with ‘filename’. This option requires setting ‘project’, and ‘resource_name’.
- --collection-block-list-filename <filename>#
Filename that contains collection block definitions. This file is expected to be in a CSV format with the headers resource_name, project, and dataset. This option cannot be used with ‘project’, ‘dataset’, and ‘resource_name’.
monitors#
Manage monitors.
montecarlo monitors [OPTIONS] COMMAND [ARGS]...
apply#
Compile and apply monitor configuration.
montecarlo monitors apply [OPTIONS]
Options
- --project-dir <project_dir>#
Base directory of MC project (where montecarlo.yml is located). By default, this is set to the current working directory
- --namespace <namespace>#
Namespace of monitors configuration. This value will be ignored if we find a namespace in montecarlo.yml
- --dry-run#
Dry run (just shows planned changes but doesn’t apply them.)
- Default:
False
- --dbt-manifest <dbt_manifest>#
Path to dbt manifest used for resolving dbt ref() in monitor config
- --auto-yes#
Skip any interactive approval.
- Default:
False
- --create-non-ingested-tables#
Force create non-ingested tables if they don’t exist.
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
compile#
Compile monitor configuration.
montecarlo monitors compile [OPTIONS]
Options
- --project-dir <project_dir>#
Base directory of MC project (where montecarlo.yml is located). By default, this is set to the current working directory
- --namespace <namespace>#
Namespace of monitors configuration. This value will be ignored if we find a namespace in montecarlo.yml
- --dbt-manifest <dbt_manifest>#
Path to dbt manifest used for resolving dbt ref() in monitor config
convert-to-mac#
Convert monitors from UI to monitors configuration, exporting the monitors as YAML in the process.
montecarlo monitors convert-to-mac
[OPTIONS]
Options
- --namespace <namespace>#
Required Namespace for the exported monitors.
- --project-dir <project_dir>#
Required Path to a directory to export a MC monitor config project to.
- --monitors-file <monitors_file>#
File with monitor UUIDs to convert. One line per monitor.
- --dry-run#
Dry run (only export the monitors as monitors as code, but do not convert existing ones).
- Default:
False
convert-to-ui#
Convert monitors from a monitors as code namespace to UI monitors.
montecarlo monitors convert-to-ui
[OPTIONS]
Options
- --namespace <namespace>#
Required Namespace to convert.
- --dry-run#
Dry run (just shows planned changes but doesn’t apply them.)
- Default:
False
delete#
Delete monitor configuration.
montecarlo monitors delete [OPTIONS]
Options
- --project-dir <project_dir>#
Base directory of MC project (where montecarlo.yml is located). By default, this is set to the current working directory
- --namespace <namespace>#
Required Namespace of monitors configuration.
- --dry-run#
Dry run (just shows planned changes but doesn’t apply them.)
- Default:
False
- --option-file <option_file>#
Read configuration from FILE.
export-as-latest#
Export monitors in a monitors-as-code namespace upgraded to the latest version of monitors as code. Will only export monitors that can be upgraded.
montecarlo monitors export-as-latest
[OPTIONS]
Options
- --namespace <namespace>#
Required Namespace to export as latest.
- --monitors-file <monitors_file>#
File with monitor UUIDs to export as latest. One line per monitor.
export-migrated-dt#
Export monitors as code configuration for a given namespace. Only generates configuration for system migrated Dimension Tracking monitors.
montecarlo monitors export-migrated-dt
[OPTIONS]
Options
- --project-dir <project_dir>#
Base directory of MC project (where montecarlo.yml is located). By default, this is set to the current working directory
- --namespace <namespace>#
Namespace of monitors to be exported. This value will be ignored if we find a namespace in montecarlo.yml
- --option-file <option_file>#
Read configuration from FILE.
generate-from-dbt-tests#
Generate MC monitor config YAML from dbt tests.
montecarlo monitors generate-from-dbt-tests
[OPTIONS]
Options
- --output-path <output_path>#
Required Path for MC monitor config output YAML
- --dbt-manifest <dbt_manifest>#
Required Path to dbt manifest containing tests to generate MC monitor config for
- --test-type <test_type>#
Filter for types of dbt tests to convert. Can pass multiple. Defaults to all. Pass SINGULAR to filter to singular tests without a shared type.
- --label <label>#
Label to apply to all generated MC monitors. Can pass multiple.
get-template#
Get the monitors configuration for a given namespace.
montecarlo monitors get-template
[OPTIONS]
Options
- --namespace <namespace>#
Required Namespace for the exported monitors.
- --project-dir <project_dir>#
Required Path to a directory to export the template to.
list#
List monitors ordered by update recency.
montecarlo monitors list [OPTIONS]
Options
- --limit <limit>#
Max number of monitors to list.
- Default:
100
- --monitor-type <monitor_type>#
List monitors with monitor_type
- Options:
CIRCUIT_BREAKER_COMPATIBLE | CUSTOM_SQL | TABLE_METRIC | FRESHNESS | VOLUME | STATS | CATEGORIES | JSON_SCHEMA
- --namespace <namespace>#
List only monitors in this namespace
namespaces#
List all namespaces.
montecarlo monitors namespaces
[OPTIONS]
Options
- --limit <limit>#
Max number of namespaces to list.
- Default:
100
platform#
Manage platform settings.
montecarlo platform [OPTIONS] COMMAND [ARGS]...
list#
Lists all services in the account.
montecarlo platform list [OPTIONS]
test-migration#
Tests if all connections can be migrated to the Monte Carlo Platform.
montecarlo platform test-migration
[OPTIONS]
Options
- --service-id <service_id>#
ID for the service as listed by ‘platform list’ command. To disambiguate accounts with multiple services.
secrets#
Manage account secrets.
montecarlo secrets [OPTIONS] COMMAND [ARGS]...
create#
Add a new secret to the account.
montecarlo secrets create [OPTIONS]
Options
- --name <name>#
Required Name of the secret, to reference it when using the secret.
- --scope <scope>#
Scope where the secret can be used.
- Default:
global
- --value <value>#
Required Secret value
- --expires-at <expires_at>#
A date in the future when the secret should expire and no longer be readable.
- --description <description>#
Description for the secret.
delete#
Delete a secret from the account.
montecarlo secrets delete [OPTIONS]
Options
- --name <name>#
Required Name of the secret.
get#
Get a secret’s properties, optionally requesting the secret value.
montecarlo secrets get [OPTIONS]
Options
- --name <name>#
Required Name of the secret.
- --reveal#
Show the secret value. Only the owner of the secret or user with special permissions can get the secret value.
- Default:
False
list#
List all the secrets in the account.
montecarlo secrets list [OPTIONS]
validate#
Validate that the CLI can connect to Monte Carlo.
montecarlo validate [OPTIONS]