diff --git a/.claude/settings.local.json b/.claude/settings.local.json
new file mode 100644
index 0000000..b0de50f
--- /dev/null
+++ b/.claude/settings.local.json
@@ -0,0 +1,17 @@
+{
+  "permissions": {
+    "allow": [
+      "Bash(cargo build:*)",
+      "Bash(cargo check:*)",
+      "Bash(where clang:*)",
+      "Read(//c/Program Files/LLVM/bin/**)",
+      "Bash(echo \"LIBCLANG_PATH=$LIBCLANG_PATH\")",
+      "Read(//c/Program Files/**)",
+      "Bash(winget list:*)",
+      "Bash(cargo tree:*)",
+      "Bash(where llvm-config:*)",
+      "Read(//c/msys64/mingw64/bin/**)",
+      "Read(//c/msys64/clang64/bin/**)"
+    ]
+  }
+}
diff --git a/README.md b/README.md
index a6b492c..c2c3fde 100644
--- a/README.md
+++ b/README.md
@@ -68,6 +68,8 @@
 examples/databricks/serverless dev \
 -e AWS_ACCOUNT_ID=${AWS_ACCOUNT_ID} \
 -e DATABRICKS_ACCOUNT_ID=${DATABRICKS_ACCOUNT_ID} \
 -e DATABRICKS_AWS_ACCOUNT_ID=${DATABRICKS_AWS_ACCOUNT_ID} \
+--show-queries \
+--log-level debug \
 --dry-run
 pgrep -f "stackql srv"
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/ec2_permissions.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/cross_account_role/ec2_permissions.json
similarity index 100%
rename from examples/databricks/serverless/resources/aws/iam/policy_statements/ec2_permissions.json
rename to examples/databricks/serverless/resources/aws/iam/policy_statements/cross_account_role/ec2_permissions.json
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/iam_service_linked_role.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/cross_account_role/iam_service_linked_role.json
similarity index 100%
rename from examples/databricks/serverless/resources/aws/iam/policy_statements/iam_service_linked_role.json
rename to examples/databricks/serverless/resources/aws/iam/policy_statements/cross_account_role/iam_service_linked_role.json
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/kms_decrypt.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/kms_decrypt.json
new file mode 100644
index 0000000..32ff0c2
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/kms_decrypt.json
@@ -0,0 +1,11 @@
+{
+  "Effect": "Allow",
+  "Action": [
+    "kms:Decrypt",
+    "kms:Encrypt",
+    "kms:GenerateDataKey*"
+  ],
+  "Resource": [
+    "arn:aws:kms:"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_list.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_list.json
new file mode 100644
index 0000000..692a10c
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_list.json
@@ -0,0 +1,13 @@
+{
+  "Sid": "ManagedFileEventsListStatement",
+  "Effect": "Allow",
+  "Action": [
+    "sqs:ListQueues",
+    "sqs:ListQueueTags",
+    "sns:ListTopics"
+  ],
+  "Resource": [
+    "arn:aws:sqs:*:*:csms-*",
+    "arn:aws:sns:*:*:csms-*"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_setup.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_setup.json
new file mode 100644
index 0000000..c4eabbd
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_setup.json
@@ -0,0 +1,30 @@
+{
+  "Sid": "ManagedFileEventsSetupStatement",
+  "Effect": "Allow",
+  "Action": [
+    "s3:GetBucketNotification",
+    "s3:PutBucketNotification",
+    "sns:ListSubscriptionsByTopic",
+    "sns:GetTopicAttributes",
+    "sns:SetTopicAttributes",
+    "sns:CreateTopic",
+    "sns:TagResource",
+    "sns:Publish",
+    "sns:Subscribe",
+    "sqs:CreateQueue",
+    "sqs:DeleteMessage",
+    "sqs:ReceiveMessage",
+    "sqs:SendMessage",
+    "sqs:GetQueueUrl",
+    "sqs:GetQueueAttributes",
+    "sqs:SetQueueAttributes",
+    "sqs:TagQueue",
+    "sqs:ChangeMessageVisibility",
+    "sqs:PurgeQueue"
+  ],
+  "Resource": [
+    "{{ aws_s3_metastore_bucket.arn }}",
+    "arn:aws:sqs:*:*:csms-*",
+    "arn:aws:sns:*:*:csms-*"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_teardown.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_teardown.json
new file mode 100644
index 0000000..deea34f
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/managed_file_events_teardown.json
@@ -0,0 +1,13 @@
+{
+  "Sid": "ManagedFileEventsTeardownStatement",
+  "Effect": "Allow",
+  "Action": [
+    "sns:Unsubscribe",
+    "sns:DeleteTopic",
+    "sqs:DeleteQueue"
+  ],
+  "Resource": [
+    "arn:aws:sqs:*:*:csms-*",
+    "arn:aws:sns:*:*:csms-*"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/s3_access.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/s3_access.json
new file mode 100644
index 0000000..2c80181
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/s3_access.json
@@ -0,0 +1,17 @@
+{
+  "Effect": "Allow",
+  "Action": [
+    "s3:GetObject",
+    "s3:PutObject",
+    "s3:DeleteObject",
+    "s3:ListBucket",
+    "s3:GetBucketLocation",
+    "s3:ListBucketMultipartUploads",
+    "s3:ListMultipartUploadParts",
+    "s3:AbortMultipartUpload"
+  ],
+  "Resource": [
+    "{{ aws_s3_metastore_bucket.arn }}/*",
+    "{{ aws_s3_metastore_bucket.arn }}"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/sts_assume_role.json b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/sts_assume_role.json
new file mode 100644
index 0000000..5654207
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/iam/policy_statements/metastore_access_role/sts_assume_role.json
@@ -0,0 +1,9 @@
+{
+  "Effect": "Allow",
+  "Action": [
+    "sts:AssumeRole"
+  ],
+  "Resource": [
+    "arn:aws:iam::{{ databricks_aws_account_id }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"
+  ]
+}
diff --git a/examples/databricks/serverless/resources/aws/s3/bucket_policies.iql b/examples/databricks/serverless/resources/aws/s3/bucket_policies.iql
new file mode 100644
index 0000000..21dcc12
--- /dev/null
+++ b/examples/databricks/serverless/resources/aws/s3/bucket_policies.iql
@@ -0,0 +1,44 @@
+/*+ exists */
+SELECT count(*) as count
+FROM awscc.s3.bucket_policies
+WHERE region = '{{ region }}' AND
+Identifier = '{{ bucket_name }}'
+;
+
+/*+ create */
+INSERT INTO awscc.s3.bucket_policies (
+ Bucket,
+ PolicyDocument,
+ region
+)
+SELECT
+ '{{ bucket_name }}',
+ '{{ policy_document }}',
+ '{{ region }}';
+
+/*+ update */
+UPDATE awscc.s3.bucket_policies
+SET PatchDocument = string('{{ {
+ "PolicyDocument": policy_document
+} | generate_patch_document }}')
+WHERE
+ region = '{{ region }}' AND
+ Identifier = '{{ bucket_name }}';
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT
+count(*) as count
+FROM (
+ SELECT
+ AWS_POLICY_EQUAL(policy_document, '{{ policy_document }}') as test_policy_document
+ FROM awscc.s3.bucket_policies
+ WHERE region = '{{ region }}'
+ AND Identifier = '{{ bucket_name }}'
+)t
+WHERE test_policy_document = 1;
+
+/*+ delete */
+DELETE FROM awscc.s3.bucket_policies
+WHERE
+ Identifier = '{{ bucket_name }}' AND
+ region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/serverless/resources/aws/s3/buckets.iql b/examples/databricks/serverless/resources/aws/s3/buckets.iql
index bd2a66f..b50a316 100644
--- a/examples/databricks/serverless/resources/aws/s3/buckets.iql
+++ b/examples/databricks/serverless/resources/aws/s3/buckets.iql
@@ -1,7 +1,7 @@
 /*+ exists */
 SELECT count(*) as count
 FROM awscc.s3.buckets
-WHERE region = 'us-east-1' AND
+WHERE region = '{{ region }}' AND
 Identifier = '{{ bucket_name }}'
 ;
 
@@ -36,29 +36,30 @@ SET PatchDocument = string('{{ {
 WHERE region = '{{ region }}' AND
 Identifier = '{{ bucket_name }}';
 
-/*+ statecheck, retries=3, retry_delay=2 */
-SELECT count(*) as count
-FROM awscc.s3.buckets
-WHERE
-region = 'us-east-1' AND
-Identifier = '{{ bucket_name }}' AND
-JSON_EQUAL(bucket_encryption, '{{ bucket_encryption }}') AND
-JSON_EQUAL(ownership_controls, '{{ ownership_controls }}') AND
-JSON_EQUAL(public_access_block_configuration, '{{ public_access_block_configuration }}') AND
-JSON_EQUAL(tags, '{{ tags }}') AND
-JSON_EQUAL(versioning_configuration, '{{ versioning_configuration }}')
-;
-
-
-/*+ exports */
-SELECT
-bucket_name,
-arn
-FROM awscc.s3.buckets
-WHERE region = 'us-east-1' AND
-Identifier = '{{ bucket_name }}';
+/*+ exports, retries=3, retry_delay=5 */
+SELECT
+arn,
+bucket_name
+FROM (
+ SELECT
+ arn,
+ bucket_name,
+ JSON_EQUAL(ownership_controls, '{{ ownership_controls }}') as test_ownership_controls,
+ JSON_EQUAL(bucket_encryption, '{{ bucket_encryption }}') as test_bucket_encryption,
+ JSON_EQUAL(public_access_block_configuration, '{{ public_access_block_configuration }}') as test_public_access_block_configuration,
+ JSON_EQUAL(versioning_configuration, '{{ versioning_configuration }}') as test_versioning_configuration,
+ AWS_POLICY_EQUAL(tags, '{{ tags }}') as test_tags
+ FROM awscc.s3.buckets
+ WHERE region = '{{ region }}'
+ AND Identifier = '{{ bucket_name }}'
+)t
+WHERE test_ownership_controls = 1
+AND test_bucket_encryption = 1
+AND test_public_access_block_configuration = 1
+AND test_versioning_configuration = 1
+AND test_tags = 1;
 
 /*+ delete */
 DELETE FROM awscc.s3.buckets
 WHERE Identifier = '{{ bucket_name }}'
-AND region = 'us-east-1';
\ No newline at end of file
+AND region = '{{ region }}';
\ No newline at end of file
diff --git a/examples/databricks/serverless/resources/databricks_account/account_groups.iql b/examples/databricks/serverless/resources/databricks_account/account_groups.iql
new file mode 100644
index 0000000..4c3f921
--- /dev/null
+++ b/examples/databricks/serverless/resources/databricks_account/account_groups.iql
@@ -0,0 +1,83 @@
+/*+ exists */
+SELECT count(*) as count
+FROM databricks_account.iam.account_groups
+WHERE account_id = '{{ account_id }}'
+AND filter = 'displayName Eq "{{ displayName }}"'
+;
+
+/*+ create */
+INSERT INTO databricks_account.iam.account_groups (
+display_name,
+external_id,
+id,
+members,
+meta,
+roles,
+account_id
+)
+SELECT
+'{{ display_name }}',
+'{{ external_id }}',
+'{{ id }}',
+'{{ members }}',
+'{{ meta }}',
+'{{ roles }}',
+'{{ account_id }}'
+RETURNING
+id,
+account_id,
+displayName,
+externalId,
+members,
+meta,
+roles
+;
+
+/*+ update */
+UPDATE databricks_account.iam.account_groups
+SET
+operations = '{{ operations }}',
+schemas = '{{ schemas }}'
+WHERE
+account_id = '{{ account_id }}' --required
+AND id = '{{ id }}' --required;
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT count(*) as count
+FROM databricks_account.iam.account_groups
+WHERE
+id = '{{ id }}' AND
+members = '{{ members }}' AND
+meta = '{{ meta }}' AND
+roles = '{{ roles }}' AND
+account_id = '{{ account_id }}' -- required
+AND attributes = '{{ attributes }}'
+AND count = '{{ count }}'
+AND excluded_attributes = '{{ excluded_attributes }}'
+AND filter = '{{ filter }}'
+AND sort_by = '{{ sort_by }}'
+AND sort_order = '{{ sort_order }}'
+AND start_index = '{{ start_index }}'
+;
+
+/*+ exports */
+SELECT id,
+members,
+meta,
+roles
+FROM databricks_account.iam.account_groups
+WHERE account_id = '{{ account_id }}' -- required
+AND attributes = '{{ attributes }}'
+AND count = '{{ count }}'
+AND excluded_attributes = '{{ excluded_attributes }}'
+AND filter = '{{ filter }}'
+AND sort_by = '{{ sort_by }}'
+AND sort_order = '{{ sort_order }}'
+AND start_index = '{{ start_index }}'
+;
+
+/*+ delete */
+DELETE FROM databricks_account.iam.account_groups
+WHERE account_id = '{{ account_id }}' --required
+AND id = '{{ id }}' --required
+;
\ No newline at end of file
diff --git a/examples/databricks/serverless/resources/databricks_account/storage.iql b/examples/databricks/serverless/resources/databricks_account/storage.iql
new file mode 100644
index 0000000..f32f66d
--- /dev/null
+++ b/examples/databricks/serverless/resources/databricks_account/storage.iql
@@ -0,0 +1,30 @@
+/*+ exists */
+SELECT count(*) as count
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+;
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.storage (
+storage_configuration_name,
+root_bucket_info,
+account_id
+)
+SELECT
+'{{ storage_configuration_name }}',
+'{{ root_bucket_info }}',
+'{{ account_id }}';
+
+/*+ exports, retries=5, retry_delay=10 */
+SELECT storage_configuration_id
+FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ account_id }}'
+AND storage_configuration_name = '{{ storage_configuration_name }}'
+AND JSON_EXTRACT(root_bucket_info, '$.bucket_name') = '{{ root_bucket_info | from_json | get(key="bucket_name") }}';
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.storage
+WHERE account_id = '{{ account_id }}'
+AND storage_configuration_id = '{{ storage_configuration_id }}'
+;
\ No newline at end of file
diff --git a/examples/databricks/serverless/resources/databricks_account/workspaces.iql b/examples/databricks/serverless/resources/databricks_account/workspaces.iql
new file mode 100644
index 0000000..db82e1a
--- /dev/null
+++ b/examples/databricks/serverless/resources/databricks_account/workspaces.iql
@@ -0,0 +1,72 @@
+/*+ exists */
+SELECT count(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+;
+
+/*+ create */
+INSERT INTO databricks_account.provisioning.workspaces (
+aws_region,
+credentials_id,
+pricing_tier,
+storage_configuration_id,
+workspace_name,
+account_id
+)
+SELECT
+'{{ aws_region }}',
+'{{ credentials_id }}',
+'{{ pricing_tier }}',
+'{{ storage_configuration_id }}',
+'{{ workspace_name }}',
+'{{ account_id }}'
+;
+
+/*+ update */
+UPDATE databricks_account.provisioning.workspaces
+SET customer_facing_workspace = '{{ {
+ "credentials_id": credentials_id,
+ "storage_configuration_id": storage_configuration_id,
+ "compute_mode": compute_mode,
+ "pricing_tier": pricing_tier
+} | generate_patch_document }}'
+WHERE account_id = '{{ account_id }}'
+AND workspace_id IN (
+ SELECT workspace_id
+ FROM databricks_account.provisioning.workspaces
+ WHERE account_id = '{{ account_id }}'
+ AND workspace_name = '{{ workspace_name }}'
+)
+AND update_mask = 'credentials_id,storage_configuration_id,compute_mode,pricing_tier'
+;
+
+/*+ statecheck, retries=5, retry_delay=10 */
+SELECT count(*) as count
+FROM databricks_account.provisioning.workspaces
+WHERE
+credentials_id = '{{ credentials_id }}' AND
+storage_configuration_id = '{{ storage_configuration_id }}' AND
+workspace_name = '{{ workspace_name }}' AND
+aws_region = '{{ aws_region }}' AND
+pricing_tier = '{{ pricing_tier }}' AND
+account_id = '{{ account_id }}'
+;
+
+/*+ exports */
+SELECT
+workspace_name,
+workspace_id,
+deployment_name,
+workspace_status,
+'https://' || deployment_name || '.cloud.databricks.com' AS workspace_url
+FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ account_id }}'
+AND workspace_name = '{{ workspace_name }}'
+;
+
+/*+ delete */
+DELETE FROM databricks_account.provisioning.workspaces
+WHERE account_id = '{{ account_id }}'
+AND workspace_id = '{{ workspace_id }}'
+;
\ No newline at end of file
diff --git a/examples/databricks/serverless/stackql_manifest.yml b/examples/databricks/serverless/stackql_manifest.yml
index fc8f466..9f15ce2 100644
--- a/examples/databricks/serverless/stackql_manifest.yml
+++ b/examples/databricks/serverless/stackql_manifest.yml
@@ -57,8 +57,8 @@ resources:
         value:
           - PolicyDocument:
               Statement:
-                - file(aws/iam/policy_statements/ec2_permissions.json)
-                - file(aws/iam/policy_statements/iam_service_linked_role.json)
+                - file(aws/iam/policy_statements/cross_account_role/ec2_permissions.json)
+                - file(aws/iam/policy_statements/cross_account_role/iam_service_linked_role.json)
               Version: '2012-10-17'
             PolicyName: "{{ stack_name }}-{{ stack_env }}-policy"
       - name: managed_policy_arns
@@ -127,211 +127,163 @@ resources:
       - bucket_name
       - arn
 
-  # - name: aws/s3/workspace_bucket_policy
-  #   file: aws/s3/s3_bucket_policy.iql
-  #   props:
-  #     - name: policy_document
-  #       value:
-  #         Version: "2012-10-17"
-  #         Statement:
-  #           - Sid: Grant Databricks Access
-  #             Effect: Allow
-  #             Principal:
-  #               AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
-  #             Action:
-  #               - "s3:GetObject"
-  #               - "s3:GetObjectVersion"
-  #               - "s3:PutObject"
-  #               - "s3:DeleteObject"
-  #               - "s3:ListBucket"
-  #               - "s3:GetBucketLocation"
-  #             Resource:
-  #               - "{{ aws_s3_workspace_bucket_arn }}/*"
-  #               - "{{ aws_s3_workspace_bucket_arn }}"
+  - name: aws_s3_workspace_bucket_policy
+    file: aws/s3/bucket_policies.iql
+    props:
+      - name: bucket_name
+        value: "{{ aws_s3_workspace_bucket.bucket_name }}"
+      - name: policy_document
+        value:
+          Version: "2012-10-17"
+          Statement:
+            - Sid: Grant Databricks Access
+              Effect: Allow
+              Principal:
+                AWS: "arn:aws:iam::{{ databricks_aws_account_id }}:root"
+              Action:
+                - "s3:GetObject"
+                - "s3:GetObjectVersion"
+                - "s3:PutObject"
+                - "s3:DeleteObject"
+                - "s3:ListBucket"
+                - "s3:GetBucketLocation"
+              Resource:
+                - "{{ aws_s3_workspace_bucket.arn }}/*"
+                - "{{ aws_s3_workspace_bucket.arn }}"
 
-  # - name: databricks_account/storage_configuration
-  #   props:
-  #     - name: storage_configuration_name
-  #       value: "{{ stack_name }}-{{ stack_env }}-storage"
-  #     - name: root_bucket_info
-  #       value:
-  #         bucket_name: "{{ aws_s3_workspace_bucket_name }}"
-  #   exports:
-  #     - databricks_storage_configuration_id
+  - name: databricks_storage_configuration
+    file: databricks_account/storage.iql
+    props:
+      - name: account_id
+        value: "{{ databricks_account_id }}"
+      - name: storage_configuration_name
+        value: "{{ stack_name }}-{{ stack_env }}-storage"
+      - name: root_bucket_info
+        value:
+          bucket_name: "{{ aws_s3_workspace_bucket.bucket_name }}"
+    exports:
+      - storage_configuration_id
 
   # ====================================================================================
   # UC Storage Credential and Metastore Catalog Bucket
   # ====================================================================================
 
-  # - name: aws/s3/metastore_bucket
-  #   file: aws/s3/s3_bucket.iql
-  #   props:
-  #     - name: bucket_name
-  #       value: "{{ stack_name }}-{{ stack_env }}-metastore"
-  #     - name: ownership_controls
-  #       value:
-  #         Rules:
-  #           - ObjectOwnership: "BucketOwnerPreferred"
-  #     - name: bucket_encryption
-  #       value:
-  #         ServerSideEncryptionConfiguration:
-  #           - BucketKeyEnabled: true
-  #             ServerSideEncryptionByDefault:
-  #               SSEAlgorithm: "AES256"
-  #     - name: public_access_block_configuration
-  #       value:
-  #         BlockPublicAcls: true
-  #         IgnorePublicAcls: true
-  #         BlockPublicPolicy: true
-  #         RestrictPublicBuckets: true
-  #     - name: versioning_configuration
-  #       value:
-  #         Status: "Suspended"
-  #   exports:
-  #     - arn: aws_s3_metastore_bucket_arn
-  #     - bucket_name: aws_s3_metastore_bucket_name
-
-  # - name: aws/iam/metastore_access_role
-  #   file: aws/iam/iam_role.iql
-  #   props:
-  #     - name: role_name
-  #       value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
-  #     - name: assume_role_policy_document
-  #       value:
-  #         Version: "2012-10-17"
-  #         Statement:
-  #           - Effect: "Allow"
-  #             Principal:
-  #               AWS:
-  #                 - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
-  #             Action: "sts:AssumeRole"
-  #             Condition:
-  #               StringEquals:
-  #                 sts:ExternalId: "0000" # Placeholder
-  #     - name: description
-  #       value: 'Unity Catalog metastore access role for ({{ stack_name }}-{{ stack_env }})'
-  #     - name: path
-  #       value: '/'
-  #     - name: policies
-  #       value:
-  #         - PolicyName: "MetastoreS3Policy"
-  #           PolicyDocument:
-  #             Version: "2012-10-17"
-  #             Statement:
-  #               - Effect: "Allow"
-  #                 Action:
-  #                   - "s3:GetObject"
-  #                   - "s3:PutObject"
-  #                   - "s3:DeleteObject"
-  #                   - "s3:ListBucket"
-  #                   - "s3:GetBucketLocation"
-  #                   - "s3:ListBucketMultipartUploads"
-  #                   - "s3:ListMultipartUploadParts"
-  #                   - "s3:AbortMultipartUpload"
-  #                 Resource:
-  #                   - "{{ aws_s3_metastore_bucket_arn }}/*"
-  #                   - "{{ aws_s3_metastore_bucket_arn }}"
-
-  #               # - Effect: "Allow"
-  #               #   Action:
-  #               #     - "kms:Decrypt"
-  #               #     - "kms:Encrypt"
-  #               #     - "kms:GenerateDataKey*"
-  #               #   Resource:
-  #               #     - "arn:aws:kms:"
-
-  #               - Effect: "Allow"
-  #                 Action:
-  #                   - "sts:AssumeRole"
-  #                 Resource:
-  #                   - "arn:aws:iam::{{ databricks_aws_account_id }}:role/{{ stack_name }}-{{ stack_env }}-metastore-role"
-
-  #               - Sid: "ManagedFileEventsSetupStatement"
-  #                 Effect: "Allow"
-  #                 Action:
-  #                   - "s3:GetBucketNotification"
-  #                   - "s3:PutBucketNotification"
-  #                   - "sns:ListSubscriptionsByTopic"
-  #                   - "sns:GetTopicAttributes"
-  #                   - "sns:SetTopicAttributes"
-  #                   - "sns:CreateTopic"
-  #                   - "sns:TagResource"
-  #                   - "sns:Publish"
-  #                   - "sns:Subscribe"
-  #                   - "sqs:CreateQueue"
-  #                   - "sqs:DeleteMessage"
-  #                   - "sqs:ReceiveMessage"
-  #                   - "sqs:SendMessage"
-  #                   - "sqs:GetQueueUrl"
-  #                   - "sqs:GetQueueAttributes"
-  #                   - "sqs:SetQueueAttributes"
-  #                   - "sqs:TagQueue"
-  #                   - "sqs:ChangeMessageVisibility"
-  #                   - "sqs:PurgeQueue"
-  #                 Resource:
-  #                   - "{{ aws_s3_metastore_bucket_arn }}"
-  #                   - "arn:aws:sqs:*:*:csms-*"
-  #                   - "arn:aws:sns:*:*:csms-*"
-
-  #               - Sid: "ManagedFileEventsListStatement"
-  #                 Effect: "Allow"
-  #                 Action:
-  #                   - "sqs:ListQueues"
-  #                   - "sqs:ListQueueTags"
-  #                   - "sns:ListTopics"
-  #                 Resource:
-  #                   - "arn:aws:sqs:*:*:csms-*"
-  #                   - "arn:aws:sns:*:*:csms-*"
+  - name: aws_s3_metastore_bucket
+    file: aws/s3/buckets.iql
+    props:
+      - name: bucket_name
+        value: "{{ stack_name }}-{{ stack_env }}-metastore"
+      - name: ownership_controls
+        value:
+          Rules:
+            - ObjectOwnership: "BucketOwnerPreferred"
+      - name: bucket_encryption
+        value:
+          ServerSideEncryptionConfiguration:
+            - BucketKeyEnabled: true
+              ServerSideEncryptionByDefault:
+                SSEAlgorithm: "AES256"
+      - name: public_access_block_configuration
+        value:
+          BlockPublicAcls: true
+          IgnorePublicAcls: true
+          BlockPublicPolicy: true
+          RestrictPublicBuckets: true
+      - name: versioning_configuration
+        value:
+          Status: "Suspended"
+      - name: tags
+        value: []
+        merge:
+          - global_tags
+    exports:
+      - arn
+      - bucket_name
-  #               - Sid: "ManagedFileEventsTeardownStatement"
-  #                 Effect: "Allow"
-  #                 Action:
-  #                   - "sns:Unsubscribe"
-  #                   - "sns:DeleteTopic"
-  #                   - "sqs:DeleteQueue"
-  #                 Resource:
-  #                   - "arn:aws:sqs:*:*:csms-*"
-  #                   - "arn:aws:sns:*:*:csms-*"
-  #     - name: tags
-  #       value:
-  #         - Key: Purpose
-  #           Value: "Unity Catalog Storage Credential"
-  #       merge:
-  #         - global_tags
-  #   skip_validation: true
-  #   exports:
-  #     - aws_iam_role_arn: metastore_access_role_arn
+  - name: aws_metastore_access_role
+    file: aws/iam/roles.iql
+    props:
+      - name: role_name
+        value: "{{ stack_name }}-{{ stack_env }}-metastore-role"
+      - name: assume_role_policy_document
+        value:
+          Version: "2012-10-17"
+          Statement:
+            - Effect: "Allow"
+              Principal:
+                AWS:
+                  - "arn:aws:iam::414351767826:role/unity-catalog-prod-UCMasterRole-14S5ZJVKOTYTL"
+              Action: "sts:AssumeRole"
+              Condition:
+                StringEquals:
+                  sts:ExternalId: "0000" # Placeholder
+      - name: description
+        value: 'Unity Catalog metastore access role for ({{ stack_name }}-{{ stack_env }})'
+      - name: path
+        value: '/'
+      - name: policies
+        value:
+          - PolicyName: "MetastoreS3Policy"
+            PolicyDocument:
+              Version: "2012-10-17"
+              Statement:
+                - file(aws/iam/policy_statements/metastore_access_role/s3_access.json)
+                # - file(aws/iam/policy_statements/metastore_access_role/kms_decrypt.json)
+                - file(aws/iam/policy_statements/metastore_access_role/sts_assume_role.json)
+                - file(aws/iam/policy_statements/metastore_access_role/managed_file_events_setup.json)
+                - file(aws/iam/policy_statements/metastore_access_role/managed_file_events_list.json)
+                - file(aws/iam/policy_statements/metastore_access_role/managed_file_events_teardown.json)
+      - name: managed_policy_arns
+        value: []
+      - name: max_session_duration
+        value: 3600
+      - name: permissions_boundary
+        value: ""
+      - name: tags
+        value:
+          - Key: Purpose
+            Value: "Unity Catalog Storage Credential"
+        merge:
+          - global_tags
+    skip_validation: true
+    exports:
+      - role_name
+      - arn
 
   # ====================================================================================
   # DBX Workspace
   # ====================================================================================
 
-  # - name: databricks_account/workspace
-  #   props:
-  #     - name: workspace_name
-  #       value: "{{ stack_name }}-{{ stack_env }}-workspace"
-  #     - name: aws_region
-  #       value: "{{ region }}"
-  #     - name: credentials_id
-  #       value: "{{ databricks_credentials_id }}"
-  #     - name: storage_configuration_id
-  #       value: "{{ databricks_storage_configuration_id }}"
-  #     - name: pricing_tier
-  #       value: PREMIUM
-  #   exports:
-  #     - databricks_workspace_name
-  #     - databricks_workspace_id
-  #     - databricks_deployment_name
-  #     - databricks_workspace_status
-  #     - databricks_workspace_url
+  - name: databricks_workspace
+    file: databricks_account/workspaces.iql
+    props:
+      - name: account_id
+        value: "{{ databricks_account_id }}"
+      - name: aws_region
+        value: "{{ region }}"
+      - name: pricing_tier
+        value: PREMIUM
+      - name: credentials_id
+        value: "{{ databricks_account_credentials.credentials_id }}"
+      - name: storage_configuration_id
+        value: "{{ databricks_storage_configuration.storage_configuration_id }}"
+      - name: workspace_name
+        value: "{{ stack_name }}-{{ stack_env }}-workspace"
+    exports:
+      - workspace_name
+      - workspace_id
+      - deployment_name
+      - workspace_status
+      - workspace_url
 
-  # - name: databricks_account/workspace_group
-  #   props:
-  #     - name: display_name
-  #       value: "{{ stack_name }}-{{ stack_env }}-workspace-admins"
-  #   exports:
-  #     - databricks_group_id
-  #     - databricks_group_name
+  - name: workspace_admins_group
+    file: databricks_account/account_groups.iql
+    props:
+      - name: display_name
+        value: "{{ stack_name }}-{{ stack_env }}-workspace-admins"
+    exports:
+      - id
+      - display_name
 
   # - name: databricks_account/get_users
   #   type: query
 
diff --git a/src/commands/info.rs b/src/commands/info.rs
index 32d0a9d..c40c33a 100644
--- a/src/commands/info.rs
+++ b/src/commands/info.rs
@@ -95,17 +95,17 @@ pub fn execute() {
         }
     }
 
-    // Display contributors if available
-    let raw_contributors = option_env!("CONTRIBUTORS").unwrap_or("");
+    // Display contributors from embedded contributors.csv
+    let raw_contributors = include_str!("../../contributors.csv");
     let contributors: Vec<&str> = raw_contributors
-        .split(',')
+        .lines()
         .filter(|s| !s.trim().is_empty())
         .collect();
 
     if !contributors.is_empty() {
-        println!("\n{}", "Special thanks to:".green().bold());
+        println!("\n{}", "Special Thanks to our Contributors".green().bold());
 
-        for chunk in contributors.chunks(5) {
+        for chunk in contributors.chunks(4) {
             println!(" {}", chunk.join(", "));
         }
     }
diff --git a/src/main.rs b/src/main.rs
index ddaee17..c3d2cf0 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -57,7 +57,7 @@ fn main() {
             Arg::new("server")
                 .long("server")
                 .alias("host")
-                .short('h')
+                .short('H')
                 .help("StackQL server host to connect to")
                 .global(true)
                 .default_value(DEFAULT_SERVER_HOST)