From bdd4f4534fe514353c8a731e045f5e5e9a2b6051 Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 16:53:08 -0600
Subject: [PATCH 1/6] AWS test action

---
 action.yml | 44 ++++++++++++++------------------------------
 1 file changed, 14 insertions(+), 30 deletions(-)

diff --git a/action.yml b/action.yml
index 7137244..ba6cc99 100644
--- a/action.yml
+++ b/action.yml
@@ -1,5 +1,5 @@
 name: Sync Salmon Directory
-description: Syncs a directory to a Google Cloud bucket using rclone.
+description: Syncs a directory to a bucket using rclone.
 author: "deephaven"
 inputs:
   source:
@@ -13,19 +13,15 @@ inputs:
   bucket:
     required: true
     type: string
-    description: "The Google Cloud bucket to sync to."
-  credentials:
+    description: "The bucket to sync to."
+  aws-role:
     required: true
     type: string
-    description: "The Google Cloud credentials. Should be base64 encoded."
-  cache-bust-token:
-    required: true
-    type: string
-    description: "The cache-bust token"
-  docs-url:
-    required: true
-    type: string
-    description: "The doc site URL"
+    description: "The AWS role to assume."
+  temporary:
+    required: false
+    type: boolean
+    description: "If true, the files will be marked as temporary and deleted after 14 days."
 
 runs:
   using: "composite"
@@ -35,28 +31,16 @@ runs:
       with:
         version: v1.68.1
 
-    - name: Decode credentials
-      shell: bash
-      run: |
-        echo $RCLONE_GCS_SERVICE_ACCOUNT_CREDENTIALS_ENCODED | base64 --decode > $HOME/credentials.json
-      env:
-        RCLONE_GCS_SERVICE_ACCOUNT_CREDENTIALS_ENCODED: ${{ inputs.credentials }}
+    - name: AWS OIDC Auth
+      uses: aws-actions/configure-aws-credentials@v4
+      with:
+        aws-region: us-east-2 # Docs are hosted in us-east-2
+        role-to-assume: ${{ inputs.aws-role }}
 
     - name: Sync source to destination
       shell: bash
       env:
-        RCLONE_CONFIG_GCS_TYPE: "google cloud storage"
         RCLONE_GCS_SERVICE_ACCOUNT_FILE: $HOME/credentials.json
         RCLONE_GCS_BUCKET_POLICY_ONLY: "true"
-      run: rclone sync ${{ inputs.source }} gcs:${{ inputs.bucket }}/${{ inputs.destination }}
-
-    - name: Bust cache
-      shell: bash
-      env:
-        CACHE_BUST_TOKEN: ${{ inputs.cache-bust-token }}
       run: |
-        curl --fail-with-body --show-error --silent \
-          --request POST \
-          --header "authorization: Bearer $CACHE_BUST_TOKEN" \
-          --data "{ \"tags\": [ \"${{ inputs.destination }}\" ]}" \
-          --url ${{ inputs.docs-url }}/api/cache-bust/
+        rclone sync --fast-list --checksum --s3-env-auth --s3-no-check-bucket ${{ inputs.source }} s3:${{ inputs.bucket }}/${{ inputs.destination }} --header-upload "x-amz-tagging: temporary=true"

From f8737e833f23a3be56f772e3b4c05064d362ba31 Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 17:24:27 -0600
Subject: [PATCH 2/6] Add missing env variables

---
 action.yml | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/action.yml b/action.yml
index ba6cc99..80b228a 100644
--- a/action.yml
+++ b/action.yml
@@ -40,7 +40,9 @@ runs:
     - name: Sync source to destination
       shell: bash
       env:
-        RCLONE_GCS_SERVICE_ACCOUNT_FILE: $HOME/credentials.json
-        RCLONE_GCS_BUCKET_POLICY_ONLY: "true"
+        RCLONE_S3_PROVIDER: AWS
+        RCLONE_S3_REGION: us-east-2
+        RCLONE_S3_ENV_AUTH: true
+        RCLONE_S3_NO_CHECK_BUCKET: true # Don't try to create the bucket and fail if it doesn't exist
       run: |
-        rclone sync --fast-list --checksum --s3-env-auth --s3-no-check-bucket ${{ inputs.source }} s3:${{ inputs.bucket }}/${{ inputs.destination }} --header-upload "x-amz-tagging: temporary=true"
+        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.bucket }}/${{ inputs.destination }} ${{ inputs.temporary && '--header-upload "x-amz-tagging: temporary=true"' }}
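
For local verification, the sync step after this patch is roughly equivalent to invoking rclone directly with the same S3 backend settings supplied through environment variables. A minimal sketch, assuming AWS credentials are already present in the environment; the bucket name and paths below are placeholders, not values from these patches:

```sh
# Sketch: mirror the action's sync step outside of GitHub Actions.
# RCLONE_S3_* variables configure the on-the-fly ":s3:" remote.
export RCLONE_S3_PROVIDER=AWS
export RCLONE_S3_REGION=us-east-2
export RCLONE_S3_ENV_AUTH=true        # pick up credentials from the environment
export RCLONE_S3_NO_CHECK_BUCKET=true # don't attempt to create the bucket

# --dry-run reports what would be copied without uploading anything.
rclone sync --fast-list --checksum --dry-run \
  ./local-docs \
  :s3:example-docs-bucket/deephaven/salmon-sync \
  --header-upload "x-amz-tagging: temporary=true"
```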

From 4ddc1e8b835052de4f7abe7023b7404979b5e241 Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 17:44:37 -0600
Subject: [PATCH 3/6] Simplify inputs for DH use

---
 README.md  | 31 +++++++++++++------------------
 action.yml | 16 ++++++++--------
 2 files changed, 21 insertions(+), 26 deletions(-)

diff --git a/README.md b/README.md
index 552bd2f..675a8b9 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # salmon-sync
 
-This is a Github Action that syncs a folder to a Google Cloud bucket using `rclone` and then send an authenticated request to the doc site to invalidate the cache for the doc version.
-This action is only meant to work for Deephaven's documentation. It could be used in a more general purpose way to sync a folder into any Google cloud bucket (with the proper credentials), but that is subject to change and may break in any version.
+This is a Github Action that syncs a folder to a S3 bucket using `rclone`.
+This action is only meant to work for Deephaven's documentation.
 
 ## Parameters
 
@@ -15,22 +15,18 @@ inputs:
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  bucket:
+  production:
     required: true
-    type: string
-    description: "The Google Cloud bucket to sync to."
-  credentials:
-    required: true
-    type: string
-    description: "The Google Cloud credentials. Should be base64 encoded."
-  cache-bust-token:
+    type: boolean
+    description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
+  temporary:
     required: true
-    type: string
-    description: "The cache-bust token"
-  docs-url:
+    type: boolean
+    description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
+  aws-role:
     required: true
     type: string
-    description: "The doc site URL"
+    description: "The AWS role to assume."
 ```
 
 ## Example
@@ -44,8 +40,7 @@ Here is an example that syncs from the local path `temp/blog` to the blog sectio
     with:
       source: temp/blog
      destination: deephaven/deephaven.io/blog
-      bucket: ${{ vars.DOCS_PROD_BUCKET }} # or ${{ vars.DOCS_PREVIEW_BUCKET }}
-      credentials: ${{ secrets.DOCS_GOOGLE_CLOUD_CREDENTIALS }}
-      cache-bust-token: ${{ secrets.DOCS_CACHE_BUST_TOKEN }}
-      docs-url: ${{ vars.DOCS_PROD_URL }} # or ${{ vars.DOCS_PREVIEW_URL }}
+      production: true # false for pr previews
+      temporary: false # true will delete non-production files after 14 days
+      aws-role: ${{ vars.DOCS_AWS_ROLE }}
 ```
diff --git a/action.yml b/action.yml
index 80b228a..bef81ad 100644
--- a/action.yml
+++ b/action.yml
@@ -10,18 +10,18 @@ inputs:
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  bucket:
+  production:
     required: true
-    type: string
-    description: "The bucket to sync to."
+    type: boolean
+    description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
+  temporary:
+    required: true
+    type: boolean
+    description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
   aws-role:
     required: true
     type: string
     description: "The AWS role to assume."
-  temporary:
-    required: false
-    type: boolean
-    description: "If true, the files will be marked as temporary and deleted after 14 days."
 
 runs:
   using: "composite"
@@ -45,4 +45,4 @@ runs:
         RCLONE_S3_ENV_AUTH: true
         RCLONE_S3_NO_CHECK_BUCKET: true # Don't try to create the bucket and fail if it doesn't exist
       run: |
-        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.bucket }}/${{ inputs.destination }} ${{ inputs.temporary && '--header-upload "x-amz-tagging: temporary=true"' }}
+        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary && '--header-upload "x-amz-tagging: temporary=true"' }}

From 3766df482a5f0875b569fd207103fb0ac74c7c4b Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 18:05:33 -0600
Subject: [PATCH 4/6] Fix for some omitted inputs

---
 README.md  | 14 ++++++++------
 action.yml | 17 ++++++++++-------
 2 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/README.md b/README.md
index 675a8b9..d8bcd09 100644
--- a/README.md
+++ b/README.md
@@ -15,18 +15,20 @@ inputs:
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  production:
+  aws-role:
     required: true
+    type: string
+    description: "The AWS role to assume."
+  production:
+    required: false
+    default: "false"
     type: boolean
     description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
   temporary:
-    required: true
+    required: false
+    default: "true"
     type: boolean
     description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
-  aws-role:
-    required: true
-    type: string
-    description: "The AWS role to assume."
 ```
 
 ## Example
diff --git a/action.yml b/action.yml
index bef81ad..e8246ed 100644
--- a/action.yml
+++ b/action.yml
@@ -10,18 +10,20 @@ inputs:
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  production:
+  aws-role:
     required: true
+    type: string
+    description: "The AWS role to assume."
+  production:
+    required: false
+    default: "false"
     type: boolean
     description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
   temporary:
-    required: true
+    required: false
+    default: "true"
     type: boolean
     description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
-  aws-role:
-    required: true
-    type: string
-    description: "The AWS role to assume."
 
 runs:
   using: "composite"
@@ -44,5 +46,6 @@ runs:
         RCLONE_S3_REGION: us-east-2
         RCLONE_S3_ENV_AUTH: true
         RCLONE_S3_NO_CHECK_BUCKET: true # Don't try to create the bucket and fail if it doesn't exist
+      # Check temporary != 'false' so any other value is marked as temporary since there's no actual validation of boolean or required inputs
       run: |
-        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary && '--header-upload "x-amz-tagging: temporary=true"' }}
+        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production == 'true' && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary != 'false' && '--header-upload "x-amz-tagging: temporary=true"' }}
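
With the defaults introduced in this patch, the templated run command resolves as follows for a preview deployment (`production` and `temporary` both omitted). This expansion is illustrative only, reusing the example source and destination paths from the README:

```sh
# production defaults to "false" -> inputs.production == 'true' is false -> preview bucket
# temporary defaults to "true"   -> inputs.temporary != 'false' is true  -> tag the upload
rclone sync --fast-list --checksum temp/blog \
  :s3:deephaven-docs-preview/deephaven/deephaven.io/blog \
  --header-upload "x-amz-tagging: temporary=true"
```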

From f7552a949486ce8ef0f75b8577eda41cad23a6bd Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 18:10:27 -0600
Subject: [PATCH 5/6] Add comment

---
 action.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/action.yml b/action.yml
index e8246ed..2d3d7b6 100644
--- a/action.yml
+++ b/action.yml
@@ -47,5 +47,6 @@ runs:
         RCLONE_S3_ENV_AUTH: true
         RCLONE_S3_NO_CHECK_BUCKET: true # Don't try to create the bucket and fail if it doesn't exist
       # Check temporary != 'false' so any other value is marked as temporary since there's no actual validation of boolean or required inputs
+      # The production bucket doesn't have a lifecycle rule, so omitting temporary just results in a tag on files. They won't actually be deleted
       run: |
         rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production == 'true' && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary != 'false' && '--header-upload "x-amz-tagging: temporary=true"' }}

From 583e331f81ef42f962b4e867d2eec0818e2a87a3 Mon Sep 17 00:00:00 2001
From: Matthew Runyon
Date: Thu, 20 Feb 2025 18:45:44 -0600
Subject: [PATCH 6/6] Fix for temporary false

---
 action.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/action.yml b/action.yml
index 2d3d7b6..0ddac3e 100644
--- a/action.yml
+++ b/action.yml
@@ -49,4 +49,4 @@ runs:
       # Check temporary != 'false' so any other value is marked as temporary since there's no actual validation of boolean or required inputs
       # The production bucket doesn't have a lifecycle rule, so omitting temporary just results in a tag on files. They won't actually be deleted
       run: |
-        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production == 'true' && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary != 'false' && '--header-upload "x-amz-tagging: temporary=true"' }}
+        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production == 'true' && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary != 'false' && '--header-upload "x-amz-tagging: temporary=true"' || '' }}
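
A note on the last fix: without `|| ''`, a `temporary` value of `'false'` makes the `&&` expression evaluate to `false`, which GitHub Actions interpolates into the script as the literal string `false` and passes to rclone as a stray argument; appending `|| ''` collapses it to an empty string instead. The 14-day cleanup mentioned in the comments is assumed to come from a tag-filtered lifecycle rule on the preview bucket rather than from the action itself; a sketch of what such a rule could look like, purely illustrative and not part of these patches (the rule ID is an assumption):

```sh
# Illustrative: expire objects tagged temporary=true after 14 days.
aws s3api put-bucket-lifecycle-configuration \
  --bucket deephaven-docs-preview \
  --lifecycle-configuration '{
    "Rules": [
      {
        "ID": "expire-temporary-docs",
        "Status": "Enabled",
        "Filter": { "Tag": { "Key": "temporary", "Value": "true" } },
        "Expiration": { "Days": 14 }
      }
    ]
  }'
```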