diff --git a/README.md b/README.md
index 552bd2f..d8bcd09 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # salmon-sync
 
-This is a Github Action that syncs a folder to a Google Cloud bucket using `rclone` and then send an authenticated request to the doc site to invalidate the cache for the doc version.
-This action is only meant to work for Deephaven's documentation. It could be used in a more general purpose way to sync a folder into any Google cloud bucket (with the proper credentials), but that is subject to change and may break in any version.
+This is a GitHub Action that syncs a folder to an S3 bucket using `rclone`.
+This action is only meant to work for Deephaven's documentation.
 
 ## Parameters
 
@@ -15,22 +15,20 @@ inputs:
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  bucket:
+  aws-role:
     required: true
     type: string
-    description: "The Google Cloud bucket to sync to."
-  credentials:
-    required: true
-    type: string
-    description: "The Google Cloud credentials. Should be base64 encoded."
-  cache-bust-token:
-    required: true
-    type: string
-    description: "The cache-bust token"
-  docs-url:
-    required: true
-    type: string
-    description: "The doc site URL"
+    description: "The AWS role to assume."
+  production:
+    required: false
+    default: "false"
+    type: boolean
+    description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
+  temporary:
+    required: false
+    default: "true"
+    type: boolean
+    description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
 ```
 
 ## Example
 
@@ -44,8 +42,7 @@ Here is an example that syncs from the local path `temp/blog` to the blog sectio
   with:
     source: temp/blog
     destination: deephaven/deephaven.io/blog
-    bucket: ${{ vars.DOCS_PROD_BUCKET }} # or ${{ vars.DOCS_PREVIEW_BUCKET }}
-    credentials: ${{ secrets.DOCS_GOOGLE_CLOUD_CREDENTIALS }}
-    cache-bust-token: ${{ secrets.DOCS_CACHE_BUST_TOKEN }}
-    docs-url: ${{ vars.DOCS_PROD_URL }} # or ${{ vars.DOCS_PREVIEW_URL }}
+    production: true # false for PR previews
+    temporary: false # true will delete non-production files after 14 days
+    aws-role: ${{ vars.DOCS_AWS_ROLE }}
 ```
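Because the action now authenticates to AWS through OIDC (see the `aws-actions/configure-aws-credentials` step in `action.yml` below), the calling workflow must be allowed to request an ID token. Here is a minimal sketch of the permissions the README example assumes; the job name and build step are hypothetical:

```yaml
jobs:
  sync-docs:
    runs-on: ubuntu-latest
    permissions:
      id-token: write # required for the OIDC token exchange with AWS
      contents: read # required by actions/checkout
    steps:
      - uses: actions/checkout@v4
      # ... build the docs into temp/blog, then run the salmon-sync step shown in the README example above
```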
diff --git a/action.yml b/action.yml
index 7137244..0ddac3e 100644
--- a/action.yml
+++ b/action.yml
@@ -1,5 +1,5 @@
 name: Sync Salmon Directory
-description: Syncs a directory to a Google Cloud bucket using rclone.
+description: Syncs a directory to an S3 bucket using rclone.
 author: "deephaven"
 inputs:
   source:
@@ -10,22 +10,20 @@
     required: true
     type: string
     description: "The destination directory to sync. Relative to the bucket. It is recommended to use the GitHub repo path (such as deephaven/salmon-sync) as the minimum base to prevent collisions."
-  bucket:
+  aws-role:
     required: true
     type: string
-    description: "The Google Cloud bucket to sync to."
-  credentials:
-    required: true
-    type: string
-    description: "The Google Cloud credentials. Should be base64 encoded."
-  cache-bust-token:
-    required: true
-    type: string
-    description: "The cache-bust token"
-  docs-url:
-    required: true
-    type: string
-    description: "The doc site URL"
+    description: "The AWS role to assume."
+  production:
+    required: false
+    default: "false"
+    type: boolean
+    description: "If true, the files will be deployed to the production site. Otherwise they will be deployed to the preview site."
+  temporary:
+    required: false
+    default: "true"
+    type: boolean
+    description: "If true, the files will be marked as temporary and deleted after 14 days. Otherwise they will persist in S3 indefinitely."
 
 runs:
   using: "composite"
@@ -35,28 +33,20 @@
       with:
         version: v1.68.1
 
-    - name: Decode credentials
-      shell: bash
-      run: |
-        echo $RCLONE_GCS_SERVICE_ACCOUNT_CREDENTIALS_ENCODED | base64 --decode > $HOME/credentials.json
-      env:
-        RCLONE_GCS_SERVICE_ACCOUNT_CREDENTIALS_ENCODED: ${{ inputs.credentials }}
+    - name: AWS OIDC Auth
+      uses: aws-actions/configure-aws-credentials@v4
+      with:
+        aws-region: us-east-2 # Docs are hosted in us-east-2
+        role-to-assume: ${{ inputs.aws-role }}
 
     - name: Sync source to destination
       shell: bash
       env:
-        RCLONE_CONFIG_GCS_TYPE: "google cloud storage"
-        RCLONE_GCS_SERVICE_ACCOUNT_FILE: $HOME/credentials.json
-        RCLONE_GCS_BUCKET_POLICY_ONLY: "true"
-      run: rclone sync ${{ inputs.source }} gcs:${{ inputs.bucket }}/${{ inputs.destination }}
-
-    - name: Bust cache
-      shell: bash
-      env:
-        CACHE_BUST_TOKEN: ${{ inputs.cache-bust-token }}
+        RCLONE_S3_PROVIDER: AWS
+        RCLONE_S3_REGION: us-east-2
+        RCLONE_S3_ENV_AUTH: true
+        RCLONE_S3_NO_CHECK_BUCKET: true # Don't try to create the bucket and fail if it doesn't exist
+      # Check temporary != 'false' so that any other value is treated as temporary, since there's no actual validation of boolean or required inputs
+      # The production bucket doesn't have a lifecycle rule, so omitting temporary there just results in a tag on the files; they won't actually be deleted
       run: |
-        curl --fail-with-body --show-error --silent \
-          --request POST \
-          --header "authorization: Bearer $CACHE_BUST_TOKEN" \
-          --data "{ \"tags\": [ \"${{ inputs.destination }}\" ]}" \
-          --url ${{ inputs.docs-url }}/api/cache-bust/
+        rclone sync --fast-list --checksum ${{ inputs.source }} :s3:${{ inputs.production == 'true' && 'deephaven-docs' || 'deephaven-docs-preview' }}/${{ inputs.destination }} ${{ inputs.temporary != 'false' && '--header-upload "x-amz-tagging: temporary=true"' || '' }}
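For a concrete picture of what the sync step executes, the GitHub Actions expressions in the `run:` line above collapse to plain `rclone` invocations. Using the paths from the README example (the two bucket names are the ones hard-coded in the step), `production: true` with `temporary: false` runs roughly:

```sh
rclone sync --fast-list --checksum temp/blog :s3:deephaven-docs/deephaven/deephaven.io/blog
```

while the defaults (`production: false`, `temporary: true`) target the preview bucket and tag each uploaded object so the 14-day lifecycle rule can expire it:

```sh
rclone sync --fast-list --checksum temp/blog \
  :s3:deephaven-docs-preview/deephaven/deephaven.io/blog \
  --header-upload "x-amz-tagging: temporary=true"
```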