diff --git a/README.md b/README.md
index b99f95a..939eb6e 100644
--- a/README.md
+++ b/README.md
@@ -42,16 +42,17 @@ jobs:
         openai_api_key: ${{ secrets.OPENAI_API_KEY }}
 ```
 
-| Input             | Description                                           | Required | Default                    |
-| ----------------- | ----------------------------------------------------- | -------- | -------------------------- |
-| `github_token`    | The GitHub token to use for the Action                | Yes      |                            |
-| `openai_api_key`  | The [OpenAI API key] to use, keep it hidden           | Yes      |                            |
-| `pull_request_id` | The ID of the pull request to use                     | No       | Extracted from metadata    |
-| `openai_model`    | The [OpenAI model] to use                             | No       | `gpt-3.5-turbo`            |
-| `max_tokens`      | The maximum number of **prompt tokens** to use        | No       | `1000`                     |
-| `temperature`     | Higher values will make the model more creative (0-2) | No       | `0.6`                      |
-| `sample_prompt`   | The prompt to use for giving context to the model     | No       | See `SAMPLE_PROMPT`        |
-| `sample_response` | A sample response for giving context to the model     | No       | See `GOOD_SAMPLE_RESPONSE` |
+| Input               | Description                                                    | Required | Default                    |
+| ------------------- | -------------------------------------------------------------- | -------- | -------------------------- |
+| `github_token`      | The GitHub token to use for the Action                         | Yes      |                            |
+| `openai_api_key`    | The [OpenAI API key] to use, keep it hidden                    | Yes      |                            |
+| `pull_request_id`   | The ID of the pull request to use                              | No       | Extracted from metadata    |
+| `openai_model`      | The [OpenAI model] to use                                      | No       | `gpt-3.5-turbo`            |
+| `max_tokens`        | The maximum number of **prompt tokens** to use                 | No       | `1000`                     |
+| `temperature`       | Higher values will make the model more creative (0-2)          | No       | `0.6`                      |
+| `sample_prompt`     | The prompt to use for giving context to the model              | No       | See `SAMPLE_PROMPT`        |
+| `sample_response`   | A sample response for giving context to the model              | No       | See `GOOD_SAMPLE_RESPONSE` |
+| `completion_prompt` | The prompt to use for the model to generate the PR description | No       | See `COMPLETION_PROMPT`    |
 
 [OpenAI API key]: https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key
 
diff --git a/action.yml b/action.yml
index e1e9a97..a1e54d8 100644
--- a/action.yml
+++ b/action.yml
@@ -35,6 +35,10 @@ inputs:
     description: 'A sample of an ideal response based on the sample prompt'
     required: false
     default: ''
+  completion_prompt:
+    description: 'Prompt to use as the final prompt to the model, refer to COMPLETION_PROMPT in the Python file.'
+    required: false
+    default: ''
 
 runs:
   using: 'docker'
diff --git a/autofill_description.py b/autofill_description.py
index cee7a2c..265ac50 100755
--- a/autofill_description.py
+++ b/autofill_description.py
@@ -46,6 +46,11 @@
 Overall, this change will improve the quality of the project by helping us
 detect and prevent memory errors.
 """
 
+COMPLETION_PROMPT = f"""
+Write a pull request description focusing on the motivation behind the change and why it improves the project.
+Go straight to the point. The following changes took place: \n
+"""
+
 def main():
     parser = argparse.ArgumentParser(
@@ -98,6 +103,9 @@ def main():
     model_sample_response = os.environ.get(
         "INPUT_MODEL_SAMPLE_RESPONSE", GOOD_SAMPLE_RESPONSE
     )
+    completion_prompt = os.environ.get(
+        "INPUT_COMPLETION_PROMPT", COMPLETION_PROMPT
+    )
     authorization_header = {
         "Accept": "application/vnd.github.v3+json",
         "Authorization": "token %s" % github_token,
@@ -153,12 +161,6 @@ def main():
 
         pull_request_files.extend(pull_files_chunk)
 
-    completion_prompt = f"""
-Write a pull request description focusing on the motivation behind the change and why it improves the project.
-Go straight to the point. 
-
-The title of the pull request is "{pull_request_title}" and the following changes took place: \n
-"""
     for pull_request_file in pull_request_files:
         # Not all PR file metadata entries may contain a patch section
         # For example, entries related to removed binary files may not contain it
@@ -185,6 +187,10 @@ def main():
                 },
                 {"role": "user", "content": model_sample_prompt},
                 {"role": "assistant", "content": model_sample_response},
+                {
+                    "role": "user",
+                    "content": "Title of the pull request: " + pull_request_title,
+                },
                 {"role": "user", "content": completion_prompt},
             ],
             temperature=model_temperature,