
Commit 1dbfa80

Merge pull request #195 from ministryofjustice/DBA-569
Dba 569
2 parents 10eff18 + 703c61e commit 1dbfa80

30 files changed: +937 −148 lines

.github/workflows/oracle-db-release-update.yml (+31 −26)
@@ -1,5 +1,4 @@
 name: "Oracle: Release Update"
-run-name: "Oracle: ${{ github.event.inputs.TargetEnvironment }}-release-update"
 on:
   workflow_dispatch:
     inputs:
@@ -46,7 +45,7 @@ on:
         required: true
         type: choice
         options:
-          - "/u01/software/19c/patches"
+          - "/u02/stage"
       ComboPatch:
         description: "Combo Patch to Install (default = use value from environment configuration)"
         required: true
@@ -55,15 +54,16 @@ on:
           - "default"
           - "34773504:p34773504_190000_Linux-x86-64.zip:(19.18)"
           - "35370167:p35370167_190000_Linux-x86-64.zip:(19.20)"
+          - "36031453:p36031453_190000_Linux-x86-64.zip:(19.22)"
       OPatch:
         description: "OPatch Utility to Use (default = use value from environment configuration)"
         required: true
         type: choice
         options:
           - "default"
-          - "6880880:p6880880_190000_Linux-x86-64.12.2.0.1.29.zip:(12.2.0.1.29)"
-          - "6880880:p6880880_190000_Linux-x86-64.12.2.0.1.32.zip:(12.2.0.1.32)"
           - "6880880:p6880880_190000_Linux-x86-64.12.2.0.1.36.zip:(12.2.0.1.36)"
+          - "6880880:p6880880_190000_Linux-x86-64.12.2.0.1.36.zip:(12.2.0.1.39)"
+          - "6880880:p6880880_190000_Linux-x86-64.12.2.0.1.41.zip:(12.2.0.1.41)"
       AWSSnapshot:
         description: "Number of Days to Keep AWS Snapshot of Primary Database Host"
         required: true
@@ -91,6 +91,8 @@ on:
         type: string
         default: "main"

+run-name: "Oracle: ${{ format('{0}-release-update-{1}',github.event.inputs.TargetEnvironment,tojson(inputs)) }}"
+
 env:
   ansible_config: operations/playbooks/ansible.cfg
   command: ansible-playbook operations/playbooks/oracle_release_update/playbook.yml
@@ -130,21 +132,18 @@ jobs:
           ref: ${{ github.event.inputs.SourceConfigVersion }}
           fetch-depth: 0

-      - name: Install yq
-        uses: dcarbone/install-yq-action@v1.1.1
+      - name: Checkout Role From modernisation-platform-configuration-management
+        uses: actions/checkout@v4
         with:
-          download-compressed: true
-          version: "v4.35.1"
-          force: true
-
-      - name: Count Standby Databases Configured In Ansible Inventory
-        id: countstandbydbs
-        working-directory: ${{ env.inventory }}
-        run: |
-          database_environment="environment_name_$(echo ${{ github.event.inputs.TargetEnvironment}} | sed 's/delius-core-dev/delius_core_development_dev/;s/delius-core-test/delius_core_test_test/;s/delius-core-training/delius_core_test_training/;s/delius-core-stage/delius_core_preproduction_stage/;s/delius-core-pre-prod/delius_core_preproduction_pre_prod/;s/delius-core-prod/delius_core_production_prod/')"
-          database_type=$(echo ${{ github.event.inputs.TargetHost }} | cut -d_ -f1)
-          high_availability_count=$(yq .high_availability_count.${database_type} group_vars/${database_environment}_all.yml)
-          echo "high_availability_count=$high_availability_count"
+          repository: ministryofjustice/modernisation-platform-configuration-management
+          sparse-checkout-cone-mode: false
+          sparse-checkout: |
+            ansible/roles/secretsmanager-passwords
+            ansible/roles/get-ec2-facts
+            ansible/roles/get-modernisation-platform-facts
+          path: roles
+          ref: ${{ github.event.inputs.SourceConfigVersion }}
+          fetch-depth: 0

       - name: Checkout From hmpps-delius-operational-automation
         uses: actions/checkout@v4
@@ -153,6 +152,8 @@ jobs:
           sparse-checkout-cone-mode: false
           sparse-checkout: |
             playbooks/oracle_release_update
+            playbooks/delius_oem_metrics_setup
+            common/*
             ansible.cfg
           path: operations
           ref: ${{ github.event.inputs.SourceCodeVersion }}
@@ -176,13 +177,17 @@ jobs:
       - name: Run Release Update Playbook
         run: |
           export ANSIBLE_CONFIG=$ansible_config
+          # Link the checked out configuration roles to somewhere Ansible will be able to find them
+          ln -svf $PWD/roles/ansible/roles $PWD/operations/playbooks/oracle_release_update/roles
+          # Link the OEM Metrics Setup Play to allow it to be used as a role
+          ln -svf $PWD/operations/playbooks/delius_oem_metrics_setup/delius_oem_metrics_setup $PWD/roles/ansible/roles/delius_oem_metrics_setup
           $command -i $inventory \
-          -e target_hosts=${{ steps.prepareinventorynames.outputs.hosts }}
-          -e apply_mode=${{ github.event.inputs.ApplyMode }}
-          -e oracle_patch_directory=${{ github.event.inputs.OraclePatchDirectory }}
-          -e combo_patch_info=${{ github.event.inputs.ComboPatchInfo }}
-          -e opatch_info=${{github.event.inputs.Opatch }}
-          -e "keep_aws_snapshot='${{github.event.inputs.AWSSnapShot }}'"
-          -e high_availability_count=${{ steps.countstandbydbs.outputs.high_availability_count }}
-          -e gi_ru_patch_info=''
+          -e target_hosts=${{ steps.prepareinventorynames.outputs.hosts }} \
+          -e apply_mode=${{ github.event.inputs.ApplyMode }} \
+          -e oracle_patch_directory=${{ github.event.inputs.OraclePatchDirectory }} \
+          -e combo_patch_info='${{ github.event.inputs.ComboPatch }}' \
+          -e opatch_info='${{github.event.inputs.Opatch }}' \
+          -e "keep_aws_snapshot='${{github.event.inputs.AWSSnapShot }}'" \
+          -e high_availability_count=${{ steps.countstandbydbs.outputs.high_availability_count }} \
+          -e gi_ru_patch_info='' \
           -e ojvm_ru_patch_info=''
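
The reworked run-name uses GitHub's format() and toJSON() expressions so that the workflow-run title carries every dispatch input, not just the target environment. For a hypothetical dispatch (the input values below are illustrative, not taken from this commit), the rendered title would look like:

    Oracle: delius-core-dev-release-update-{"TargetEnvironment":"delius-core-dev","ApplyMode":"check","OraclePatchDirectory":"/u02/stage",...}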

@@ -0,0 +1,44 @@
+---
+- hosts: "{{ target_hosts }}"
+  gather_facts: no
+  become: yes
+  become_user: oracle
+  roles:
+    - deinstall_oracle
+
+# Run the following to Update OEM Targets *** only when inside of AWS ***
+# This playbook calls an SSM Automation to run the OEM Changes within the associated
+# Engineering Environment. We loop through each host in the environment
+# where the deinstallation completed successfully.
+- hosts: localhost
+  gather_facts: no
+  become: no
+  tasks:
+    - include_vars:
+        file: release_update/vars/main.yml
+    - name: Run SSM Automation to Update OEM Targets
+      include: release_update/tasks/update_oem_targets.yml
+      vars:
+        update_host: "{{ item }}"
+        grid_home: "{{ oem_gi_home | replace('DO_NOT_DEINSTALL','NONE') }}"
+        database_home: "{{ oem_db_home | replace('DO_NOT_DEINSTALL','NONE') }}"
+        document_name: "oracle-delete-home-oem"
+      loop: "{{ target_hosts.split(',') }}"
+      run_once: yes
+      when:
+        - is_aws_environment
+        - ( groups[item][0] | default('UNDEFINED_TARGET') ) in deinstalled_targets
+
+# Run the following to Update OEM Targets *** only when run on non-AWS (e.g. local VMs) ***
+# This playbook runs on the Primary OEM Host directly since AWS IAM / Engineering Accounts
+# are not relevant if not running in AWS
+- hosts: "{{ groups['oem_primarydb'][0] }}"
+  gather_facts: no
+  become: yes
+  become_user: oracle
+  vars:
+    oracle_grid_new_oracle_home: "{{ hostvars.localhost.oem_gi_home }}"
+    oracle_database_new_oracle_home: "{{ hostvars.localhost.oem_db_home }}"
+    deletion_targets: "{{ hostvars.localhost.deinstalled_targets | default([]) }}"
+  roles:
+    - { role: update_oem_after_deinstall, when: not hostvars.localhost.is_aws_environment }
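
The included update_oem_targets.yml task is not part of this diff, but given the document_name set above, the automation it starts is presumably equivalent to something like the following CLI call. The parameter names here are assumptions for illustration only; the document name is the one from this commit:

    # Sketch: trigger the OEM target-deletion automation for one deinstalled host
    # (HostName/GridHome/DatabaseHome are assumed parameter names, not confirmed by this diff)
    aws ssm start-automation-execution \
        --document-name "oracle-delete-home-oem" \
        --parameters "HostName=<deinstalled-host>,GridHome=NONE,DatabaseHome=NONE"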

@@ -0,0 +1,104 @@
+
+#!/bin/bash
+#
+# Run a number of checks to confirm that the Oracle Home we are about to deinstall is not being used.
+#
+
+DEINSTALL_HOME=$1
+
+# Determine current active database and grid homes
+. ~oracle/.bash_profile
+DB_ORACLE_HOME=${ORACLE_HOME}
+export ORAENV_ASK=NO
+export DB_SID=${ORACLE_SID}
+export ORACLE_SID=+ASM
+. oraenv >/dev/null
+GI_ORACLE_HOME=${ORACLE_HOME}
+export ORACLE_SID=${DB_SID}
+. oraenv >/dev/null
+
+if [[ $( ls -l /proc/*/exe 2>/dev/null | awk -F\-\> '{print $2}' | xargs dirname | grep -c "^${DEINSTALL_HOME}[/].*") -gt 0 ]];
+then
+   echo "Active processes found using ${DEINSTALL_HOME}."
+   exit 1
+fi
+
+if [[ $(grep -c "[^#].*${DEINSTALL_HOME}:.*" /etc/oratab) -gt 0 ]];
+then
+   echo "References to ${DEINSTALL_HOME} found in /etc/oratab."
+   exit 1
+fi
+
+if [[ $(grep "^ORA_CRS_HOME" /etc/init.d/init.ohasd | awk -F= '{print $2}') == "${DEINSTALL_HOME}" ]];
+then
+   echo "${DEINSTALL_HOME} used in /etc/init.d/init.ohasd."
+   exit 1
+fi
+
+if [[ $(. ~oracle/.bash_profile; srvctl config database | xargs -I {} srvctl config database -d {} | grep "Oracle home:" | cut -d: -f2 | sed 's/^ //') == "${DEINSTALL_HOME}" ]];
+then
+   echo "Oracle database server configuration contains ${DEINSTALL_HOME}."
+   exit 1
+fi
+
+IFS=$'\n'
+# When checking for references in the current DBS directory for Grid Infrastructure we can ignore the ab_+ASM.dat
+# mapping file as this may contain environment variables set at the time of previous upgrade/patching which are
+# not relevant. This file is reset (to exclude these variables) by restarting ASM but this is not convenient in higher environments.
+for x in $(ls -1 ${GI_ORACLE_HOME}/dbs/* | grep -v ab_+ASM.dat | xargs grep "${DEINSTALL_HOME}" 2>/dev/null);
+do
+   echo "$x ${DEINSTALL_HOME}"
+   exit 1
+done
+
+# Check if the ASM mapping file contains any references to the old home other than the known obsolete environment settings
+for x in $(strings ${GI_ORACLE_HOME}/dbs/ab_+ASM.dat | grep "${DEINSTALL_HOME}" | grep -v ^oracleHome= | grep -v ^OPATCHAUTO_PERL_PATH= | grep -v ^HOME= | grep -v ^CLASSPATH=)
+do
+   echo "ab_+ASM.dat $x"
+   exit 1
+done
+
+
+# If there are old controlfile snapshots present these may contain reference to previous Oracle Homes
+# Simply create a new snapshot controlfile if required
+if [[ -f ${DB_ORACLE_HOME}/dbs/snapcf${ORACLE_SID}.f ]]; then
+
+   grep "${DEINSTALL_HOME}" ${DB_ORACLE_HOME}/dbs/snapcf${ORACLE_SID}.f > /dev/null
+   if [[ $? -eq 0 ]];
+   then
+      rman target / <<EORMAN
+backup current controlfile;
+exit
+EORMAN
+   fi
+
+   # Sometimes it is necessary to repeat the above step to clear all references
+   grep "${DEINSTALL_HOME}" ${DB_ORACLE_HOME}/dbs/snapcf${ORACLE_SID}.f > /dev/null
+   if [[ $? -eq 0 ]];
+   then
+      rman target / <<EORMAN
+backup current controlfile;
+exit
+EORMAN
+   fi
+fi
+
+# Check that the default RMAN SBT Channel is not pointing to the deinstall home
+rman target / <<EORMAN | grep "CONFIGURE CHANNEL DEVICE TYPE 'SBT_TAPE' PARMS" | awk -F= '{print $NF}' | tr -d ")';" | grep ${DEINSTALL_HOME}
+SHOW CHANNEL;
+exit;
+EORMAN
+if [[ $? -eq 0 ]];
+then
+   echo "RMAN default channel contains ${DEINSTALL_HOME}"
+   exit 1
+fi
+
+for x in $(grep "${DEINSTALL_HOME}" ${DB_ORACLE_HOME}/dbs/* 2>/dev/null);
+do
+   echo "$x ${DEINSTALL_HOME}"
+   exit 1
+done
+
+# Otherwise exit success (Oracle Home to be deinstalled does not appear to be in use)
+exit 0
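
As wired up in the role tasks below, this script runs as root via the Ansible script module under the name detect_oracle_home_in_use.sh. A minimal manual run, with an illustrative home path matching the format check in the role's main.yml, would be:

    # Exit status 0 = home appears unused and safe to deinstall; 1 = still referenced
    ./detect_oracle_home_in_use.sh /u01/app/oracle/product/19.0.0/db || echo "home still in use"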

@@ -0,0 +1,24 @@
+---
+- name: Detect if {{ deinstall_home }} Still in Use
+  script: detect_oracle_home_in_use.sh "{{ deinstall_home }}"
+  become: yes
+  become_user: root
+  changed_when: false
+
+- name: Check if {{ deinstall_home }} Home Still Exists
+  stat:
+    path: "{{ deinstall_home }}"
+  register: home_exists
+
+- name: Deinstall {{ deinstall_home }} Home
+  include: deinstall_oracle_home.yml
+  vars:
+    oracle_home: "{{ deinstall_home }}"
+  when: home_exists.stat.exists
+
+- name: Remove {{ deinstall_home }} Software Directory
+  file:
+    state: absent
+    path: "{{ deinstall_home }}"
+  become: yes
+  become_user: root

@@ -0,0 +1,33 @@
+---
+# The Oracle Home may be de-installed with the deinstall tool
+
+- name: Define Temporary Location for Response File
+  tempfile:
+    path: /tmp
+    suffix: deinstall_response
+    state: directory
+  register: deinstalldir
+
+- name: Generate Response File for Deinstall
+  shell: |
+    cd {{ oracle_home }}/deinstall
+    ./deinstall -silent -checkonly -o {{ deinstalldir.path }}
+
+- name: Get Names of Response Files
+  find:
+    path: "{{ deinstalldir.path }}"
+  register: response_files
+
+- name: Get Name of Response File
+  set_fact:
+    response_file: "{{ response_files.files[0].path }}"
+
+- name: Deinstall {{ deinstall_home }} Oracle Home
+  shell: |
+    cd {{ oracle_home }}/deinstall
+    ./deinstall -silent -paramfile "{{ response_file }}"
+
+- name: Remove Response File
+  file:
+    path: "{{ response_file }}"
+    state: absent
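
The find/set_fact pair exists because the deinstall tool names the generated response (.rsp) file itself, and the exact name varies, hence discovering it rather than hard-coding it. Run by hand, the same two-step flow looks roughly like this, with an illustrative home path:

    cd /u01/app/oracle/product/19.0.0/db/deinstall
    ./deinstall -silent -checkonly -o /tmp/deinstall_response    # writes the response file
    ./deinstall -silent -paramfile /tmp/deinstall_response/*.rsp # consumes it to do the removal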

@@ -0,0 +1,57 @@
+---
+- name: Deinstall Grid Infrastructure Software
+  when:
+    - oracle_grid_oracle_home is defined
+    - oracle_grid_oracle_home != 'DO_NOT_DEINSTALL'
+  block:
+    - name: Check Grid Infrastructure Home Format
+      fail:
+        msg: "Grid Infrastructure Oracle Home {{ oracle_grid_oracle_home }} does not match expected format."
+      when: not ( oracle_grid_oracle_home | regex_search("^/u01/app/grid/product/[0-9\.]+/grid$"))
+
+    - name: Deinstall Grid Infrastructure Home
+      include: deinstall.yml
+      vars:
+        deinstall_home: "{{ oracle_grid_oracle_home }}"
+
+- name: Deinstall Database Software
+  when:
+    - oracle_database_oracle_home is defined
+    - oracle_database_oracle_home != 'DO_NOT_DEINSTALL'
+  block:
+    - name: Check Database Home Format
+      fail:
+        msg: "Database Oracle Home {{ oracle_database_oracle_home }} does not match expected format."
+      when: not (oracle_database_oracle_home | regex_search("^/u01/app/oracle/product/[0-9\.]+/db$"))
+
+    - name: Deinstall Database Home
+      include: deinstall.yml
+      vars:
+        deinstall_home: "{{ oracle_database_oracle_home }}"
+
+- name: Check if inside AWS.
+  uri:
+    url: http://169.254.169.254/latest/meta-data
+    timeout: 20
+  register: aws_uri_check
+  failed_when: false
+  run_once: yes
+
+- set_fact:
+    is_aws_environment: "{{ aws_uri_check.status == 200 }}"
+  run_once: yes
+  delegate_to: localhost
+  delegate_facts: true
+
+# Set variables ready to run target deletion in next playbook
+# (The approach taken to delete these targets will differ between AWS and non-AWS environments)
+# NB: We use ansible_play_hosts to compile a list of targets which have not failed so far
+# as we do not want to delete targets for hosts where the deinstall has failed.
+- name: Prepare Ansible Controller Variables for OEM Target Deletion
+  set_fact:
+    oem_gi_home: "{{ oracle_grid_oracle_home }}"
+    oem_db_home: "{{ oracle_database_oracle_home }}"
+    deinstalled_targets: "{{ ansible_play_hosts }}"
+  delegate_to: localhost
+  delegate_facts: true
+  run_once: yes
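
The uri task detects AWS via the EC2 instance metadata service, which only answers on its link-local address inside EC2. The equivalent shell probe would be:

    # Prints 200 inside EC2 (IMDSv1); times out elsewhere, which the playbook treats as non-AWS.
    # Note: instances that enforce IMDSv2 would answer 401 to this token-less request.
    curl -s -o /dev/null -w '%{http_code}' --max-time 20 http://169.254.169.254/latest/meta-data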

@@ -0,0 +1 @@
+---

@@ -0,0 +1,2 @@
+oracle_patch_directory: /u02/stage
+oracle_inventory: /u01/app/oraInventory

@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Determine if an RMAN Catalog Upgrade is required
+
+. ~/.bash_profile
+
+CATALOG_PASSWORD=$(aws ssm get-parameters --region ${REGION} --with-decryption --name ${SSM_NAME} | jq -r '.Parameters[].Value')
+
+rman <<EORMAN
+connect catalog rman19c/${CATALOG_PASSWORD}
+exit;
+EORMAN
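
REGION and SSM_NAME are expected in the environment (from the sourced profile or the caller), and if the catalog schema is older than the RMAN client, the connect attempt itself reports that the catalog is not current, which is what makes this a usable check. A hypothetical invocation, with illustrative values only:

    export REGION=eu-west-2                          # illustrative region
    export SSM_NAME=/oracle/rman-catalog-password    # hypothetical parameter name
    ./rman_catalog_check.sh                          # hypothetical file name for this script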
