
Commit bfaa5c7

Merge pull request #700 from TejasC88/keystone1
Keystone Integration with RGW
2 parents 169eecf + ee81e1f commit bfaa5c7

3 files changed: +185 -0 lines changed
@@ -0,0 +1,6 @@
# script: test_keystone_auth.py
# polarion-ID: CEPH-10169
config:
  user_count: 1
  bucket_count: 1
  objects_count: 1
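This new config file is the one referenced as test_keystone_integration.yaml in the test script's docstring further down. A minimal sketch of how its workload knobs are consumed, assuming the repo's resource_op.Config loader exposes the keys under config: as attributes (which is how test_keystone_auth.py below uses them):

# Hypothetical snippet mirroring the test's __main__ block: load the yaml
# and read the workload knobs that drive the bucket/object loops.
from v2.lib import resource_op

config = resource_op.Config("test_keystone_integration.yaml")
config.read()
print(config.bucket_count, config.objects_count)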

rgw/v2/tests/aws/reusable.py (+37)

@@ -681,3 +681,40 @@ def get_object_attributes(aws_auth, bucket_name, key, endpoint):
        return resp
    except Exception as e:
        raise AWSCommandExecError(message=str(e))


def put_keystone_conf(rgw_service_name):
    """
    Apply the conf options required for keystone integration to rgw service
    """
    log.info("Apply keystone conf options")
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_api_version 3"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_url http://10.0.209.121:5000"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_admin_user demo"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_admin_password demo1"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_admin_domain Default"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_admin_project demo"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_implicit_tenants true"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_keystone_accepted_roles admin,user"
    )
    utils.exec_shell_cmd(
        f"ceph config set client.{rgw_service_name} rgw_s3_auth_use_keystone true"
    )
    log.info("restart RGW for options to take effect")
    utils.exec_shell_cmd(f"ceph orch restart {rgw_service_name}")
    time.sleep(10)
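The helper above only writes the options and restarts RGW; nothing is read back. A minimal verification sketch for the same module, assuming utils.exec_shell_cmd returns the command output as a string (as the test below relies on); the helper name is illustrative and not part of this commit:

# Hypothetical check: read the keystone options back with `ceph config get`
# and fail early if any of them did not land on the rgw client section.
def verify_keystone_conf(rgw_service_name):
    expected = {
        "rgw_keystone_api_version": "3",
        "rgw_keystone_admin_user": "demo",
        "rgw_keystone_implicit_tenants": "true",
        "rgw_s3_auth_use_keystone": "true",
    }
    for option, value in expected.items():
        out = utils.exec_shell_cmd(
            f"ceph config get client.{rgw_service_name} {option}"
        )
        if value not in str(out):
            raise AssertionError(f"{option} not applied, got: {out}")
    log.info("keystone conf options verified")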
test_keystone_auth.py (+142)

@@ -0,0 +1,142 @@
"""
test_keystone_auth - Test OSP Keystone integration with RGW

Usage: test_keystone_auth.py
Polarion ID - CEPH-10169
Configs - test_keystone_integration.yaml

Operation:
    Add the config options necessary for keystone integration
    Verify that the keystone user gets created as a user on the RGW side
    Create buckets and objects as the keystone user

"""

import argparse
import base64
import json
import logging
import os
import random
import sys
import time
import traceback

sys.path.append(os.path.abspath(os.path.join(__file__, "../../../..")))


from v2.lib import resource_op
from v2.lib.aws import auth as aws_auth
from v2.lib.exceptions import RGWBaseException, TestExecError
from v2.lib.s3.write_io_info import BasicIOInfoStructure, IOInfoInitialize
from v2.tests.aws import reusable as aws_reusable
from v2.tests.s3_swift import reusable as s3_reusable
from v2.utils import utils
from v2.utils.log import configure_logging
from v2.utils.test_desc import AddTestInfo

log = logging.getLogger(__name__)
TEST_DATA_PATH = None


def test_exec(config, ssh_con):
    """
    Executes test based on configuration passed
    Args:
        config(object): Test configuration
    """
    io_info_initialize = IOInfoInitialize()
    basic_io_structure = BasicIOInfoStructure()
    io_info_initialize.initialize(basic_io_structure.initial())

    rgw_service_name = utils.exec_shell_cmd("ceph orch ls | grep rgw").split(" ")[0]
    log.info(f"rgw service name is {rgw_service_name}")

    # Put keystone conf options
    aws_reusable.put_keystone_conf(rgw_service_name)

    access_demo = "f1363a717f8c470e8971bd644576011d"
    secret_demo = "42c450221cf044d9a0867b0e4acd52d3"
    project_demo = "83ea1f0a366a4e799b8458f2353cb36b"

    # Do an awscli query with keystone credentials
    rgw_port = utils.get_radosgw_port_no(ssh_con)
    rgw_host, rgw_ip = utils.get_hostname_ip(ssh_con)
    aws_auth.install_aws()

    cmd = f"AWS_ACCESS_KEY_ID={access_demo} AWS_SECRET_ACCESS_KEY={secret_demo} /usr/local/bin/aws s3 ls --endpoint http://{rgw_ip}:{rgw_port}"
    utils.exec_shell_cmd(cmd)
    time.sleep(2)
    cmd = "radosgw-admin user list"
    users = utils.exec_shell_cmd(cmd)
    if project_demo not in users:
        raise RGWBaseException("Keystone user not present in RGW user list")
    else:
        log.info("Keystone user present in RGW")

    # Create buckets and objects as the keystone user
    for bc in range(config.bucket_count):
        bucket_name = "keystone" + str(bc)
        cmd = f"AWS_ACCESS_KEY_ID={access_demo} AWS_SECRET_ACCESS_KEY={secret_demo} /usr/local/bin/aws s3 mb s3://{bucket_name} --endpoint http://{rgw_ip}:{rgw_port} --region us-east-1"
        out = utils.exec_shell_cmd(cmd)
        log.info("Bucket created: " + bucket_name)
        for obj in range(config.objects_count):
            utils.exec_shell_cmd(f"fallocate -l 1K object{obj}")
            cmd = f"AWS_ACCESS_KEY_ID={access_demo} AWS_SECRET_ACCESS_KEY={secret_demo} /usr/local/bin/aws s3 cp object{obj} s3://{bucket_name}/object{obj} --endpoint http://{rgw_ip}:{rgw_port} --region us-east-1"
            out = utils.exec_shell_cmd(cmd)
            log.info("Object created on the bucket owned by the keystone user")
        cmd = f"AWS_ACCESS_KEY_ID={access_demo} AWS_SECRET_ACCESS_KEY={secret_demo} /usr/local/bin/aws s3 ls s3://{bucket_name} --endpoint http://{rgw_ip}:{rgw_port}"
        out = utils.exec_shell_cmd(cmd)
        log.info(f"Listing bucket {bucket_name}: {out}")

    # check for any crashes during the execution
    crash_info = s3_reusable.check_for_crash()
    if crash_info:
        raise TestExecError("ceph daemon crash found!")


if __name__ == "__main__":

    test_info = AddTestInfo("Test to verify keystone integration with RGW")

    try:
        project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
        test_data_dir = "test_data"
        TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
        log.info(f"TEST_DATA_PATH: {TEST_DATA_PATH}")
        if not os.path.exists(TEST_DATA_PATH):
            log.info("test data dir does not exist, creating it")
            os.makedirs(TEST_DATA_PATH)
        parser = argparse.ArgumentParser(description="RGW S3 bucket creation using AWS")
        parser.add_argument("-c", dest="config", help="Test yaml configuration")
        parser.add_argument(
            "-log_level",
            dest="log_level",
            help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]",
            default="info",
        )
        parser.add_argument(
            "--rgw-node", dest="rgw_node", help="RGW Node", default="127.0.0.1"
        )
        args = parser.parse_args()
        yaml_file = args.config
        rgw_node = args.rgw_node
        ssh_con = None
        if rgw_node != "127.0.0.1":
            ssh_con = utils.connect_remote(rgw_node)
        log_f_name = os.path.basename(os.path.splitext(yaml_file)[0])
        configure_logging(f_name=log_f_name, set_level=args.log_level.upper())
        config = resource_op.Config(yaml_file)
        config.read()
        test_exec(config, ssh_con)
        test_info.success_status("test passed")
        sys.exit(0)

    except (RGWBaseException, Exception) as e:
        log.error(e)
        log.error(traceback.format_exc())
        test_info.failed_status("test failed")
        sys.exit(1)

    finally:
        utils.cleanup_test_data_path(TEST_DATA_PATH)
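The test is driven the usual way for this repo, e.g. python test_keystone_auth.py -c <path to test_keystone_integration.yaml> --rgw-node <rgw node ip>, matching the argparse options above. Note that put_keystone_conf() leaves the keystone options set on the RGW client section; a possible teardown helper (not part of this commit, sketched on the assumption that reverting the options with `ceph config rm` is acceptable) could look like:

# Hypothetical teardown sketch: revert the options applied by put_keystone_conf()
# so later tests see the default (non-keystone) auth behaviour again.
def remove_keystone_conf(rgw_service_name):
    options = [
        "rgw_keystone_api_version",
        "rgw_keystone_url",
        "rgw_keystone_admin_user",
        "rgw_keystone_admin_password",
        "rgw_keystone_admin_domain",
        "rgw_keystone_admin_project",
        "rgw_keystone_implicit_tenants",
        "rgw_keystone_accepted_roles",
        "rgw_s3_auth_use_keystone",
    ]
    for option in options:
        utils.exec_shell_cmd(f"ceph config rm client.{rgw_service_name} {option}")
    utils.exec_shell_cmd(f"ceph orch restart {rgw_service_name}")
    time.sleep(10)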
