Skip to content

Commit 5eff625

Browse files
fix(mock-authz): running locally does not require auth header (#156)
1 parent e74f55f commit 5eff625

20 files changed

+71
-138
lines changed

bin/setup_psqlgraph.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def create_indexes(host, user, password, database):
9898
index = lambda t, c: ["CREATE INDEX ON {} ({})".format(t, x) for x in c]
9999
for scls in Node.get_subclasses():
100100
tablename = scls.__tablename__
101-
list(map(engine.execute, index(tablename, ["node_id",])))
101+
list(map(engine.execute, index(tablename, ["node_id"])))
102102
list(
103103
map(
104104
engine.execute,
@@ -113,7 +113,7 @@ def create_indexes(host, user, password, database):
113113
list(
114114
map(
115115
engine.execute,
116-
index(scls.__tablename__, ["src_id", "dst_id", "dst_id, src_id",]),
116+
index(scls.__tablename__, ["src_id", "dst_id", "dst_id, src_id"]),
117117
)
118118
)
119119

bin/setup_test_database.py

+1-5
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,7 @@
1111

1212
import argparse
1313

14-
from setup_psqlgraph import (
15-
setup_database,
16-
create_tables,
17-
create_indexes,
18-
)
14+
from setup_psqlgraph import setup_database, create_tables, create_indexes
1915

2016

2117
if __name__ == "__main__":

peregrine/api.py

+3-11
Original file line numberDiff line numberDiff line change
@@ -71,14 +71,10 @@ def db_init(app):
7171

7272
# Set CORS options on app configuration
7373
def cors_init(app):
74-
accepted_headers = [
75-
"Content-Type",
76-
"X-Requested-With",
77-
"X-CSRFToken",
78-
]
74+
accepted_headers = ["Content-Type", "X-Requested-With", "X-CSRFToken"]
7975
CORS(
8076
app,
81-
resources={r"/*": {"origins": "*"},},
77+
resources={r"/*": {"origins": "*"}},
8278
headers=accepted_headers,
8379
expose_headers=["Content-Disposition"],
8480
)
@@ -174,11 +170,7 @@ def version():
174170
"version": pkg_resources.get_distribution("gen3dictionary").version,
175171
"commit": "",
176172
}
177-
base = {
178-
"version": VERSION,
179-
"commit": COMMIT,
180-
"dictionary": dictver,
181-
}
173+
base = {"version": VERSION, "commit": COMMIT, "dictionary": dictver}
182174

183175
return jsonify(base), 200
184176

peregrine/dev_settings.example.py

+1-4
Original file line numberDiff line numberDiff line change
@@ -43,10 +43,7 @@
4343
},
4444
}
4545
}
46-
SUBMISSION = {
47-
"bucket": "test_submission",
48-
"host": CLEVERSAFE_HOST,
49-
}
46+
SUBMISSION = {"bucket": "test_submission", "host": CLEVERSAFE_HOST}
5047
# Postgres
5148
PSQLGRAPH = {
5249
"host": os.getenv("GDC_PG_HOST", "localhost"),

peregrine/dev_settings.py

+1-4
Original file line numberDiff line numberDiff line change
@@ -50,10 +50,7 @@
5050
},
5151
}
5252
}
53-
SUBMISSION = {
54-
"bucket": "test_submission",
55-
"host": CLEVERSAFE_HOST,
56-
}
53+
SUBMISSION = {"bucket": "test_submission", "host": CLEVERSAFE_HOST}
5754
# Postgres
5855
PSQLGRAPH = {
5956
"host": os.getenv("GDC_PG_HOST", "localhost"),

peregrine/dictionary.py

+3-8
Original file line numberDiff line numberDiff line change
@@ -16,14 +16,9 @@
1616
this_module = sys.modules[__name__]
1717

1818
#: The data dictionary must implement these attributes.
19-
required_attrs = [
20-
"resolvers",
21-
"schema",
22-
]
23-
24-
optional_attrs = [
25-
"settings",
26-
]
19+
required_attrs = ["resolvers", "schema"]
20+
21+
optional_attrs = ["settings"]
2722

2823
resolvers = None
2924
schema = None

peregrine/models.py

+1-6
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,7 @@
2424
this_module = sys.modules[__name__]
2525

2626
#: The data model must implement these attributes.
27-
required_attrs = [
28-
"Program",
29-
"Project",
30-
"submission",
31-
"VersionedNode",
32-
]
27+
required_attrs = ["Program", "Project", "submission", "VersionedNode"]
3328

3429
# These could be assigned programmatically, as in:
3530
#

peregrine/resources/submission/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
import datamodelutils.models as models
1313
import flask
1414

15-
from peregrine.auth import current_user, get_read_access_projects
15+
from peregrine.auth import get_read_access_projects
1616
import peregrine.blueprints
1717
from peregrine.resources.submission import graphql
1818

peregrine/resources/submission/constants.py

+4-12
Original file line numberDiff line numberDiff line change
@@ -95,15 +95,11 @@ def submitted_state():
9595

9696
#: This is a list of states that an entity must be in to allow
9797
#: deletion
98-
ALLOWED_DELETION_STATES = [
99-
"validated",
100-
]
98+
ALLOWED_DELETION_STATES = ["validated"]
10199

102100
#: This is a list of file_states that a file must be in to allow
103101
#: deletion
104-
ALLOWED_DELETION_FILE_STATES = [
105-
submitted_state,
106-
]
102+
ALLOWED_DELETION_FILE_STATES = [submitted_state]
107103

108104

109105
#: These categories should all have a ``state`` associated with each type
@@ -119,19 +115,15 @@ def submitted_state():
119115

120116
#: Possible entity.state transitions
121117
#: { to_state: from_state }
122-
ENTITY_STATE_TRANSITIONS = {
123-
"submitted": ["validated", None],
124-
}
118+
ENTITY_STATE_TRANSITIONS = {"submitted": ["validated", None]}
125119

126120
#: The key that specifies the high level state that a file is in the
127121
#: pipeline
128122
FILE_STATE_KEY = "file_state"
129123

130124
#: Possible data_file.file_state transitions
131125
#: { to_state: from_state }
132-
FILE_STATE_TRANSITIONS = {
133-
"submitted": ["validated"],
134-
}
126+
FILE_STATE_TRANSITIONS = {"submitted": ["validated"]}
135127

136128
#: The auth role required to take action actions
137129
ROLE_SUBMIT = "release"

peregrine/resources/submission/graphql/counts.py

+1-4
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,7 @@
77
import node_subclass as ns
88
from . import transaction
99

10-
from .base import (
11-
assert_type,
12-
munge,
13-
)
10+
from .base import assert_type, munge
1411

1512
from .util import clean_count
1613

peregrine/resources/submission/graphql/node.py

+9-13
Original file line numberDiff line numberDiff line change
@@ -416,9 +416,7 @@ class Node(graphene.Interface):
416416
updated_datetime = graphene.String()
417417

418418
# These fields depend on these columns being loaded
419-
fields_depend_on_columns = {
420-
"project_id": {"program", "code"},
421-
}
419+
fields_depend_on_columns = {"project_id": {"program", "code"}}
422420

423421

424422
def resolve_node(self, info, **args):
@@ -549,7 +547,7 @@ def get_node_class_property_args(cls, not_props_io={}):
549547
args_not = {}
550548
args_not.update(get_node_class_property_attrs(cls))
551549
not_props_io[not_props_io_name] = type(
552-
not_props_io_name, (graphene.InputObjectType,), args_not,
550+
not_props_io_name, (graphene.InputObjectType,), args_not
553551
)
554552
globals()[not_props_io[not_props_io_name].__name__] = not_props_io[
555553
not_props_io_name
@@ -689,7 +687,7 @@ def get_node_class_special_attrs(cls):
689687
def get_node_class_link_attrs(cls):
690688
attrs = {
691689
name: graphene.List(
692-
__name__ + "." + link["type"].label, args=get_node_class_args(link["type"]),
690+
__name__ + "." + link["type"].label, args=get_node_class_args(link["type"])
693691
)
694692
for name, link in cls._pg_edges.items()
695693
}
@@ -703,7 +701,7 @@ def resolve__related_cases(self, info, args):
703701

704702
q = with_path_to(
705703
get_authorized_query(md.Case),
706-
{"type": cls.label, "id": self.id,},
704+
{"type": cls.label, "id": self.id},
707705
info,
708706
name="related_cases",
709707
)
@@ -735,7 +733,7 @@ def resolve_transaction_logs_count(self, info, **args):
735733

736734
attrs["resolve__transaction_logs_count"] = resolve_transaction_logs_count
737735
attrs["_transaction_logs_count"] = graphene.Field(
738-
graphene.Int, args=transaction.get_transaction_log_args(),
736+
graphene.Int, args=transaction.get_transaction_log_args()
739737
)
740738

741739
def resolve_transaction_logs(self, info, **args):
@@ -744,7 +742,7 @@ def resolve_transaction_logs(self, info, **args):
744742

745743
attrs["resolve__transaction_logs"] = resolve_transaction_logs
746744
attrs["_transaction_logs"] = graphene.List(
747-
transaction.TransactionLog, args=transaction.get_transaction_log_args(),
745+
transaction.TransactionLog, args=transaction.get_transaction_log_args()
748746
)
749747

750748
_links_args = get_node_interface_args()
@@ -948,9 +946,7 @@ def resolver(self, info, cls=cls, gql_object=gql_object, **args):
948946
capp.logger.exception(e)
949947
raise
950948

951-
field = graphene.Field(
952-
graphene.List(gql_object), args=get_node_class_args(cls),
953-
)
949+
field = graphene.Field(graphene.List(gql_object), args=get_node_class_args(cls))
954950

955951
res_name = "resolve_{}".format(name)
956952
resolver.__name__ = res_name
@@ -1062,7 +1058,7 @@ def instantiate_graphene(t):
10621058

10631059
# add required node fields
10641060
DataNode.shared_fields.update(
1065-
{"id": graphene.String(), "type": graphene.String(),}
1061+
{"id": graphene.String(), "type": graphene.String()}
10661062
)
10671063

10681064
return DataNode.shared_fields
@@ -1085,7 +1081,7 @@ def get_datanode_interface_args():
10851081
args = get_base_node_args()
10861082
args.update(get_datanode_fields_dict())
10871083
args.update(
1088-
{"of_type": graphene.List(graphene.String), "project_id": graphene.String(),}
1084+
{"of_type": graphene.List(graphene.String), "project_id": graphene.String()}
10891085
)
10901086
return args
10911087

peregrine/resources/submission/graphql/transaction.py

+4-11
Original file line numberDiff line numberDiff line change
@@ -180,9 +180,7 @@ class TransactionDocument(graphene.ObjectType):
180180
response = graphene.Field(TransactionResponse)
181181

182182
# These fields depend on these columns being loaded
183-
fields_depend_on_columns = {
184-
"doc_size": {"doc"},
185-
}
183+
fields_depend_on_columns = {"doc_size": {"doc"}}
186184

187185
@classmethod
188186
def resolve_doc_size(cls, document, *args, **kwargs):
@@ -224,10 +222,7 @@ class TransactionLog(graphene.ObjectType):
224222
related_cases = graphene.List(TransactionResponseEntityRelatedCases)
225223

226224
# These fields depend on these columns being loaded
227-
fields_depend_on_columns = {
228-
"type": {"role"},
229-
"project_id": {"project", "program"},
230-
}
225+
fields_depend_on_columns = {"type": {"role"}, "project_id": {"project", "program"}}
231226

232227
TYPE_MAP = {
233228
"update": "upload",
@@ -457,8 +452,6 @@ def resolve_transaction_log_count(self, info, **args):
457452
return q.count()
458453

459454

460-
TransactionLogField = graphene.List(TransactionLog, args=get_transaction_log_args(),)
455+
TransactionLogField = graphene.List(TransactionLog, args=get_transaction_log_args())
461456

462-
TransactionLogCountField = graphene.Field(
463-
graphene.Int, args=get_transaction_log_args(),
464-
)
457+
TransactionLogCountField = graphene.Field(graphene.Int, args=get_transaction_log_args())

peregrine/resources/submission/graphql/traversal.py

+1-6
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,7 @@
3434
# to annotation.
3535
#
3636
# See :func:`is_valid_direction` for more details.
37-
CATEGORY_LEVEL = {
38-
"administrative": 0,
39-
"biospecimen": 1,
40-
"clinical": 1,
41-
"data_file": 3,
42-
}
37+
CATEGORY_LEVEL = {"administrative": 0, "biospecimen": 1, "clinical": 1, "data_file": 3}
4338

4439

4540
def is_valid_direction(node, visited):

peregrine/test_settings.py

+2-8
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,7 @@
1313
"user_domain_name": "some_domain",
1414
}
1515

16-
SUBMISSION = {
17-
"bucket": "test_submission",
18-
"host": "host",
19-
}
16+
SUBMISSION = {"bucket": "test_submission", "host": "host"}
2017
STORAGE = {"s3": {"keys": {}, "kwargs": {}}}
2118
STORAGE["s3"]["keys"]["host"] = {"access_key": "fake", "secret_key": "sooper_sekrit"}
2219
STORAGE["s3"]["kwargs"]["host"] = {}
@@ -34,10 +31,7 @@
3431
PEREGRINE_PORT = "443"
3532

3633
# Slicing settings
37-
SLICING = {
38-
"host": "localhost",
39-
"gencode": "REPLACEME",
40-
}
34+
SLICING = {"host": "localhost", "gencode": "REPLACEME"}
4135

4236
FLASK_SECRET_KEY = "flask_test_key"
4337

peregrine/utils/scheduling.py

+2-5
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,7 @@
44
import cdispyutils
55

66
from peregrine.errors import InternalError
7-
from peregrine.globals import (
8-
ASYNC_MAX_Q_LEN,
9-
ERR_ASYNC_SCHEDULING,
10-
)
7+
from peregrine.globals import ASYNC_MAX_Q_LEN, ERR_ASYNC_SCHEDULING
118

129
logger = cdispyutils.log.get_logger("submission.scheduling")
1310

@@ -67,7 +64,7 @@ def grow(self, n_workers):
6764
started immediately.
6865
"""
6966
workers = [
70-
self.worker_class(target=async_pool_consumer, args=(self.task_queue,),)
67+
self.worker_class(target=async_pool_consumer, args=(self.task_queue,))
7168
for _ in range(n_workers)
7269
]
7370

run.py

+10-6
Original file line numberDiff line numberDiff line change
@@ -89,20 +89,24 @@ def run_with_fake_auth():
8989
new_callable=PropertyMock,
9090
return_value=lambda: True,
9191
), patch(
92-
"peregrine.auth.verify_hmac", new=set_user,
92+
"peregrine.auth.verify_hmac", new=set_user
9393
):
9494
run_for_development(debug=debug, threaded=True)
9595

9696

9797
def run_with_fake_authz():
9898
"""
99-
Mocks arborist calls.
99+
By mocking `get_read_access_projects`, we avoid checking the
100+
Authorization header and access token, and avoid making arborist
101+
calls to fetch a list of authorized resources.
100102
"""
101-
auth_mapping = {} # modify this to mock specific access
103+
# `user_projects` contains a list of `project_id`s (in format
104+
# "<program.name>-<project.code>") the user has access to.
105+
# Update it to mock specific access:
106+
user_projects = []
102107
with patch(
103-
"gen3authz.client.arborist.client.ArboristClient.auth_mapping",
104-
new_callable=PropertyMock,
105-
return_value=lambda x: auth_mapping,
108+
"peregrine.resources.submission.get_read_access_projects",
109+
return_value=user_projects,
106110
):
107111
run_for_development(debug=debug, threaded=True)
108112

0 commit comments

Comments
 (0)