
Commit

add configuration for bulk request timeout (#57)
pymonger authored Mar 21, 2023
1 parent 7e7b0ff commit 60ab5fd
Showing 2 changed files with 8 additions and 2 deletions.
config/settings.cfg.tmpl (3 additions, 0 deletions)
@@ -14,6 +14,9 @@ GRQ_INDEX = "grq"
 # ElasticSearch geonames index
 GEONAMES_INDEX = "geonames"

+# timeout value for ElasticSearch bulk requests (defaults to 10)
+BULK_REQUEST_TIMEOUT = 30
+
 # Redis URL
 REDIS_URL = "redis://{{ MOZART_REDIS_PVT_IP }}:6379/0"

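For context, a minimal sketch of how a setting like this typically reaches the application, assuming the rendered settings.cfg is loaded with Flask's from_pyfile; the create_app factory below is illustrative, not grq2's actual initialization:

from flask import Flask

def create_app():
    # illustrative app factory; grq2's real setup may differ
    app = Flask(__name__)
    # settings.cfg.tmpl is rendered to a plain Python-syntax settings.cfg,
    # which Flask can load directly (silent=True tolerates a missing file)
    app.config.from_pyfile("settings.cfg", silent=True)
    return app

app = create_app()
# fall back to 10 seconds when the setting is absent, matching the template comment
bulk_request_timeout = app.config.get("BULK_REQUEST_TIMEOUT", 10)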
grq2/services/api_v02/datasets.py (5 additions, 2 deletions)
@@ -123,6 +123,9 @@ class IndexDataset(Resource):
     @grq_ns.marshal_with(resp_model)
     @grq_ns.expect(parser, validate=True)
     def post(self):
+        # get bulk request timeout from config
+        bulk_request_timeout = app.config.get('BULK_REQUEST_TIMEOUT', 10)
+
         try:
             datasets = json.loads(request.json)

@@ -134,14 +137,14 @@ def post(self):
                 docs_bulk.append({"index": {"_index": index, "_id": _id}})
                 docs_bulk.append(ds)

-            response = grq_es.es.bulk(body=docs_bulk)
+            response = grq_es.es.bulk(body=docs_bulk, request_timeout=bulk_request_timeout)
             if response["errors"] is True:
                 app.logger.error(response)
                 delete_docs = []
                 for doc in docs_bulk:
                     if "index" in doc:
                         delete_docs.append({"delete": doc["index"]})
-                grq_es.es.bulk(delete_docs)
+                grq_es.es.bulk(delete_docs, request_timeout=bulk_request_timeout)
                 return {
                     "success": False,
                     "message": response["items"]
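As a usage note, here is a self-contained sketch of the bulk-indexing pattern this change affects, using the same 7.x-style es.bulk(body=..., request_timeout=...) call shown in the diff; the local client, index name, and document are hypothetical stand-ins for grq_es and real dataset metadata:

from elasticsearch import Elasticsearch

# stand-ins for illustration; the real handler uses grq_es and the Flask config value
es = Elasticsearch(["http://localhost:9200"])
bulk_request_timeout = 30

docs_bulk = [
    {"index": {"_index": "grq_v1.0_example", "_id": "example-id"}},  # hypothetical index/id
    {"id": "example-id", "metadata": {}},
]

# request_timeout bounds how long the client waits for this bulk call,
# overriding the transport-wide default for just this request
response = es.bulk(body=docs_bulk, request_timeout=bulk_request_timeout)
if response["errors"]:
    # roll back the partial write, mirroring the error path in the diff above
    delete_docs = [{"delete": doc["index"]} for doc in docs_bulk if "index" in doc]
    es.bulk(body=delete_docs, request_timeout=bulk_request_timeout)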
