
Commit a5f51cc

cjonesy authored and bolkedebruin committed
[AIRFLOW-1675] Fix docstrings for API docs
Some docstrings were missing spaces, causing them to render strangely in documentation. This corrects the issue by adding in the spaces.

Closes apache#2667 from cjonesy/master
1 parent 0ba6ab6 commit a5f51cc
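
Background on the fix (a sketch for context, not part of the commit): these docstrings are rendered by Sphinx as reStructuredText, and a :param:/:type:/:return: field list is only parsed as a field list when a blank line separates it from the preceding paragraph. Without that blank line, docutils treats the field lines as a continuation of the description, which is the strange rendering the commit message describes. A minimal before/after on a hypothetical function:

    # Renders badly: with no blank line, the :param:/:type: lines are
    # folded into the description paragraph instead of becoming a field list.
    def delete_table(table, ignore_if_missing=False):
        """
        Delete an existing table.
        :param table: name of the table to delete
        :type table: str
        """

    # Renders correctly: the blank line lets docutils parse the field list.
    def delete_table_fixed(table, ignore_if_missing=False):
        """
        Delete an existing table.

        :param table: name of the table to delete
        :type table: str
        """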

7 files changed, +19 -1 lines changed

airflow/contrib/hooks/bigquery_hook.py (+3)

@@ -681,6 +681,7 @@ def run_table_delete(self, deletion_dataset_table, ignore_if_missing=False):
         Delete an existing table from the dataset;
         If the table does not exist, return an error unless ignore_if_missing
         is set to True.
+
         :param deletion_dataset_table: A dotted
         (<project>.|<project>:)<dataset>.<table> that indicates which table
         will be deleted.
@@ -720,6 +721,7 @@ def run_table_upsert(self, dataset_id, table_resource, project_id=None):
         If the table already exists, update the existing table.
         Since BigQuery does not natively allow table upserts, this is not an
         atomic operation.
+
         :param dataset_id: the dataset to upsert the table into.
         :type dataset_id: str
         :param table_resource: a table resource. see
@@ -774,6 +776,7 @@ def run_grant_dataset_view_access(self,
         Grant authorized view access of a dataset to a view table.
         If this view has already been granted access to the dataset, do nothing.
         This method is not atomic. Running it may clobber a simultaneous update.
+
         :param source_dataset: the source dataset
         :type source_dataset: str
         :param view_dataset: the dataset that the view is in

airflow/contrib/operators/spark_sql_operator.py (+1)

@@ -20,6 +20,7 @@
 class SparkSqlOperator(BaseOperator):
     """
     Execute Spark SQL query
+
     :param sql: The SQL query to execute
     :type sql: str
     :param conf: arbitrary Spark configuration property

airflow/contrib/operators/spark_submit_operator.py (+1)

@@ -23,6 +23,7 @@ class SparkSubmitOperator(BaseOperator):
     This hook is a wrapper around the spark-submit binary to kick off a spark-submit job.
     It requires that the "spark-submit" binary is in the PATH or the spark-home is set
     in the extra on the connection.
+
     :param application: The application that submitted as a job, either jar or py file.
     :type application: str
     :param conf: Arbitrary Spark configuration properties

airflow/executors/base_executor.py (+1)

@@ -78,6 +78,7 @@ def queue_task_instance(
     def has_task(self, task_instance):
         """
         Checks if a task is either queued or running in this executor
+
         :param task_instance: TaskInstance
         :return: True if the task is known to this executor
         """

airflow/hooks/oracle_hook.py (+1)

@@ -35,6 +35,7 @@ def get_conn(self):
         Optional parameters for using a custom DSN connection (instead of using a server alias from tnsnames.ora)
         The dsn (data source name) is the TNS entry (from the Oracle names server or tnsnames.ora file)
         or is a string like the one returned from makedsn().
+
         :param dsn: the host address for the Oracle server
         :param service_name: the db_unique_name of the database that you are connecting to (CONNECT_DATA part of TNS)
         You can set these parameters in the extra fields of your connection

airflow/models.py (+11)

@@ -93,6 +93,7 @@ def get_fernet():

    This function could fail either because Cryptography is not installed
    or because the Fernet key is invalid.
+
    :return: Fernet object
    :raises: AirflowException if there's a problem trying to load Fernet
    """
@@ -1915,6 +1916,7 @@ class SkipMixin(LoggingMixin):
     def skip(self, dag_run, execution_date, tasks):
         """
         Sets tasks instances to skipped from the same dag run.
+
         :param dag_run: the DagRun for which to set the tasks to skipped
         :param execution_date: execution_date
         :param tasks: tasks to skip (not task_ids)
@@ -2985,6 +2987,7 @@ def get_run_dates(self, start_date, end_date=None):
         """
         Returns a list of dates between the interval received as parameter using this
         dag's schedule interval. Returned dates can be used for execution dates.
+
         :param start_date: the start date of the interval
         :type start_date: datetime
         :param end_date: the end date of the interval, defaults to datetime.utcnow()
@@ -3153,6 +3156,7 @@ def is_paused(self, session=None):
     def get_active_runs(self, session=None):
         """
         Returns a list of dag run execution dates currently running
+
         :param session:
         :return: List of execution dates
         """
@@ -3168,6 +3172,7 @@ def get_active_runs(self, session=None):
     def get_num_active_runs(self, external_trigger=None, session=None):
         """
         Returns the number of active "running" dag runs
+
         :param external_trigger: True for externally triggered active dag runs
         :type external_trigger: bool
         :param session:
@@ -3645,6 +3650,7 @@ def run(
             delay_on_limit_secs=1.0):
         """
         Runs the DAG.
+
         :param start_date: the start date of the range to run
         :type start_date: datetime
         :param end_date: the end date of the range to run
@@ -4057,6 +4063,7 @@ def set(
         TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be
         removed in Airflow 2.0. :param enable_pickling: If pickling is not enabled, the
         XCOM value will be parsed as JSON instead.
+
         :return: None
         """
         session.expunge_all()
@@ -4110,6 +4117,7 @@ def get_one(
         """
         Retrieve an XCom value, optionally meeting certain criteria.
         TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0.
+
         :param enable_pickling: If pickling is not enabled, the XCOM value will be parsed to JSON instead.
         :return: XCom value
         """
@@ -4412,6 +4420,7 @@ def find(dag_id=None, run_id=None, execution_date=None,
             session=None):
        """
        Returns a set of dag runs for the given search criteria.
+
        :param dag_id: the dag_id to find dag runs for
        :type dag_id: integer, list
        :param run_id: defines the the run id for this dag run
@@ -4485,6 +4494,7 @@ def get_task_instances(self, state=None, session=None):
    def get_task_instance(self, task_id, session=None):
        """
        Returns the task instance specified by task_id for this dag run
+
        :param task_id: the task id
        """

@@ -4535,6 +4545,7 @@ def update_state(self, session=None):
        """
        Determines the overall state of the DagRun based on the state
        of its TaskInstances.
+
        :return: State
        """


airflow/operators/generic_transfer.py (+1 -1)

@@ -23,7 +23,7 @@ class GenericTransfer(BaseOperator):
     needs to expose a `get_records` method, and the destination a
     `insert_rows` method.

-    This is mean to be used on small-ish datasets that fit in memory.
+    This is meant to be used on small-ish datasets that fit in memory.

     :param sql: SQL query to execute against the source database
     :type sql: str
