
Commit 14e015b ("merge in main")
2 parents: 4a605ad + 1c57cd1

36 files changed: +2798 -845 lines

.github/workflows/continuous-integration.yml (+1 -1)

@@ -6,7 +6,7 @@ on:
 jobs:
   build:

-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     strategy:
       matrix:
         python-version: [3.8, 3.9]

.github/workflows/deploy-documentation.yml (+11 -10)

@@ -5,7 +5,7 @@ on:
     branches:
       - main # Adjust to your main branch
   workflow_dispatch: null
-
+
 # Set permissions of GITHUB_TOKEN
 permissions:
   contents: read
@@ -19,11 +19,13 @@ concurrency:

 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04

     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0

       - name: Set up Python
         uses: actions/setup-python@v4
@@ -32,25 +34,24 @@ jobs:

       - name: Install dependencies
         run: |
-          python -m pip install --upgrade pip
-          pip install sphinx sphinx-autobuild sphinx_rtd_theme
+          make init

       - name: Generate Documentation
         run: |
-          sphinx-build -b html docs/ docs/_build
+          make docs

       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v1
+        uses: actions/upload-pages-artifact@v3
         with:
           path: docs/_build
-
+

   deploy:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     needs: build
     permissions:
       pages: write
       id-token: write

     steps:
       - name: Deploy to GitHub Pages
-        uses: actions/deploy-pages@v1
+        uses: actions/deploy-pages@v4

.gitignore (+2)

@@ -28,3 +28,5 @@ pyrightconfig.json

 # don't store html build file for docs
 /docs/_build
+docs/digital_land*
+docs/modules.rst

Makefile (+6 -8)

@@ -40,17 +40,15 @@ endif
 	# install pre-commits
 	pre-commit install

-# build docs from doc strings
-.PHONY: api-docs
-api-docs:
-	sphinx-apidoc -o docs/ api/digital_land
-# Build documentation with Sphinx
+# Build documentation with Sphinx use for deploying documentation
 .PHONY: docs
-docs: api-docs
+docs:
+	sphinx-apidoc -o docs/ digital_land
 	sphinx-build -b html docs/ docs/_build

-# Serve the documentation locally with live-reloading
-serve-docs: api-docs
+# Serve the documentation locally with live-reloading. Use this for local building
+serve-docs:
+	sphinx-apidoc -o docs/ digital_land
 	sphinx-autobuild docs/ docs/_build --host 0.0.0.0 --port 8000

 # Clean up the generated documentation
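
For anyone building the documentation without make, a rough Python equivalent of what the updated docs target now runs; the two commands are taken straight from the diff and assume Sphinx is already installed (for example via make init) and that this is executed from the repository root.

import subprocess

# Regenerate the API .rst pages from the digital_land package docstrings.
subprocess.run(["sphinx-apidoc", "-o", "docs/", "digital_land"], check=True)

# Build the HTML site into docs/_build.
subprocess.run(["sphinx-build", "-b", "html", "docs/", "docs/_build"], check=True)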

digital_land/cli.py (+47)

@@ -30,6 +30,7 @@
     organisation_check,
     save_state,
     compare_state,
+    add_data,
 )

 from digital_land.command_arguments import (
@@ -366,6 +367,52 @@ def retire_endpoints_cmd(config_collections_dir, csv_path):
     return collection_retire_endpoints_and_sources(config_collections_dir, csv_path)


+@cli.command("add-data")
+@click.argument("csv-path", nargs=1, type=click.Path())
+@click.argument("collection-name", nargs=1, type=click.STRING)
+@click.option("--collection-dir", "-c", nargs=1, type=click.Path(exists=True))
+@click.option("--pipeline-dir", "-p", nargs=1, type=click.Path(exists=True))
+@click.option(
+    "--specification-dir", "-s", type=click.Path(exists=True), default="specification/"
+)
+@click.option(
+    "--organisation-path",
+    "-o",
+    type=click.Path(exists=True),
+    default="var/cache/organisation.csv",
+)
+@click.option(
+    "--cache-dir",
+    type=click.Path(exists=True),
+)
+def add_data_cmd(
+    csv_path,
+    collection_name,
+    collection_dir,
+    pipeline_dir,
+    specification_dir,
+    organisation_path,
+    cache_dir,
+):
+    csv_file_path = Path(csv_path)
+    if not csv_file_path.is_file():
+        logging.error(f"CSV file not found at path: {csv_path}")
+        sys.exit(2)
+    collection_dir = Path(collection_dir)
+    pipeline_dir = Path(pipeline_dir)
+    specification_dir = Path(specification_dir)
+
+    return add_data(
+        csv_file_path,
+        collection_name,
+        collection_dir,
+        pipeline_dir,
+        specification_dir,
+        organisation_path,
+        cache_dir=cache_dir,
+    )
+
+
 # edit to add collection_name in
 @cli.command("add-endpoints-and-lookups")
 @click.argument("csv-path", nargs=1, type=click.Path())
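
The new add-data command can be exercised end to end with Click's test runner. The sketch below is illustrative only: the CSV file name, collection name, and directory paths are placeholders, and because the directory options and the specification/ and var/cache/organisation.csv defaults use click.Path(exists=True), those paths must exist locally for the invocation to get past argument parsing.

from click.testing import CliRunner

from digital_land.cli import cli

runner = CliRunner()
result = runner.invoke(
    cli,
    [
        "add-data",
        "endpoint.csv",        # csv-path argument (placeholder file)
        "ancient-woodland",    # collection-name argument (placeholder)
        "--collection-dir", "collection/",   # placeholder directory
        "--pipeline-dir", "pipeline/",       # placeholder directory
    ],
)
print(result.exit_code)
print(result.output)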

digital_land/collection.py (+13 -8)

@@ -168,6 +168,11 @@ def load(
             organisations = set()
             datasets = set()
             for endpoint in resource["endpoints"]:
+                if endpoint not in source.records:
+                    raise RuntimeError(
+                        f"Endpoint '{endpoint}' not found in source. Check the endpoint.csv and source.csv files."
+                    )
+
                 for entry in source.records[endpoint]:
                     organisations.add(entry["organisation"])
                     datasets = set(
@@ -354,7 +359,7 @@ def load(self, directory=None):
         self.source.load(directory=directory)
         self.endpoint.load(directory=directory)

-        regenerate_resouces = False
+        regenerate_resources = False

         # Try to load log store from csv first
         try:
@@ -363,22 +368,22 @@ def load(self, directory=None):
         except FileNotFoundError:
             logging.info("No log.csv - building from log items")
             self.load_log_items(directory=directory)
-            regenerate_resouces = True
+            regenerate_resources = True

-        # Now try to load resoucres, unless we need to rebuild them anyway
-        if not regenerate_resouces:
+        # Now try to load resources, unless we need to rebuild them anyway
+        if not regenerate_resources:
             try:
                 self.resource.load_csv(directory=directory)
                 logging.info(
                     f"Resource loaded from CSV - {len(self.resource.entries)} entries"
                 )
             except FileNotFoundError:
-                logging.info("No resources.csv - genereating from log.csv")
-                regenerate_resouces = True
+                logging.info("No resources.csv - generating from log.csv")
+                regenerate_resources = True

         # Do we need to regenerate resources?
-        if regenerate_resouces:
-            logging.info("Generating resouces from log.csv")
+        if regenerate_resources:
+            logging.info("Generating resources from log.csv")
             self.resource.load(log=self.log, source=self.source, directory=directory)

         # attempts to load in old-resources if the file exists, many use cases won't have any
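
A standalone sketch of the behaviour the new guard introduces, assuming source.records behaves like a plain dict keyed by endpoint: a resource that references an endpoint missing from the source now fails with a descriptive RuntimeError instead of an unexplained KeyError. The data and helper name below are made up for illustration.

# Illustrative data only; not the library's real structures.
source_records = {"abc123": [{"organisation": "local-authority:XYZ"}]}


def entries_for(endpoint):
    # Mirrors the membership check added in collection.py above.
    if endpoint not in source_records:
        raise RuntimeError(
            f"Endpoint '{endpoint}' not found in source. "
            "Check the endpoint.csv and source.csv files."
        )
    return source_records[endpoint]


print(entries_for("abc123"))   # [{'organisation': 'local-authority:XYZ'}]
# entries_for("missing")       # raises RuntimeError pointing at the CSV files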
