Commit c872617
add organization id and project id as args (#616)
logan-markewich authored Feb 11, 2025
1 parent 47c8682 commit c872617
Showing 4 changed files with 29 additions and 9 deletions.
4 changes: 0 additions & 4 deletions .github/workflows/publish_release.yml
@@ -36,16 +36,12 @@ jobs:
       - name: Build and publish llama-cloud-services
         uses: JRubics/poetry-publish@v2.1
         with:
-          poetry_version: ${{ env.POETRY_VERSION }}
-          python_version: ${{ env.PYTHON_VERSION }}
           pypi_token: ${{ secrets.LLAMA_PARSE_PYPI_TOKEN }}
           poetry_install_options: "--without dev"

       - name: Build and publish llama-parse
         uses: JRubics/poetry-publish@v2.1
         with:
-          poetry_version: ${{ env.POETRY_VERSION }}
-          python_version: ${{ env.PYTHON_VERSION }}
           working_directory: "llama_parse"
           pypi_token: ${{ secrets.LLAMA_PARSE_PYPI_TOKEN }}
           poetry_install_options: "--without dev"
28 changes: 26 additions & 2 deletions llama_cloud_services/parse/base.py
@@ -37,6 +37,21 @@
 JOB_UPLOAD_ROUTE = "/api/parsing/upload"


+def build_url(
+    base_url: str, organization_id: Optional[str], project_id: Optional[str]
+) -> str:
+    query_params = {}
+    if organization_id:
+        query_params["organization_id"] = organization_id
+    if project_id:
+        query_params["project_id"] = project_id
+
+    if query_params:
+        return base_url + "?" + "&".join([f"{k}={v}" for k, v in query_params.items()])
+
+    return base_url
+
+
 class LlamaParse(BasePydanticReader):
     """A smart-parser for files."""

@@ -50,6 +65,14 @@ class LlamaParse(BasePydanticReader):
         default=DEFAULT_BASE_URL,
         description="The base URL of the Llama Parsing API.",
     )
+    organization_id: Optional[str] = Field(
+        default=None,
+        description="The organization ID for the LlamaParse API.",
+    )
+    project_id: Optional[str] = Field(
+        default=None,
+        description="The project ID for the LlamaParse API.",
+    )
     check_interval: int = Field(
         default=1,
         description="The interval in seconds to check if the parsing is done.",
@@ -639,7 +662,7 @@ async def _create_job(
         if self.page_suffix is not None:
             data["page_suffix"] = self.page_suffix

-        if self.parsing_instruction is not None:
+        if self.parsing_instruction:
             print(
                 "WARNING: parsing_instruction is deprecated. Use complemental_formatting_instruction or content_guideline_instruction instead."
             )
@@ -706,7 +729,8 @@ async def _create_job(
             data["gpt4o_api_key"] = self.gpt4o_api_key

         try:
-            resp = await self.aclient.post(JOB_UPLOAD_ROUTE, files=files, data=data)  # type: ignore
+            url = build_url(JOB_UPLOAD_ROUTE, self.organization_id, self.project_id)
+            resp = await self.aclient.post(url, files=files, data=data)  # type: ignore
             resp.raise_for_status()  # this raises if status is not 2xx
             return resp.json()["id"]
         except httpx.HTTPStatusError as err:  # this catches it
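
For a quick sanity check of the new helper, here is a standalone copy of build_url from the hunk above with a few illustrative calls (the route is the real JOB_UPLOAD_ROUTE; the IDs are made up). Note that values are joined into the query string as-is, without percent-encoding, which is fine for plain IDs.

```python
from typing import Optional

JOB_UPLOAD_ROUTE = "/api/parsing/upload"


def build_url(
    base_url: str, organization_id: Optional[str], project_id: Optional[str]
) -> str:
    # Collect only the IDs that were actually provided.
    query_params = {}
    if organization_id:
        query_params["organization_id"] = organization_id
    if project_id:
        query_params["project_id"] = project_id

    # Append them as a query string; otherwise return the route unchanged.
    if query_params:
        return base_url + "?" + "&".join([f"{k}={v}" for k, v in query_params.items()])

    return base_url


print(build_url(JOB_UPLOAD_ROUTE, "org-123", None))
# /api/parsing/upload?organization_id=org-123
print(build_url(JOB_UPLOAD_ROUTE, "org-123", "proj-456"))
# /api/parsing/upload?organization_id=org-123&project_id=proj-456
print(build_url(JOB_UPLOAD_ROUTE, None, None))
# /api/parsing/upload
```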
4 changes: 2 additions & 2 deletions llama_parse/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

 [tool.poetry]
 name = "llama-parse"
-version = "0.6.0"
+version = "0.6.1"
 description = "Parse files into RAG-Optimized formats."
 authors = ["Logan Markewich <logan@llamaindex.ai>"]
 license = "MIT"
@@ -13,7 +13,7 @@ packages = [{include = "llama_parse"}]

 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
-llama-cloud-services = "*"
+llama-cloud-services = ">=0.6.1"

 [tool.poetry.group.dev.dependencies]
 pytest = "^8.0.0"
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -8,7 +8,7 @@ python_version = "3.10"

 [tool.poetry]
 name = "llama-cloud-services"
-version = "0.6.0"
+version = "0.6.1"
 description = "Tailored SDK clients for LlamaCloud services."
 authors = ["Logan Markewich <logan@runllama.ai>"]
 license = "MIT"
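
Taken together, a minimal usage sketch of the new arguments (assuming llama-parse / llama-cloud-services 0.6.1 and a LLAMA_CLOUD_API_KEY in the environment; the IDs and file path below are placeholders):

```python
from llama_parse import LlamaParse

parser = LlamaParse(
    organization_id="org-1234",  # placeholder; scopes the parsing job to this org
    project_id="proj-5678",      # placeholder; scopes the parsing job to this project
)

# Both IDs are appended to the upload route as query parameters, e.g.
# /api/parsing/upload?organization_id=org-1234&project_id=proj-5678
documents = parser.load_data("./my_file.pdf")
print(documents[0].text[:200])
```

Both fields default to None, in which case the upload URL is unchanged and existing callers are unaffected.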