diff --git a/alws/app.py b/alws/app.py index ee1f36628..1dc45cfa5 100644 --- a/alws/app.py +++ b/alws/app.py @@ -4,6 +4,7 @@ import sentry_sdk from fastapi import FastAPI from pika.exceptions import StreamLostError +from fastapi_sqla import setup as fastapi_sqla_setup from starlette.middleware.exceptions import ExceptionMiddleware from alws import routers @@ -39,6 +40,7 @@ app = FastAPI() app.add_middleware(ExceptionMiddleware, handlers=handlers) +fastapi_sqla_setup(app) for module in ROUTERS: for router_type in ( diff --git a/alws/auth/dependencies.py b/alws/auth/dependencies.py index d9046328f..068068259 100644 --- a/alws/auth/dependencies.py +++ b/alws/auth/dependencies.py @@ -1,15 +1,18 @@ from fastapi import Depends +from fastapi_sqla import AsyncSessionDependency from fastapi_users.authentication.strategy import ( AccessTokenDatabase, DatabaseStrategy, JWTStrategy, ) from fastapi_users_db_sqlalchemy import SQLAlchemyUserDatabase -from fastapi_users_db_sqlalchemy.access_token import SQLAlchemyAccessTokenDatabase +from fastapi_users_db_sqlalchemy.access_token import ( + SQLAlchemyAccessTokenDatabase, +) from sqlalchemy.ext.asyncio import AsyncSession from alws.config import settings -from alws.dependencies import get_async_session +from alws.dependencies import get_async_db_key from alws.models import User, UserAccessToken, UserOauthAccount __all__ = [ @@ -20,19 +23,26 @@ ] -async def get_user_db(session: AsyncSession = Depends(get_async_session)): +async def get_user_db( + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), +): yield SQLAlchemyUserDatabase( - session, User, oauth_account_table=UserOauthAccount) + session, User, oauth_account_table=UserOauthAccount + ) async def get_access_token_db( - session: AsyncSession = Depends(get_async_session), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): yield SQLAlchemyAccessTokenDatabase(session, UserAccessToken) def get_database_strategy( - access_token_db: AccessTokenDatabase = Depends(get_access_token_db) + access_token_db: AccessTokenDatabase = Depends(get_access_token_db), ) -> DatabaseStrategy: return DatabaseStrategy(access_token_db, lifetime_seconds=3600) diff --git a/alws/config.py b/alws/config.py index 35f376c65..755b241ef 100644 --- a/alws/config.py +++ b/alws/config.py @@ -26,16 +26,38 @@ class Settings(BaseSettings): redis_url: str = 'redis://redis:6379' - database_url: str = ( - 'postgresql+asyncpg://postgres:password@db/almalinux-bs' - ) + # TBD: remove after moving to fastapi-sqla + database_url: str = 'postgresql+asyncpg://postgres:password@db/almalinux-bs' test_database_url: str = ( 'postgresql+asyncpg://postgres:password@db/test-almalinux-bs' ) + # TBD: remove after moving to fastapi-sqla sync_database_url: str = ( 'postgresql+psycopg2://postgres:password@db/almalinux-bs' ) + fastapi_sqla__async__sqlalchemy_url: str = ( + 'postgresql+asyncpg://postgres:password@db/almalinux-bs' + ) + fastapi_sqla__async__sqlalchemy_echo_pool: bool = True + + sqlalchemy_url: str = ( + 'postgresql+psycopg2://postgres:password@db/almalinux-bs' + ) + sqlalchemy_pool_pre_ping: bool = True + sqlalchemy_pool_recycle: int = 3600 + + fastapi_sqla__pulp__sqlalchemy_url: str = ( + 'postgresql+psycopg2://postgres:password@pulp:5432/pulp' + ) + fastapi_sqla__pulp__sqlalchemy_pool_pre_ping: bool = True + fastapi_sqla__pulp__sqlalchemy_pool_recycle: int = 3600 + + fastapi_sqla__pulp_async__sqlalchemy_url: str = ( + 'postgresql+asyncpg://postgres:password@pulp:5432/pulp' + 
) + fastapi_sqla__pulp_async__sqlalchemy_echo_pool: bool = True + github_client: str github_client_secret: str diff --git a/alws/crud/actions.py b/alws/crud/actions.py index 4a18080e1..b7005b5ed 100644 --- a/alws/crud/actions.py +++ b/alws/crud/actions.py @@ -1,11 +1,11 @@ +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from alws.database import Session from alws.models import UserAction from alws.perms.actions import ActionsList -async def ensure_all_actions_exist(session: Session, commit: bool = False): +async def ensure_all_actions_exist(session: AsyncSession): existing_actions = ( (await session.execute(select(UserAction))).scalars().all() ) @@ -19,7 +19,4 @@ async def ensure_all_actions_exist(session: Session, commit: bool = False): if new_actions: session.add_all(new_actions) - if commit: - await session.commit() - else: - await session.flush() + await session.flush() diff --git a/alws/crud/build.py b/alws/crud/build.py index e09a14dff..ded7ce404 100644 --- a/alws/crud/build.py +++ b/alws/crud/build.py @@ -85,7 +85,7 @@ async def create_build( for flavour in flavors: db_build.platform_flavors.append(flavour) db.add(db_build) - await db.commit() + await db.flush() await db.refresh(db_build) start_build.send(db_build.id, build.model_dump()) return db_build @@ -175,9 +175,7 @@ async def generate_query(count=False): sqlalchemy.or_( models.BuildTaskRef.url.like(f"%/{project_name}.git"), models.BuildTaskRef.url.like(f"%/{project_name}%.src.rpm"), - models.BuildTaskRef.url.like( - f"%/rpms/{project_name}%.git" - ), + models.BuildTaskRef.url.like(f"%/rpms/{project_name}%.git"), ) ) if not (await db.execute(project_query)).scalars().all(): @@ -201,13 +199,11 @@ async def generate_query(count=False): if build_task_arch is not None: query = query.filter(models.BuildTask.arch == build_task_arch) if any(rpm_params.values()): - pulp_params.update( - { - key: value - for key, value in rpm_params.items() - if value is not None - } - ) + pulp_params.update({ + key: value + for key, value in rpm_params.items() + if value is not None + }) # TODO: we can get packages from pulp database pulp_hrefs = await pulp_client.get_rpm_packages(**pulp_params) pulp_hrefs = [row["pulp_href"] for row in pulp_hrefs] @@ -293,128 +289,112 @@ async def remove_build_job(db: AsyncSession, build_id: int): build_task_ref_ids = [] test_task_ids = [] test_task_artifact_ids = [] - async with db.begin(): - build = await db.execute(query_bj) - build = build.scalars().first() - if build is None: - raise DataNotFoundError(f'Build with {build_id} not found') - if build.products: - product_names = "\n".join( - (product.name for product in build.products) - ) - raise BuildError( - f"Cannot delete Build={build_id}, " - f"build contains in following products:\n{product_names}" - ) - if build.released: - raise BuildError(f"Build with {build_id} is released") - for bt in build.tasks: - build_task_ids.append(bt.id) - build_task_ref_ids.append(bt.ref_id) - for build_artifact in bt.artifacts: - build_task_artifact_ids.append(build_artifact.id) - for tt in bt.test_tasks: - test_task_ids.append(tt.id) - repo_ids.append(tt.repository_id) - for test_artifact in tt.artifacts: - test_task_artifact_ids.append(test_artifact.id) - for br in build.repos: - repos.append(br.pulp_href) - repo_ids.append(br.id) - pulp_client = PulpClient( - settings.pulp_host, settings.pulp_user, settings.pulp_password - ) - await db.execute( - delete(models.BuildRepo).where( - models.BuildRepo.c.build_id == build_id - ) - ) - await 
db.execute( - delete(models.BuildPlatformFlavour).where( - models.BuildPlatformFlavour.c.build_id == build_id - ) - ) - await db.execute( - delete(models.SignTask).where(models.SignTask.build_id == build_id) - ) - await db.execute( - delete(models.BinaryRpm).where( - models.BinaryRpm.build_id == build_id - ) + build = await db.execute(query_bj) + build = build.scalars().first() + if build is None: + raise DataNotFoundError(f'Build with {build_id} not found') + if build.products: + product_names = "\n".join((product.name for product in build.products)) + raise BuildError( + f"Cannot delete Build={build_id}, " + f"build contains in following products:\n{product_names}" ) - await db.execute( - delete(models.SourceRpm).where( - models.SourceRpm.build_id == build_id - ) - ) - await db.execute( - delete(models.PerformanceStats).where( - models.PerformanceStats.build_task_id.in_(build_task_ids) - ) - ) - await db.execute( - delete(models.PerformanceStats).where( - models.PerformanceStats.test_task_id.in_(test_task_ids) - ) - ) - await db.execute( - delete(models.TestTaskArtifact).where( - models.TestTaskArtifact.id.in_(test_task_artifact_ids) - ) - ) - await db.execute( - delete(models.TestTask).where( - models.TestTask.id.in_(test_task_ids) - ) + if build.released: + raise BuildError(f"Build with {build_id} is released") + for bt in build.tasks: + build_task_ids.append(bt.id) + build_task_ref_ids.append(bt.ref_id) + for build_artifact in bt.artifacts: + build_task_artifact_ids.append(build_artifact.id) + for tt in bt.test_tasks: + test_task_ids.append(tt.id) + repo_ids.append(tt.repository_id) + for test_artifact in tt.artifacts: + test_task_artifact_ids.append(test_artifact.id) + for br in build.repos: + repos.append(br.pulp_href) + repo_ids.append(br.id) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) + await db.execute( + delete(models.BuildRepo).where(models.BuildRepo.c.build_id == build_id) + ) + await db.execute( + delete(models.BuildPlatformFlavour).where( + models.BuildPlatformFlavour.c.build_id == build_id ) - await db.execute( - delete(models.BuildTaskArtifact).where( - models.BuildTaskArtifact.id.in_(build_task_artifact_ids) - ) + ) + await db.execute( + delete(models.SignTask).where(models.SignTask.build_id == build_id) + ) + await db.execute( + delete(models.BinaryRpm).where(models.BinaryRpm.build_id == build_id) + ) + await db.execute( + delete(models.SourceRpm).where(models.SourceRpm.build_id == build_id) + ) + await db.execute( + delete(models.PerformanceStats).where( + models.PerformanceStats.build_task_id.in_(build_task_ids) ) - await db.execute( - delete(models.BuildTaskDependency).where( - models.BuildTaskDependency.c.build_task_dependency.in_( - build_task_ids - ) - ) + ) + await db.execute( + delete(models.PerformanceStats).where( + models.PerformanceStats.test_task_id.in_(test_task_ids) ) - await db.execute( - delete(models.Repository).where(models.Repository.id.in_(repo_ids)) + ) + await db.execute( + delete(models.TestTaskArtifact).where( + models.TestTaskArtifact.id.in_(test_task_artifact_ids) ) - await db.execute( - delete(models.BuildTask).where( - models.BuildTask.build_id == build_id - ) + ) + await db.execute( + delete(models.TestTask).where(models.TestTask.id.in_(test_task_ids)) + ) + await db.execute( + delete(models.BuildTaskArtifact).where( + models.BuildTaskArtifact.id.in_(build_task_artifact_ids) ) - await db.execute( - delete(models.BuildDependency).where( - sqlalchemy.or_( - 
models.BuildDependency.c.build_dependency == build_id,
-                models.BuildDependency.c.build_id == build_id,
-            )
+    )
+    await db.execute(
+        delete(models.BuildTaskDependency).where(
+            models.BuildTaskDependency.c.build_task_dependency.in_(
+                build_task_ids
             )
         )
-        await db.execute(
-            delete(models.BuildTaskRef).where(
-                models.BuildTaskRef.id.in_(build_task_ref_ids)
+    )
+    await db.execute(
+        delete(models.Repository).where(models.Repository.id.in_(repo_ids))
+    )
+    await db.execute(
+        delete(models.BuildTask).where(models.BuildTask.build_id == build_id)
+    )
+    await db.execute(
+        delete(models.BuildDependency).where(
+            sqlalchemy.or_(
+                models.BuildDependency.c.build_dependency == build_id,
+                models.BuildDependency.c.build_id == build_id,
             )
         )
-        await db.execute(
-            delete(models.Build).where(models.Build.id == build_id)
+    )
+    await db.execute(
+        delete(models.BuildTaskRef).where(
+            models.BuildTaskRef.id.in_(build_task_ref_ids)
         )
-        # FIXME
-        # it seems we cannot just delete any files because
-        # https://docs.pulpproject.org/pulpcore/restapi.html#tag/Content:-Files
-        # does not content delete option, but artifact does:
-        # https://docs.pulpproject.org/pulpcore/restapi.html#operation/
-        # artifacts_delete
-        # "Remove Artifact only if it is not associated with any Content."
-        # for artifact in artifacts:
-        #     await pulp_client.remove_artifact(artifact)
-        for repo in repos:
-            try:
-                await pulp_client.delete_by_href(repo, wait_for_result=True)
-            except Exception as err:
-                logging.exception("Cannot delete repo from pulp: %s", err)
-        await db.commit()
+    )
+    await db.execute(delete(models.Build).where(models.Build.id == build_id))
+    # FIXME
+    # it seems we cannot just delete any files because
+    # https://docs.pulpproject.org/pulpcore/restapi.html#tag/Content:-Files
+    # does not offer a delete option, but the artifact endpoint does:
+    # https://docs.pulpproject.org/pulpcore/restapi.html#operation/
+    # artifacts_delete
+    # "Remove Artifact only if it is not associated with any Content."
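+    # (A possible alternative, not verified here: pulpcore's orphan cleanup
+    # endpoint, POST /pulp/api/v3/orphans/cleanup/, removes content and
+    # artifacts that are no longer referenced by any repository version.)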
+ # for artifact in artifacts: + # await pulp_client.remove_artifact(artifact) + for repo in repos: + try: + await pulp_client.delete_by_href(repo, wait_for_result=True) + except Exception as err: + logging.exception("Cannot delete repo from pulp: %s", err) diff --git a/alws/crud/build_node.py b/alws/crud/build_node.py index 8a51a7c78..8f18ca9cd 100644 --- a/alws/crud/build_node.py +++ b/alws/crud/build_node.py @@ -6,6 +6,7 @@ from collections import defaultdict import sqlalchemy +from fastapi_sqla import open_async_session from sqlalchemy import delete, insert, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select @@ -18,7 +19,6 @@ ErrataPackageStatus, GitHubIssueStatus, ) -from alws.database import PulpAsyncSession from alws.errors import ( ArtifactChecksumError, ArtifactConversionError, @@ -48,53 +48,50 @@ async def get_available_build_task( db: AsyncSession, request: build_node_schema.RequestTask, ) -> typing.Optional[models.BuildTask]: - async with db.begin(): - # TODO: here should be config value - ts_expired = datetime.datetime.utcnow() - datetime.timedelta( - minutes=20 - ) - db_task = await db.execute( - select(models.BuildTask) - .where(~models.BuildTask.dependencies.any()) - .with_for_update() - .filter( - sqlalchemy.and_( - models.BuildTask.status < BuildTaskStatus.COMPLETED, - models.BuildTask.arch.in_(request.supported_arches), - sqlalchemy.or_( - models.BuildTask.ts < ts_expired, - models.BuildTask.ts.is_(None), - ), - ) - ) - .options( - selectinload(models.BuildTask.ref), - selectinload(models.BuildTask.build).selectinload( - models.Build.repos - ), - selectinload(models.BuildTask.platform).selectinload( - models.Platform.repos - ), - selectinload(models.BuildTask.build).selectinload( - models.Build.owner + # TODO: here should be config value + ts_expired = datetime.datetime.utcnow() - datetime.timedelta(minutes=20) + db_task = await db.execute( + select(models.BuildTask) + .where(~models.BuildTask.dependencies.any()) + .with_for_update() + .filter( + sqlalchemy.and_( + models.BuildTask.status < BuildTaskStatus.COMPLETED, + models.BuildTask.arch.in_(request.supported_arches), + sqlalchemy.or_( + models.BuildTask.ts < ts_expired, + models.BuildTask.ts.is_(None), ), - selectinload(models.BuildTask.build) - .selectinload(models.Build.linked_builds) - .selectinload(models.Build.repos), - selectinload(models.BuildTask.build) - .selectinload(models.Build.platform_flavors) - .selectinload(models.PlatformFlavour.repos), - selectinload(models.BuildTask.artifacts), - selectinload(models.BuildTask.rpm_modules), ) - .order_by(models.BuildTask.id.asc()) ) - db_task = db_task.scalars().first() - if not db_task: - return - db_task.ts = datetime.datetime.utcnow() - db_task.status = BuildTaskStatus.STARTED - await db.commit() + .options( + selectinload(models.BuildTask.ref), + selectinload(models.BuildTask.build).selectinload( + models.Build.repos + ), + selectinload(models.BuildTask.platform).selectinload( + models.Platform.repos + ), + selectinload(models.BuildTask.build).selectinload( + models.Build.owner + ), + selectinload(models.BuildTask.build) + .selectinload(models.Build.linked_builds) + .selectinload(models.Build.repos), + selectinload(models.BuildTask.build) + .selectinload(models.Build.platform_flavors) + .selectinload(models.PlatformFlavour.repos), + selectinload(models.BuildTask.artifacts), + selectinload(models.BuildTask.rpm_modules), + ) + .order_by(models.BuildTask.id.asc()) + ) + db_task = db_task.scalars().first() + if not db_task: + 
return
+    db_task.ts = datetime.datetime.utcnow()
+    db_task.status = BuildTaskStatus.STARTED
+    await db.flush()
     return db_task
@@ -140,85 +137,79 @@
     db: AsyncSession,
     build_id: int,
 ):
-    async with db.begin():
-        tasks_cache = await get_failed_build_tasks_matrix(db, build_id)
-        tasks_indexes = list(tasks_cache.keys())
-        for task_index, index_dict in tasks_cache.items():
-            current_idx = tasks_indexes.index(task_index)
-            first_index_dep = None
-
-            completed_index_tasks = []
-            failed_tasks = []
-            for task in index_dict.values():
-                if task.status == BuildTaskStatus.COMPLETED:
-                    completed_index_tasks.append(task)
-                elif task.status == BuildTaskStatus.FAILED:
-                    failed_tasks.append(task)
-
-            drop_srpm = False
-            if len(failed_tasks) == len(index_dict):
-                drop_srpm = True
-
-            for key in sorted(
-                list(index_dict.keys()),
-                key=lambda x: x[1] == "i686",
-                reverse=True,
-            ):
-                task = index_dict[key]
-                if task.status != BuildTaskStatus.FAILED:
-                    continue
-                if task.built_srpm_url and drop_srpm:
-                    task.built_srpm_url = None
-                task.status = BuildTaskStatus.IDLE
-                task.ts = None
-                if first_index_dep:
-                    await db.run_sync(
-                        add_build_task_dependencies, task, first_index_dep
-                    )
-                idx = current_idx - 1
-                while idx >= 0:
-                    prev_task_index = tasks_indexes[idx]
-                    dep = tasks_cache.get(prev_task_index, {}).get(key)
-                    # dependency.status can be completed because
-                    # we stores in cache completed tasks
-                    if dep and dep.status == BuildTaskStatus.IDLE:
-                        await db.run_sync(
-                            add_build_task_dependencies, task, dep
-                        )
-                    idx -= 1
-                # if at least one task in index is completed,
-                # we shouldn't wait first task completion
-                if first_index_dep is None and not completed_index_tasks:
-                    first_index_dep = task
-        await db.commit()
+    tasks_cache = await get_failed_build_tasks_matrix(db, build_id)
+    tasks_indexes = list(tasks_cache.keys())
+    for task_index, index_dict in tasks_cache.items():
+        current_idx = tasks_indexes.index(task_index)
+        first_index_dep = None
+
+        completed_index_tasks = []
+        failed_tasks = []
+        for task in index_dict.values():
+            if task.status == BuildTaskStatus.COMPLETED:
+                completed_index_tasks.append(task)
+            elif task.status == BuildTaskStatus.FAILED:
+                failed_tasks.append(task)
+
+        drop_srpm = False
+        if len(failed_tasks) == len(index_dict):
+            drop_srpm = True
+
+        for key in sorted(
+            list(index_dict.keys()),
+            key=lambda x: x[1] == "i686",
+            reverse=True,
+        ):
+            task = index_dict[key]
+            if task.status != BuildTaskStatus.FAILED:
+                continue
+            if task.built_srpm_url and drop_srpm:
+                task.built_srpm_url = None
+            task.status = BuildTaskStatus.IDLE
+            task.ts = None
+            if first_index_dep:
+                await db.run_sync(
+                    add_build_task_dependencies, task, first_index_dep
+                )
+            idx = current_idx - 1
+            while idx >= 0:
+                prev_task_index = tasks_indexes[idx]
+                dep = tasks_cache.get(prev_task_index, {}).get(key)
+                # dependency.status can be completed because
+                # we store completed tasks in the cache
+                if dep and dep.status == BuildTaskStatus.IDLE:
+                    await db.run_sync(add_build_task_dependencies, task, dep)
+                idx -= 1
+            # if at least one task in index is completed,
+            # we shouldn't wait for the first task to complete
+            if first_index_dep is None and not completed_index_tasks:
+                first_index_dep = task
+    await db.flush()


 async def update_failed_build_items(db: AsyncSession, build_id: int):
-    async with db.begin():
-        failed_tasks_matrix = await get_failed_build_tasks_matrix(db, build_id)
-
-        last_task = None
-        for tasks_dicts in failed_tasks_matrix.values():
-            failed_tasks = [
-                task
-                for task
in tasks_dicts.values() - if task.status == BuildTaskStatus.FAILED - ] - drop_srpm = False - if len(failed_tasks) == len(tasks_dicts): - drop_srpm = True - - for task in failed_tasks: - if task.built_srpm_url and drop_srpm: - task.built_srpm_url = None - task.status = BuildTaskStatus.IDLE - task.ts = None - if last_task is not None: - await db.run_sync( - add_build_task_dependencies, task, last_task - ) - last_task = task - await db.commit() + failed_tasks_matrix = await get_failed_build_tasks_matrix(db, build_id) + + last_task = None + for tasks_dicts in failed_tasks_matrix.values(): + failed_tasks = [ + task + for task in tasks_dicts.values() + if task.status == BuildTaskStatus.FAILED + ] + drop_srpm = False + if len(failed_tasks) == len(tasks_dicts): + drop_srpm = True + + for task in failed_tasks: + if task.built_srpm_url and drop_srpm: + task.built_srpm_url = None + task.status = BuildTaskStatus.IDLE + task.ts = None + if last_task is not None: + await db.run_sync(add_build_task_dependencies, task, last_task) + last_task = task + await db.flush() async def mark_build_tasks_as_cancelled( @@ -236,7 +227,6 @@ async def mark_build_tasks_as_cancelled( error="Build task cancelled by user", ) ) - await session.commit() async def log_repo_exists(db: AsyncSession, task: models.BuildTask): @@ -283,15 +273,14 @@ async def create_build_log_repo(db: AsyncSession, task: models.BuildTask): build_id=task.build_id, repository_id=log_repo.id ) ) - await db.commit() + await db.flush() async def ping_tasks(db: AsyncSession, task_list: typing.List[int]): query = models.BuildTask.id.in_(task_list) now = datetime.datetime.utcnow() - async with db.begin(): - await db.execute(update(models.BuildTask).where(query).values(ts=now)) - await db.commit() + await db.execute(update(models.BuildTask).where(query).values(ts=now)) + await db.flush() async def get_build_task(db: AsyncSession, task_id: int) -> models.BuildTask: @@ -525,9 +514,7 @@ async def __process_logs( logging.error("Log repository is absent, skipping logs processing") return logs = [] - tasks = [ - pulp_client.create_entity(artifact) for artifact in task_artifacts - ] + tasks = [pulp_client.create_entity(artifact) for artifact in task_artifacts] try: results = await asyncio.gather(*tasks) except Exception as e: @@ -806,7 +793,7 @@ def _get_srpm_name( # rebuilding failed tasks. At this point, we delete the current # module in pulp db and a new final one will be properly # created/pubished below. 
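+        # NOTE: unlike the manual begin()/commit() pair it replaces,
+        # fastapi-sqla's open_async_session() commits on successful exit
+        # and rolls back on exception (behavior as understood from
+        # fastapi-sqla's session helpers).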
- async with PulpAsyncSession() as pulp_db, pulp_db.begin(): + async with open_async_session('pulp_async') as pulp_db: module_in_pulp_db = await get_module_from_pulp_db( pulp_db, rpm_module, @@ -1024,9 +1011,9 @@ async def safe_build_done( start_time = datetime.datetime.utcnow() logging.info("Start processing build_task: %d", request.task_id) try: - async with db.begin(), pulp.begin(): + async with pulp.begin(): build_task, build_done_stats = await build_done(db, pulp, request) - await db.commit() + await db.flush() except Exception: logging.exception("Build done failed:") success = False @@ -1050,9 +1037,8 @@ async def safe_build_done( }, **build_done_stats, } - async with db.begin(): - await __update_built_srpm_url(db, build_task, request) - await db.commit() + await __update_built_srpm_url(db, build_task, request) + await db.flush() finally: remove_dep_query = delete(models.BuildTaskDependency).where( models.BuildTaskDependency.c.build_task_dependency @@ -1080,7 +1066,7 @@ async def safe_build_done( ), ) await db.execute(remove_dep_query) - await db.commit() + await db.flush() logging.info("Build task: %d, processing is finished", request.task_id) return success diff --git a/alws/crud/errata.py b/alws/crud/errata.py index 4c7be3266..0fb81be80 100644 --- a/alws/crud/errata.py +++ b/alws/crud/errata.py @@ -4,7 +4,6 @@ import logging import re import uuid -from contextlib import asynccontextmanager from typing import ( Any, Awaitable, @@ -18,6 +17,7 @@ import createrepo_c as cr import jinja2 +from fastapi_sqla import open_async_session, open_session from sqlalchemy import and_, delete, or_, select, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Session, load_only, selectinload @@ -31,7 +31,7 @@ ErrataReleaseStatus, GitHubIssueStatus, ) -from alws.dependencies import get_db, get_pulp_db +from alws.dependencies import get_async_db_key from alws.pulp_models import ( RpmPackage, UpdateCollection, @@ -222,9 +222,7 @@ def errata_records_to_oval(records: List[models.NewErrataRecord], platform_name: continue # TODO: Add test mapping here # test_id: rhel_evra - criterion["comment"] = criterion[ - "comment" - ].replace( + criterion["comment"] = criterion["comment"].replace( evra, rhel_evra_mapping[evra][ next(iter(rhel_evra_mapping[evra].keys())) @@ -499,7 +497,7 @@ async def update_errata_record( record.description = None else: record.description = update_record.description - await db.commit() + await db.flush() await db.refresh(record) return record @@ -757,7 +755,7 @@ async def create_errata_record(db: AsyncSession, errata: BaseErrataRecord): ) db.add_all(items_to_insert) - await db.commit() + await db.flush() await db.refresh(db_errata) if not settings.github_integration_enabled: return db_errata @@ -856,9 +854,7 @@ def generate_query(count=False): ) ) if platform: - query = query.filter( - models.NewErrataRecord.platform_id == platform - ) + query = query.filter(models.NewErrataRecord.platform_id == platform) if cve_id: query = query.filter( models.NewErrataRecord.cves.like(f"%{cve_id}%") @@ -884,40 +880,36 @@ async def update_package_status( db: AsyncSession, request: List[errata_schema.ChangeErrataPackageStatusRequest], ): - async with db.begin(): - for record in request: - errata_record = await db.execute( - select(models.NewErrataRecord) - .where( - models.NewErrataRecord.id == record.errata_record_id, - models.NewErrataRecord.platform_id - == record.errata_platform_id, - ) - .options( - selectinload(models.NewErrataRecord.packages) - 
.selectinload(models.NewErrataPackage.albs_packages) - .selectinload(models.NewErrataToALBSPackage.build_artifact) - .selectinload(models.BuildTaskArtifact.build_task) - ) + for record in request: + errata_record = await db.execute( + select(models.NewErrataRecord) + .where( + models.NewErrataRecord.id == record.errata_record_id, + models.NewErrataRecord.platform_id == record.errata_platform_id, ) - errata_record = errata_record.scalars().first() - record_approved = record.status == ErrataPackageStatus.approved - for errata_pkg in errata_record.packages: - if errata_pkg.source_srpm != record.source: - continue - for albs_pkg in errata_pkg.albs_packages: - if albs_pkg.status == ErrataPackageStatus.released: - raise ValueError( - "There is already released package " - f"with same source: {albs_pkg}" - ) - if ( - albs_pkg.build_id != record.build_id - and record_approved - ): - albs_pkg.status = ErrataPackageStatus.skipped - if albs_pkg.build_id == record.build_id: - albs_pkg.status = record.status + .options( + selectinload(models.NewErrataRecord.packages) + .selectinload(models.NewErrataPackage.albs_packages) + .selectinload(models.NewErrataToALBSPackage.build_artifact) + .selectinload(models.BuildTaskArtifact.build_task) + ) + ) + errata_record = errata_record.scalars().first() + record_approved = record.status == ErrataPackageStatus.approved + for errata_pkg in errata_record.packages: + if errata_pkg.source_srpm != record.source: + continue + for albs_pkg in errata_pkg.albs_packages: + if albs_pkg.status == ErrataPackageStatus.released: + raise ValueError( + "There is already released package " + f"with same source: {albs_pkg}" + ) + if albs_pkg.build_id != record.build_id and record_approved: + albs_pkg.status = ErrataPackageStatus.skipped + if albs_pkg.build_id == record.build_id: + albs_pkg.status = record.status + await db.flush() return True @@ -1122,64 +1114,62 @@ def append_update_packages_in_update_records( updateinfo_mapping: DefaultDict[ str, List[ - Tuple[ - models.BuildTaskArtifact, dict, models.NewErrataToALBSPackage - ] + Tuple[models.BuildTaskArtifact, dict, models.NewErrataToALBSPackage] ], ], ): - with pulp_db.begin(): - for record in errata_records: - record_uuid = uuid.UUID(record["pulp_href"].split("/")[-2]) - packages = updateinfo_mapping.get(record["id"]) - if not packages: + for record in errata_records: + record_uuid = uuid.UUID(record["pulp_href"].split("/")[-2]) + packages = updateinfo_mapping.get(record["id"]) + if not packages: + continue + pulp_record = pulp_db.execute( + select(UpdateRecord) + .where(UpdateRecord.content_ptr_id == record_uuid) + .options( + selectinload(UpdateRecord.collections).selectinload( + UpdateCollection.packages + ), + ) + ) + pulp_record: UpdateRecord = pulp_record.scalars().first() + for _, pulp_pkg, pkg in packages: + already_released = False + collection = pulp_record.collections[0] + collection_arch = re.search( + r"i686|x86_64|aarch64|ppc64le|s390x", + collection.name, + ).group() + if pulp_pkg["arch"] not in (collection_arch, "noarch"): continue - pulp_record = pulp_db.execute( - select(UpdateRecord) - .where(UpdateRecord.content_ptr_id == record_uuid) - .options( - selectinload(UpdateRecord.collections).selectinload( - UpdateCollection.packages - ), - ) + already_released = next( + ( + package + for package in collection.packages + if package.filename == pulp_pkg["location_href"] + ), + None, ) - pulp_record: UpdateRecord = pulp_record.scalars().first() - for _, pulp_pkg, pkg in packages: - already_released = False - 
collection = pulp_record.collections[0] - collection_arch = re.search( - r"i686|x86_64|aarch64|ppc64le|s390x", - collection.name, - ).group() - if pulp_pkg["arch"] not in (collection_arch, "noarch"): - continue - already_released = next( - ( - package - for package in collection.packages - if package.filename == pulp_pkg["location_href"] - ), - None, - ) - if already_released: - continue - collection.packages.append( - UpdatePackage( - name=pulp_pkg["name"], - filename=pulp_pkg["location_href"], - arch=pulp_pkg["arch"], - version=pulp_pkg["version"], - release=pulp_pkg["release"], - epoch=str(pulp_pkg["epoch"]), - reboot_suggested=pkg.errata_package.reboot_suggested, - src=pulp_pkg["rpm_sourcerpm"], - sum=pulp_pkg["sha256"], - sum_type=cr.checksum_type("sha256"), - ) - ) - pulp_record.updated_date = datetime.datetime.utcnow().strftime( - "%Y-%m-%d %H:%M:%S" + if already_released: + continue + collection.packages.append( + UpdatePackage( + name=pulp_pkg["name"], + filename=pulp_pkg["location_href"], + arch=pulp_pkg["arch"], + version=pulp_pkg["version"], + release=pulp_pkg["release"], + epoch=str(pulp_pkg["epoch"]), + reboot_suggested=pkg.errata_package.reboot_suggested, + src=pulp_pkg["rpm_sourcerpm"], + sum=pulp_pkg["sha256"], + sum_type=cr.checksum_type("sha256"), ) + ) + pulp_record.updated_date = datetime.datetime.utcnow().strftime( + "%Y-%m-%d %H:%M:%S" + ) + pulp_db.flush() def get_albs_packages_from_record( @@ -1267,7 +1257,7 @@ async def process_errata_release_for_repos( repository_version=latest_repo_version, ) logging.info("Appending packages to existing errata records") - with get_pulp_db() as pulp_db: + with open_session(key="pulp") as pulp_db: append_update_packages_in_update_records( pulp_db=pulp_db, errata_records=errata_records, @@ -1371,7 +1361,7 @@ async def release_errata_record(record_id: str, platform_id: int, force: bool): settings.pulp_user, settings.pulp_password, ) - async with asynccontextmanager(get_db)() as session: + async with open_async_session(key=get_async_db_key()) as session: session: AsyncSession query = generate_query_for_release([record_id]) query = query.filter(models.NewErrataRecord.platform_id == platform_id) @@ -1401,7 +1391,7 @@ async def release_errata_record(record_id: str, platform_id: int, force: bool): db_record.release_status = ErrataReleaseStatus.FAILED db_record.last_release_log = str(exc) logging.exception("Cannot release %s record:", record_id) - await session.commit() + await session.flush() return await process_errata_release_for_repos( @@ -1421,7 +1411,7 @@ async def release_errata_record(record_id: str, platform_id: int, force: bool): force_flag=force, missing_pkg_names=missing_pkg_names, ) - await session.commit() + await session.flush() if settings.github_integration_enabled: try: await close_issues(record_ids=[db_record.id]) @@ -1441,7 +1431,7 @@ async def bulk_errata_records_release(records_ids: List[str]): ) release_tasks = [] repos_to_publish = [] - async with asynccontextmanager(get_db)() as session: + async with open_async_session(key=get_async_db_key()) as session: await session.execute( update(models.NewErrataRecord) .where(models.NewErrataRecord.id.in_(records_ids)) @@ -1450,9 +1440,8 @@ async def bulk_errata_records_release(records_ids: List[str]): last_release_log=None, ) ) - await session.commit() - async with asynccontextmanager(get_db)() as session: + async with open_async_session(key=get_async_db_key()) as session: session: AsyncSession db_records = await session.execute( generate_query_for_release(records_ids), @@ 
-1518,7 +1507,6 @@ async def bulk_errata_records_release(records_ids: List[str]): continue repos_to_publish.extend(repo_mapping.keys()) release_tasks.extend(tasks) - await session.commit() logging.info("Executing release tasks") await asyncio.gather(*release_tasks) logging.info("Executing publication tasks") @@ -1546,16 +1534,15 @@ async def get_updateinfo_xml_from_pulp( platform_name = None if platform_id: - async with db.begin(): - platform_name = ( - await db.execute( - select(models.Platform.name).where( - models.Platform.id == platform_id - ) + platform_name = ( + await db.execute( + select(models.Platform.name).where( + models.Platform.id == platform_id ) - ).scalar() - if not platform_name: - return + ) + ).scalar() + if not platform_name: + return cr_upd = cr.UpdateInfo() for errata_record in errata_records: @@ -1699,4 +1686,4 @@ async def reset_matched_errata_packages(record_id: str, session: AsyncSession): ) ) session.add_all(items_to_insert) - await session.commit() + await session.flush() diff --git a/alws/crud/platform.py b/alws/crud/platform.py index 1fd64632e..8d7275ce3 100644 --- a/alws/crud/platform.py +++ b/alws/crud/platform.py @@ -13,52 +13,49 @@ async def modify_platform( db: Session, platform: platform_schema.PlatformModify ) -> models.Platform: query = models.Platform.name == platform.name - async with db.begin(): - db_platform = await db.execute( - select(models.Platform) - .where(query) - .options( - selectinload(models.Platform.repos), - selectinload(models.Platform.reference_platforms), - ) - .with_for_update() + db_platform = await db.execute( + select(models.Platform) + .where(query) + .options( + selectinload(models.Platform.repos), + selectinload(models.Platform.reference_platforms), ) - db_platform = db_platform.scalars().first() - if not db_platform: - raise DataNotFoundError( - f'Platform with name: "{platform.name}" does not exists' - ) - fields_to_update = ( - 'type', - 'distr_type', - 'distr_version', - 'arch_list', - 'data', - 'modularity', - 'is_reference', - 'weak_arch_list', - 'copy_priority_arches', - 'copyright', - 'contact_mail', + .with_for_update() + ) + db_platform = db_platform.scalars().first() + if not db_platform: + raise DataNotFoundError( + f'Platform with name: "{platform.name}" does not exists' ) - for field in fields_to_update: - value = getattr(platform, field, None) - if value is not None: - setattr(db_platform, field, value) - db_repos = {repo.name: repo for repo in db_platform.repos} - payload_repos = getattr(platform, 'repos', None) - new_repos = {} - if payload_repos: - new_repos = {repo.name: repo for repo in platform.repos} - for repo in platform.repos: - if repo.name in db_repos: - db_repo = db_repos[repo.name] - for key in repo.model_dump().keys(): - setattr(db_repo, key, getattr(repo, key)) - else: - db_platform.repos.append( - models.Repository(**repo.model_dump()) - ) + fields_to_update = ( + 'type', + 'distr_type', + 'distr_version', + 'arch_list', + 'data', + 'modularity', + 'is_reference', + 'weak_arch_list', + 'copy_priority_arches', + 'copyright', + 'contact_mail', + ) + for field in fields_to_update: + value = getattr(platform, field, None) + if value is not None: + setattr(db_platform, field, value) + db_repos = {repo.name: repo for repo in db_platform.repos} + payload_repos = getattr(platform, 'repos', None) + new_repos = {} + if payload_repos: + new_repos = {repo.name: repo for repo in platform.repos} + for repo in platform.repos: + if repo.name in db_repos: + db_repo = db_repos[repo.name] + for key in 
repo.model_dump().keys(): + setattr(db_repo, key, getattr(repo, key)) + else: + db_platform.repos.append(models.Repository(**repo.model_dump())) ref_platform_ids_to_remove = [ ref_platform.id @@ -83,14 +80,13 @@ async def modify_platform( ) ) ) - - repos_to_remove = [] - for repo_name in db_repos: - if new_repos and repo_name not in new_repos: - repos_to_remove.append(repo_name) - remove_query = models.Repository.name.in_(repos_to_remove) - await db.execute(delete(models.Repository).where(remove_query)) - await db.commit() + repos_to_remove = [] + for repo_name in db_repos: + if new_repos and repo_name not in new_repos: + repos_to_remove.append(repo_name) + remove_query = models.Repository.name.in_(repos_to_remove) + await db.execute(delete(models.Repository).where(remove_query)) + await db.flush() await db.refresh(db_platform) return db_platform @@ -116,7 +112,7 @@ async def create_platform( for repo in platform.repos: db_platform.repos.append(models.Repository(**repo.model_dump())) db.add(db_platform) - await db.commit() + await db.flush() await db.refresh(db_platform) return db_platform diff --git a/alws/crud/platform_flavors.py b/alws/crud/platform_flavors.py index 25808f4f9..0c3b8c621 100644 --- a/alws/crud/platform_flavors.py +++ b/alws/crud/platform_flavors.py @@ -35,11 +35,9 @@ async def delete_flavour(db, pf_id: int) -> models.PlatformFlavour: ) await db.execute( - delete(models.PlatformFlavour).where( - models.PlatformFlavour.id == pf_id - ) + delete(models.PlatformFlavour).where(models.PlatformFlavour.id == pf_id) ) - await db.commit() + await db.flush() async def create_flavour(db, flavour: CreateFlavour) -> models.PlatformFlavour: @@ -65,7 +63,7 @@ async def create_flavour(db, flavour: CreateFlavour) -> models.PlatformFlavour: db.add(db_repo) db_flavour.repos.append(db_repo) db.add(db_flavour) - await db.commit() + await db.flush() db_flavour = await db.execute( select(models.PlatformFlavour) .where(models.PlatformFlavour.name == flavour.name) @@ -98,7 +96,7 @@ async def update_flavour(db, flavour: UpdateFlavour) -> models.PlatformFlavour: db.add(db_repo) db_flavour.repos.append(db_repo) db.add(db_flavour) - await db.commit() + await db.flush() return await find_flavour_by_name(db, flavour.name) diff --git a/alws/crud/products.py b/alws/crud/products.py index 322f82acf..9be01cbe5 100644 --- a/alws/crud/products.py +++ b/alws/crud/products.py @@ -58,9 +58,7 @@ async def create_product( if teams: team = teams[0] else: - team_payload = TeamCreate( - team_name=team_name, user_id=payload.owner_id - ) + team_payload = TeamCreate(team_name=team_name, user_id=payload.owner_id) team = await create_team(db, team_payload, flush=True) team_roles = await create_team_roles(db, team_name) @@ -84,20 +82,18 @@ async def create_product( for platform in product.platforms: platform_name = platform.name.lower() - repo_tasks.extend( - ( - create_product_repo( - pulp_client, - product.name, - owner.username, - platform_name, - arch, - is_debug, - ) - for arch in platform.arch_list - for is_debug in (True, False) + repo_tasks.extend(( + create_product_repo( + pulp_client, + product.name, + owner.username, + platform_name, + arch, + is_debug, ) - ) + for arch in platform.arch_list + for is_debug in (True, False) + )) repo_tasks.append( create_product_repo( pulp_client, @@ -241,12 +237,10 @@ async def remove_product( .join(models.BuildTask) .where( models.Build.team_id == db_product.team_id, - models.BuildTask.status.in_( - [ - BuildTaskStatus.IDLE, - BuildTaskStatus.STARTED, - ] - ), + 
models.BuildTask.status.in_([
+                    BuildTaskStatus.IDLE,
+                    BuildTaskStatus.STARTED,
+                ]),
             )
         )
     )
@@ -265,7 +259,7 @@
     )
     delete_tasks = []
     all_product_distros = await pulp_client.get_rpm_distros(
-        include_fields=["pulp_href"],
+        include_fields=["pulp_href", "name"],
         **{"name__startswith": db_product.pulp_base_distro_name},
     )
     for product_repo in db_product.repositories:
@@ -283,7 +277,7 @@
     )
     await asyncio.gather(*delete_tasks)
     await db.delete(db_product)
-    await db.commit()
+    await db.flush()


 async def modify_product(
@@ -293,48 +287,64 @@
     user_id: int,
     modification: str,
 ):
-    async with db.begin():
-        db_product = await get_products(db, product_name=product)
-        db_user = await get_user(db, user_id=user_id)
-        if not db_user:
-            raise DataNotFoundError(f"User={user_id} doesn't exist")
-        if not can_perform(db_product, db_user, actions.ReleaseToProduct.name):
-            raise PermissionDenied(
-                'User has no permissions '
-                f'to modify the product "{db_product.name}"'
-            )
+    db_product = await get_products(db, product_name=product)
+    db_user = await get_user(db, user_id=user_id)
+    if not db_user:
+        raise DataNotFoundError(f"User={user_id} doesn't exist")
+    if not can_perform(db_product, db_user, actions.ReleaseToProduct.name):
+        raise PermissionDenied(
+            'User has no permissions '
+            f'to modify the product "{db_product.name}"'
+        )

-        db_build = await db.execute(
-            select(models.Build)
-            .where(
-                models.Build.id == build_id,
-            )
-            .options(
-                selectinload(models.Build.repos),
-                selectinload(models.Build.tasks).selectinload(
-                    models.BuildTask.rpm_modules
-                ),
-                selectinload(models.Build.tasks).selectinload(
-                    models.BuildTask.platform
-                ),
-            ),
-        )
-        db_build = db_build.scalars().first()
+    db_build = await db.execute(
+        select(models.Build)
+        .where(
+            models.Build.id == build_id,
+        )
+        .options(
+            selectinload(models.Build.repos),
+            selectinload(models.Build.tasks).selectinload(
+                models.BuildTask.rpm_modules
+            ),
+            selectinload(models.Build.tasks).selectinload(
+                models.BuildTask.platform
+            ),
+        ),
+    )
+    db_build = db_build.scalars().first()

-        if modification == 'add':
-            if db_build in db_product.builds:
-                error_msg = (
-                    f"Can't add build {build_id} to {product} "
-                    "as it's already part of the product"
-                )
-                raise ProductError(error_msg)
-        if modification == 'remove':
-            if db_build not in db_product.builds:
-                error_msg = (
-                    f"Can't remove build {build_id} "
-                    f"from {product} as it's not part "
-                    "of the product"
-                )
-                raise ProductError(error_msg)
+    if modification == 'add':
+        if db_build in db_product.builds:
+            error_msg = (
+                f"Can't add build {build_id} to {product} "
+                "as it's already part of the product"
+            )
+            raise ProductError(error_msg)
+    if modification == 'remove':
+        if db_build not in db_product.builds:
+            error_msg = (
+                f"Can't remove build {build_id} "
+                f"from {product} as it's not part "
+                "of the product"
+            )
+            raise ProductError(error_msg)
+    await db.flush()
     perform_product_modification.send(db_build.id, db_product.id, modification)
diff --git a/alws/crud/release.py b/alws/crud/release.py
index 6342493f7..c248a72ec 100644
--- a/alws/crud/release.py
+++ b/alws/crud/release.py
@@ -112,7 +112,7 @@ async def
create_release( release_id=release.id, statistics=releaser.stats.copy() ) db.add(stats) - await db.commit() + await db.flush() return await releaser.get_final_release(release.id) @@ -138,7 +138,7 @@ async def update_release( new_stats = copy.deepcopy(perf_stat.statistics) new_stats.update(**releaser.stats) perf_stat.statistics = new_stats - await db.commit() + await db.flush() return await releaser.get_final_release(release_id) @@ -163,7 +163,7 @@ async def commit_release( new_stats = copy.deepcopy(perf_stat.statistics) new_stats.update(**releaser.stats) perf_stat.statistics = new_stats - await db.commit() + await db.flush() async def revert_release( @@ -184,33 +184,30 @@ async def remove_release( release_id: int, user: models.User, ): - async with db.begin(): - release = ( - ( - await db.execute( - select(models.Release).where( - models.Release.id == release_id, - models.Release.status == ReleaseStatus.SCHEDULED, - ) + release = ( + ( + await db.execute( + select(models.Release).where( + models.Release.id == release_id, + models.Release.status == ReleaseStatus.SCHEDULED, ) ) - .scalars() - .first() ) - if release is None: - return { - 'message': ( - 'There is no scheduled release plan with ID ' - f'"{release_id}"' - ), - } - if not can_perform(release, user, actions.DeleteRelease.name): - raise PermissionDenied( - "User does not have permissions to delete this release" - ) - await db.delete(release) + .scalars() + .first() + ) + if release is None: return { 'message': ( - f'Scheduled release with ID "{release_id}" is removed' + 'There is no scheduled release plan with ID ' f'"{release_id}"' ), } + if not can_perform(release, user, actions.DeleteRelease.name): + raise PermissionDenied( + "User does not have permissions to delete this release" + ) + await db.delete(release) + await db.flush() + return { + 'message': (f'Scheduled release with ID "{release_id}" is removed'), + } diff --git a/alws/crud/repo_exporter.py b/alws/crud/repo_exporter.py index fde505dd5..776a29776 100644 --- a/alws/crud/repo_exporter.py +++ b/alws/crud/repo_exporter.py @@ -2,7 +2,7 @@ import typing from pathlib import Path -from sqlalchemy import update, delete, insert +from sqlalchemy import delete, insert, update from sqlalchemy.future import select from sqlalchemy.orm import Session @@ -12,119 +12,126 @@ from alws.utils.pulp_client import PulpClient -async def create_pulp_exporters_to_fs(db: Session, - repo_list: typing.List[int]): - query = select(models.Repository).where( - models.Repository.id.in_(repo_list)) +async def create_pulp_exporters_to_fs(db: Session, repo_list: typing.List[int]): + query = select(models.Repository).where(models.Repository.id.in_(repo_list)) export_name = ','.join((str(r) for r in repo_list)) export_repos = [] - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) - async with db.begin(): - et_inserted = await db.execute( - insert(models.ExportTask).values( - name=export_name, status=ExportStatus.NEW) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) + et_inserted = await db.execute( + insert(models.ExportTask).values( + name=export_name, status=ExportStatus.NEW ) - export_task_pk = et_inserted.inserted_primary_key[0] - response = await db.execute(query) - await db.commit() + ) + export_task_pk = et_inserted.inserted_primary_key[0] + response = await db.execute(query) + await db.flush() for repo in response.scalars().all(): - export_path = str(Path( - settings.pulp_export_path, repo.export_path, 
'Packages')) + export_path = str( + Path(settings.pulp_export_path, repo.export_path, 'Packages') + ) fs_exporter_href = await pulp_client.create_filesystem_exporter( - f'{repo.name}-{repo.arch}', export_path) + f'{repo.name}-{repo.arch}', export_path + ) export_repos.append({ 'path': export_path, 'exported_id': export_task_pk, 'repository_id': repo.id, - 'fs_exporter_href': fs_exporter_href + 'fs_exporter_href': fs_exporter_href, }) if export_repos: - async with db.begin(): - await db.execute( - insert(models.RepoExporter), export_repos) - await db.commit() + await db.execute(insert(models.RepoExporter), export_repos) + await db.flush() return export_task_pk -async def execute_pulp_exporters_to_fs(db: Session, - export_id: int): - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) +async def execute_pulp_exporters_to_fs(db: Session, export_id: int): + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) now = datetime.datetime.utcnow() - query = select( - models.RepoExporter.fs_exporter_href, - models.RepoExporter.path, - models.Repository.pulp_href, - models.Repository.url - ).where( - models.RepoExporter.exported_id == export_id - ).join( - models.Repository - ).filter( - models.RepoExporter.repository_id == models.Repository.id) - async with db.begin(): - await db.execute( - update(models.ExportTask).where( - models.ExportTask.id == export_id).values( - exported_at=now, status=ExportStatus.IN_PROGRESS)) - response = await db.execute(query) - await db.commit() + query = ( + select( + models.RepoExporter.fs_exporter_href, + models.RepoExporter.path, + models.Repository.pulp_href, + models.Repository.url, + ) + .where(models.RepoExporter.exported_id == export_id) + .join(models.Repository) + .filter(models.RepoExporter.repository_id == models.Repository.id) + ) + await db.execute( + update(models.ExportTask) + .where(models.ExportTask.id == export_id) + .values(exported_at=now, status=ExportStatus.IN_PROGRESS) + ) + response = await db.execute(query) + await db.flush() exported_data = {} for fs_exporter_href, fse_path, pulp_href, repo_url in response: latest_version_href = await pulp_client.get_repo_latest_version( - pulp_href) + pulp_href + ) await pulp_client.export_to_filesystem( - fs_exporter_href, latest_version_href) + fs_exporter_href, latest_version_href + ) exported_data[fse_path] = repo_url await pulp_client.delete_filesystem_exporter(fs_exporter_href) - async with db.begin(): - await db.execute( - update(models.ExportTask).where( - models.ExportTask.id == export_id).values( - exported_at=now, status=ExportStatus.COMPLETED)) - await db.execute( - delete(models.RepoExporter).where( - models.RepoExporter.exported_id == export_id) + await db.execute( + update(models.ExportTask) + .where(models.ExportTask.id == export_id) + .values(exported_at=now, status=ExportStatus.COMPLETED) + ) + await db.execute( + delete(models.RepoExporter).where( + models.RepoExporter.exported_id == export_id ) - await db.commit() + ) + await db.flush() return exported_data async def create_filesystem_exporter(name: str, path: str) -> str: - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) result = await pulp_client.create_filesystem_exporter(name, path) return result async def list_filesystem_exporters() -> list: - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - 
settings.pulp_password) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) result = await pulp_client.list_filesystem_exporters() return result async def get_filesystem_exporter(fse_pulp_href: str): - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) result = await pulp_client.get_filesystem_exporter(fse_pulp_href) return result -async def update_filesystem_exporter(fse_pulp_href: str, - fse_name: str, - fse_path: str): - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) +async def update_filesystem_exporter( + fse_pulp_href: str, fse_name: str, fse_path: str +): + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) result = await pulp_client.update_filesystem_exporter( - fse_pulp_href, fse_name, fse_path) + fse_pulp_href, fse_name, fse_path + ) return result async def delete_filesystem_exporter(fse_pulp_href: str): - pulp_client = PulpClient(settings.pulp_host, settings.pulp_user, - settings.pulp_password) - result = await pulp_client.delete_filesystem_exporter( - fse_pulp_href) + pulp_client = PulpClient( + settings.pulp_host, settings.pulp_user, settings.pulp_password + ) + result = await pulp_client.delete_filesystem_exporter(fse_pulp_href) return result diff --git a/alws/crud/repository.py b/alws/crud/repository.py index 5fe71db00..d7ebe15a8 100644 --- a/alws/crud/repository.py +++ b/alws/crud/repository.py @@ -53,24 +53,23 @@ async def create_repositories( ] query = sqlalchemy.or_(*query_list).with_for_update() repos_mapping = {} - async with db.begin(): - repos_result = await db.execute(query) - for repo in repos_result.scalars().all(): - repo_key = f'{repo.name}-{repo.arch}-{repo.debug}' - repos_mapping[repo_key] = repo - - for repo_item in payload: - repo_item_dict = repo_item.model_dump() - repo_key = f'{repo_item.name}-{repo_item.arch}-{repo_item.debug}' - if repo_key not in repos_mapping: - repos_mapping[repo_key] = models.Repository(**repo_item_dict) - else: - repo = repos_mapping[repo_key] - for field, value in repo_item_dict.items(): - setattr(repo, field, value) - - db.add_all(repos_mapping.values()) - await db.commit() + repos_result = await db.execute(query) + for repo in repos_result.scalars().all(): + repo_key = f'{repo.name}-{repo.arch}-{repo.debug}' + repos_mapping[repo_key] = repo + + for repo_item in payload: + repo_item_dict = repo_item.model_dump() + repo_key = f'{repo_item.name}-{repo_item.arch}-{repo_item.debug}' + if repo_key not in repos_mapping: + repos_mapping[repo_key] = models.Repository(**repo_item_dict) + else: + repo = repos_mapping[repo_key] + for field, value in repo_item_dict.items(): + setattr(repo, field, value) + + db.add_all(repos_mapping.values()) + await db.flush() for repo in repos_mapping.values(): await db.refresh(repo) @@ -88,12 +87,12 @@ async def create_repository( models.Repository.type == payload.type, models.Repository.debug == payload.debug, ) - async with db.begin(): - result = await db.execute(query) - if result.scalars().first(): - raise ValueError('Repository already exists') - repository = models.Repository(**payload.model_dump()) - db.add(repository) + result = await db.execute(query) + if result.scalars().first(): + raise ValueError('Repository already exists') + repository = models.Repository(**payload.model_dump()) + db.add(repository) + await db.flush() await 
db.refresh(repository) return repository @@ -112,9 +111,8 @@ async def search_repository( query = query.where(models.Repository.type == value) elif key == 'debug': query = query.where(models.Repository.debug == value) - async with db.begin(): - result = await db.execute(query) - return result.scalars().first() + result = await db.execute(query) + return result.scalars().first() async def update_repository( @@ -122,31 +120,29 @@ async def update_repository( repository_id: int, payload: repository_schema.RepositoryUpdate, ) -> models.Repository: - async with db.begin(): - db_repo = await db.execute( - select(models.Repository).where( - models.Repository.id == repository_id, - ) + db_repo = await db.execute( + select(models.Repository).where( + models.Repository.id == repository_id, ) - db_repo = db_repo.scalars().first() - for field, value in payload.model_dump( - exclude_none=True, exclude_unset=True - ).items(): - setattr(db_repo, field, value) - db.add(db_repo) - await db.commit() + ) + db_repo = db_repo.scalars().first() + for field, value in payload.model_dump( + exclude_none=True, exclude_unset=True + ).items(): + setattr(db_repo, field, value) + db.add(db_repo) + await db.flush() await db.refresh(db_repo) return db_repo async def delete_repository(db: Session, repository_id: int): - async with db.begin(): - await db.execute( - delete(models.Repository).where( - models.Repository.id == repository_id, - ) + await db.execute( + delete(models.Repository).where( + models.Repository.id == repository_id, ) - await db.commit() + ) + await db.flush() async def add_to_platform( @@ -175,7 +171,7 @@ async def add_to_platform( platform.repos = new_repos_list db.add(platform) db.add_all(new_repos_list) - await db.commit() + await db.flush() platform_result = await db.execute( select(models.Platform) @@ -196,7 +192,7 @@ async def remove_from_platform( models.PlatformRepo.c.repository_id.in_(repository_ids), ) ) - await db.commit() + await db.flush() platform_result = await db.execute( select(models.Platform) @@ -247,7 +243,7 @@ async def create_repository_remote( pulp_href=remote_href, ) db.add(remote) - await db.commit() + await db.flush() await db.refresh(remote) return remote @@ -257,17 +253,16 @@ async def update_repository_remote( remote_id: int, payload: remote_schema.RemoteUpdate, ) -> models.RepositoryRemote: - async with db.begin(): - result = await db.execute( - select(models.RepositoryRemote).where( - models.RepositoryRemote.id == remote_id - ) + result = await db.execute( + select(models.RepositoryRemote).where( + models.RepositoryRemote.id == remote_id ) - remote = result.scalars().first() - for key, value in payload.model_dump().items(): - setattr(remote, key, value) - db.add(remote) - await db.commit() + ) + remote = result.scalars().first() + for key, value in payload.model_dump().items(): + setattr(remote, key, value) + db.add(remote) + await db.flush() await db.refresh(remote) return remote @@ -278,9 +273,8 @@ async def sync_repo_from_remote( payload: repository_schema.RepositorySync, wait_for_result: bool = False, ): - async with db.begin(): - repository = select(models.Repository).get(repository_id) - remote = select(models.RepositoryRemote).get(payload.remote_id) + repository = select(models.Repository).get(repository_id) + remote = select(models.RepositoryRemote).get(payload.remote_id) pulp_client = PulpClient( settings.pulp_host, diff --git a/alws/crud/roles.py b/alws/crud/roles.py index fc7ed7468..1f8119de1 100644 --- a/alws/crud/roles.py +++ b/alws/crud/roles.py @@ -1,10 
+1,10 @@ import typing +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import selectinload from alws import models -from alws.database import Session from alws.perms.roles import RolesList __all__ = [ @@ -13,11 +13,11 @@ ] -async def get_roles(db: Session) -> typing.List[models.UserRole]: +async def get_roles(db: AsyncSession) -> typing.List[models.UserRole]: return (await db.execute(select(models.UserRole))).scalars().all() -async def fix_roles_actions(db: Session, commit: bool = False): +async def fix_roles_actions(db: AsyncSession, commit: bool = False): actions = (await db.execute(select(models.UserAction))).scalars().all() roles = ( ( @@ -56,7 +56,4 @@ async def fix_roles_actions(db: Session, commit: bool = False): new_roles.append(role) db.add_all(new_roles) - if commit: - await db.commit() - else: - await db.flush() + await db.flush() diff --git a/alws/crud/sign_key.py b/alws/crud/sign_key.py index da02dc0bf..2a8b5e01c 100644 --- a/alws/crud/sign_key.py +++ b/alws/crud/sign_key.py @@ -61,7 +61,7 @@ async def create_sign_key( ) sign_key = models.SignKey(**payload.model_dump()) db.add(sign_key) - await db.commit() + await db.flush() await db.refresh(sign_key) return sign_key @@ -75,6 +75,6 @@ async def update_sign_key( for k, v in payload.model_dump().items(): setattr(sign_key, k, v) db.add(sign_key) - await db.commit() + await db.flush() await db.refresh(sign_key) return sign_key diff --git a/alws/crud/sign_task.py b/alws/crud/sign_task.py index d27979c10..97f664dc1 100644 --- a/alws/crud/sign_task.py +++ b/alws/crud/sign_task.py @@ -6,6 +6,7 @@ from collections import defaultdict from dataclasses import dataclass +from fastapi_sqla import open_async_session from sqlalchemy import or_, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select @@ -15,7 +16,7 @@ from alws.config import settings from alws.constants import GenKeyStatus, SignStatus from alws.crud.user import get_user -from alws.database import Session +from alws.dependencies import get_async_db_key from alws.errors import ( BuildAlreadySignedError, DataNotFoundError, @@ -122,7 +123,7 @@ async def create_gen_key_task( product_id=product.id, ) db.add(gen_key_task) - await db.commit() + await db.flush() await db.refresh(gen_key_task) return await get_gen_key_task(db=db, gen_key_task_id=gen_key_task.id) @@ -132,68 +133,67 @@ async def create_sign_task( payload: sign_schema.SignTaskCreate, user_id: int, ) -> models.SignTask: - async with db.begin(): - user = await get_user(db, user_id) - builds = await db.execute( - select(models.Build) - .where(models.Build.id == payload.build_id) - .options( - selectinload(models.Build.source_rpms), - selectinload(models.Build.binary_rpms), - selectinload(models.Build.owner) - .selectinload(models.User.roles) - .selectinload(models.UserRole.actions), - selectinload(models.Build.team) - .selectinload(models.Team.roles) - .selectinload(models.UserRole.actions), - ) + user = await get_user(db, user_id) + builds = await db.execute( + select(models.Build) + .where(models.Build.id == payload.build_id) + .options( + selectinload(models.Build.source_rpms), + selectinload(models.Build.binary_rpms), + selectinload(models.Build.owner) + .selectinload(models.User.roles) + .selectinload(models.UserRole.actions), + selectinload(models.Build.team) + .selectinload(models.Team.roles) + .selectinload(models.UserRole.actions), ) - build = builds.scalars().first() - if not build: - raise DataNotFoundError( - f"Build with 
ID {payload.build_id} does not exist" - ) - if build.signed: - raise BuildAlreadySignedError( - f"Build with ID {payload.build_id} is already signed" - ) - if not build.source_rpms or not build.binary_rpms: - raise ValueError( - f"No built packages in build with ID {payload.build_id}" - ) - sign_keys = await db.execute( - select(models.SignKey) - .where(models.SignKey.id == payload.sign_key_id) - .options( - selectinload(models.SignKey.owner), - selectinload(models.SignKey.roles).selectinload( - models.UserRole.actions - ), - ) + ) + build = builds.scalars().first() + if not build: + raise DataNotFoundError( + f"Build with ID {payload.build_id} does not exist" ) - sign_key = sign_keys.scalars().first() - - if not sign_key: - raise DataNotFoundError( - f"Sign key with ID {payload.sign_key_id} does not exist" - ) + if build.signed: + raise BuildAlreadySignedError( + f"Build with ID {payload.build_id} is already signed" + ) + if not build.source_rpms or not build.binary_rpms: + raise ValueError( + f"No built packages in build with ID {payload.build_id}" + ) + sign_keys = await db.execute( + select(models.SignKey) + .where(models.SignKey.id == payload.sign_key_id) + .options( + selectinload(models.SignKey.owner), + selectinload(models.SignKey.roles).selectinload( + models.UserRole.actions + ), + ) + ) + sign_key = sign_keys.scalars().first() - if not can_perform(build, user, actions.SignBuild.name): - raise PermissionDenied( - "User does not have permissions to sign this build" - ) - if not can_perform(sign_key, user, actions.UseSignKey.name): - raise PermissionDenied( - "User does not have permissions to use this sign key" - ) + if not sign_key: + raise DataNotFoundError( + f"Sign key with ID {payload.sign_key_id} does not exist" + ) - sign_task = models.SignTask( - status=SignStatus.IDLE, - build_id=payload.build_id, - sign_key_id=payload.sign_key_id, + if not can_perform(build, user, actions.SignBuild.name): + raise PermissionDenied( + "User does not have permissions to sign this build" ) - db.add(sign_task) - await db.commit() + if not can_perform(sign_key, user, actions.UseSignKey.name): + raise PermissionDenied( + "User does not have permissions to use this sign key" + ) + + sign_task = models.SignTask( + status=SignStatus.IDLE, + build_id=payload.build_id, + sign_key_id=payload.sign_key_id, + ) + db.add(sign_task) + await db.flush() await db.refresh(sign_task) sign_tasks = await db.execute( select(models.SignTask) @@ -206,23 +206,22 @@ async def create_sign_task( async def get_available_gen_key_task( db: AsyncSession, ) -> typing.Optional[models.GenKeyTask]: - async with db.begin(): - gen_key_tasks = await db.execute( - select(models.GenKeyTask) - .where(models.GenKeyTask.status == GenKeyStatus.IDLE) - .options( - selectinload(models.GenKeyTask.product).selectinload( - models.Product.owner - ), - ) + gen_key_tasks = await db.execute( + select(models.GenKeyTask) + .where(models.GenKeyTask.status == GenKeyStatus.IDLE) + .options( + selectinload(models.GenKeyTask.product).selectinload( + models.Product.owner + ), + ) + ) + gen_key_task = gen_key_tasks.scalars().first() + if gen_key_task: + await db.execute( + update(models.GenKeyTask) + .where(models.GenKeyTask.id == gen_key_task.id) + .values(status=GenKeyStatus.IN_PROGRESS) ) - gen_key_task = gen_key_tasks.scalars().first() - if gen_key_task: - await db.execute( - update(models.GenKeyTask) - .where(models.GenKeyTask.id == gen_key_task.id) - .values(status=GenKeyStatus.IN_PROGRESS) - ) if gen_key_task: await db.refresh(gen_key_task) 
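Note on the pattern these hunks repeat: the `async with db.begin()` blocks and explicit `await db.commit()` calls are dropped, and the CRUD helpers only `await db.flush()`, because the transaction now belongs to whoever opened the session. A minimal sketch of that ownership model under fastapi-sqla, using only the `open_async_session` / `get_async_db_key` calls this patch itself introduces (the GenKeyTask payload below is illustrative, not from the patch):

    from fastapi_sqla import open_async_session

    from alws import models
    from alws.dependencies import get_async_db_key


    async def sketch_session_ownership():
        # open_async_session() commits on clean exit and rolls back if the
        # block raises, so nothing inside the block calls commit() directly.
        async with open_async_session(key=get_async_db_key()) as db:
            gen_key_task = models.GenKeyTask(product_id=1)  # illustrative payload
            db.add(gen_key_task)
            await db.flush()    # emits the INSERT, populates gen_key_task.id
            await db.refresh(gen_key_task)
            # leaving the block commits; an exception rolls everything back

The same pattern applies inside the dramatiq actors converted further down, with the extra requirement (visible in those hunks) that `setup_all()` runs first so fastapi-sqla knows about the configured engines.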
return gen_key_task @@ -294,18 +293,14 @@ async def get_available_sign_task( platform_id=src_rpm.artifact.build_task.platform_id, ) repo = repo_mapping[repo_unique_key] - packages.append( - { - "id": src_rpm.artifact.id, - "name": src_rpm.artifact.name, - "cas_hash": src_rpm.artifact.cas_hash, - "arch": "src", - "type": "rpm", - "download_url": __get_package_url( - repo.url, src_rpm.artifact.name - ), - } - ) + packages.append({ + "id": src_rpm.artifact.id, + "name": src_rpm.artifact.name, + "cas_hash": src_rpm.artifact.cas_hash, + "arch": "src", + "type": "rpm", + "download_url": __get_package_url(repo.url, src_rpm.artifact.name), + }) for binary_rpm in build_binary_rpms: debug = is_debuginfo_rpm(binary_rpm.artifact.name) @@ -315,20 +310,18 @@ async def get_available_sign_task( platform_id=binary_rpm.artifact.build_task.platform_id, ) repo = repo_mapping[repo_unique_key] - packages.append( - { - "id": binary_rpm.artifact.id, - "name": binary_rpm.artifact.name, - "cas_hash": binary_rpm.artifact.cas_hash, - "arch": binary_rpm.artifact.build_task.arch, - "type": "rpm", - "download_url": __get_package_url( - repo.url, binary_rpm.artifact.name - ), - } - ) + packages.append({ + "id": binary_rpm.artifact.id, + "name": binary_rpm.artifact.name, + "cas_hash": binary_rpm.artifact.cas_hash, + "arch": binary_rpm.artifact.build_task.arch, + "type": "rpm", + "download_url": __get_package_url( + repo.url, binary_rpm.artifact.name + ), + }) sign_task_payload["packages"] = packages - await db.commit() + await db.flush() return sign_task_payload @@ -354,9 +347,7 @@ async def complete_gen_key_task( gen_key_task_id=gen_key_task_id, ) if not gen_key_task: - raise GenKeyError( - f'Gen key task with id "{gen_key_task_id}" is absent' - ) + raise GenKeyError(f'Gen key task with id "{gen_key_task_id}" is absent') if payload.success: task_status = GenKeyStatus.COMPLETED error_message = None @@ -423,7 +414,7 @@ async def complete_gen_key_task( roles=roles, ) db.add(sign_key) - await db.commit() + await db.flush() await db.refresh(sign_key) return sign_key @@ -479,7 +470,7 @@ async def __failed_post_processing( task.stats = statistics task.status = SignStatus.FAILED db.add(task) - await db.commit() + await db.flush() await db.refresh(task) return task @@ -500,7 +491,7 @@ async def __failed_post_processing( srpms_mapping = defaultdict(list) logging.info("Start processing task %s", sign_task_id) - async with Session() as db, db.begin(): + async with open_async_session(key=get_async_db_key()) as db: builds = await db.execute( select(models.Build) .where(models.Build.id == payload.build_id) @@ -590,16 +581,12 @@ async def __failed_post_processing( [pkg.sha256 for pkg in packages_to_convert.values()], ) logging.info("Start processing packages for task %s", sign_task_id) - results = await asyncio.gather( - *( - __process_single_package(package, pulp_db_packages) - for package in packages_to_convert.values() - ) - ) + results = await asyncio.gather(*( + __process_single_package(package, pulp_db_packages) + for package in packages_to_convert.values() + )) converted_packages = dict(results) - logging.info( - "Finish processing packages for task %s", sign_task_id - ) + logging.info("Finish processing packages for task %s", sign_task_id) logging.info( "Updating href and add sign key for every srpm in project" ) @@ -651,12 +638,10 @@ async def __failed_post_processing( sign_task = await __failed_post_processing(sign_task, stats) return sign_task logging.info("Start modify repository for task %s", sign_task_id) - await 
asyncio.gather( - *( - pulp_client.modify_repository(repo_href, add=packages) - for repo_href, packages in packages_to_add.items() - ) - ) + await asyncio.gather(*( + pulp_client.modify_repository(repo_href, add=packages) + for repo_href, packages in packages_to_add.items() + )) logging.info("Finish modify repository for task %s", sign_task_id) if payload.success and not sign_failed: diff --git a/alws/crud/teams.py b/alws/crud/teams.py index 837106cd7..2f7877274 100644 --- a/alws/crud/teams.py +++ b/alws/crud/teams.py @@ -22,7 +22,6 @@ ) from alws.schemas import team_schema - __all__ = [ 'create_team', 'create_team_roles', @@ -36,20 +35,35 @@ def get_team_role_name(team_name: str, role_name: str): async def create_team_roles(session: AsyncSession, team_name: str): - required_roles = (Contributor, Manager, Observer, ProductMaintainer, Signer) - new_role_names = [get_team_role_name(team_name, role.name) - for role in required_roles] - - existing_roles = (await session.execute(select(UserRole).where( - UserRole.name.in_(new_role_names)))).scalars().all() + required_roles = ( + Contributor, + Manager, + Observer, + ProductMaintainer, + Signer, + ) + new_role_names = [ + get_team_role_name(team_name, role.name) for role in required_roles + ] + + existing_roles = ( + ( + await session.execute( + select(UserRole).where(UserRole.name.in_(new_role_names)) + ) + ) + .scalars() + .all() + ) existing_role_names = {r.name for r in existing_roles} if len(new_role_names) == len(existing_roles): return existing_roles await ensure_all_actions_exist(session) - existing_actions = (await session.execute( - select(UserAction))).scalars().all() + existing_actions = ( + (await session.execute(select(UserAction))).scalars().all() + ) new_roles = [] for role in required_roles: @@ -79,20 +93,36 @@ async def create_team( payload: team_schema.TeamCreate, flush: bool = False, ) -> Team: - owner = (await session.execute(select(User).where( - User.id == payload.user_id).options( - selectinload(User.roles) - ))).scalars().first() + owner = ( + ( + await session.execute( + select(User) + .where(User.id == payload.user_id) + .options(selectinload(User.roles)) + ) + ) + .scalars() + .first() + ) if not owner: raise TeamError(f'Unknown user ID: {payload.user_id}') - existing_team = (await session.execute(select(Team).where( - Team.name == payload.team_name).options( - selectinload(Team.roles), - selectinload(Team.owner), - selectinload(Team.members), - ))).scalars().first() + existing_team = ( + ( + await session.execute( + select(Team) + .where(Team.name == payload.team_name) + .options( + selectinload(Team.roles), + selectinload(Team.owner), + selectinload(Team.members), + ) + ) + ) + .scalars() + .first() + ) if existing_team: raise TeamError(f'Team={payload.team_name} already exist') @@ -109,10 +139,7 @@ async def create_team( session.add(new_team) session.add_all(team_roles) session.add(owner) - if flush: - await session.flush() - else: - await session.commit() + await session.flush() await session.refresh(new_team) return new_team @@ -125,11 +152,15 @@ async def get_teams( ) -> typing.Union[typing.List[Team], typing.Dict[str, typing.Any], Team]: def generate_query(count=False): - query = select(Team).order_by(Team.id.desc()).options( - selectinload(Team.members), - selectinload(Team.owner), - selectinload(Team.roles).selectinload(UserRole.actions), - selectinload(Team.products), + query = ( + select(Team) + .order_by(Team.id.desc()) + .options( + selectinload(Team.members), + selectinload(Team.owner), + 
selectinload(Team.roles).selectinload(UserRole.actions), + selectinload(Team.products), + ) ) if name: query = query.where(Team.name == name) @@ -162,26 +193,30 @@ async def update_members( modification: str, ) -> Team: items_to_update = [] - db_team = (await session.execute( - select(Team).where(Team.id == team_id).options( - selectinload(Team.members), - selectinload(Team.owner), - selectinload(Team.roles), - ), - )).scalars().first() + db_team = ( + ( + await session.execute( + select(Team) + .where(Team.id == team_id) + .options( + selectinload(Team.members), + selectinload(Team.owner), + selectinload(Team.roles), + ), + ) + ) + .scalars() + .first() + ) if not db_team: raise TeamError(f'Team={team_id} doesn`t exist') db_users = await session.execute( - select(User).where(User.id.in_(( - user.id for user in payload.members_to_update - ))).options( - selectinload(User.roles), - selectinload(User.oauth_accounts) - ), + select(User) + .where(User.id.in_((user.id for user in payload.members_to_update))) + .options(selectinload(User.roles), selectinload(User.oauth_accounts)), ) db_contributor_team_role = next( - role for role in db_team.roles - if Contributor.name in role.name + role for role in db_team.roles if Contributor.name in role.name ) operation = 'append' if modification == 'add' else 'remove' db_team_members_update_operation = getattr(db_team.members, operation) @@ -201,7 +236,7 @@ async def update_members( items_to_update.append(db_user) items_to_update.append(db_team) session.add_all(items_to_update) - await session.commit() + await session.flush() await session.refresh(db_team) return db_team @@ -215,4 +250,4 @@ async def remove_team(db: AsyncSession, team_id: int): f"Cannot delete Team={team_id}, team contains undeleted products", ) await db.delete(db_team) - await db.commit() + await db.flush() diff --git a/alws/crud/test.py b/alws/crud/test.py index 36872d9fe..ff9fe96d3 100644 --- a/alws/crud/test.py +++ b/alws/crud/test.py @@ -65,88 +65,81 @@ def get_repos_for_test_task(task: models.TestTask) -> List[dict]: async def get_available_test_tasks(session: AsyncSession) -> List[dict]: response = [] - async with session.begin(): - updated_tasks = [] - test_tasks = await session.execute( - select(models.TestTask) - .where( - models.TestTask.status == TestTaskStatus.CREATED, - ) - .with_for_update() - .options( - selectinload(models.TestTask.build_task) - .selectinload(models.BuildTask.build) - .selectinload(models.Build.repos), - selectinload(models.TestTask.build_task).selectinload( - models.BuildTask.ref - ), - selectinload(models.TestTask.build_task) - .selectinload(models.BuildTask.build) - .selectinload(models.Build.linked_builds), - selectinload(models.TestTask.build_task) - .selectinload(models.BuildTask.build) - .selectinload(models.Build.platform_flavors) - .selectinload(models.PlatformFlavour.repos), - selectinload(models.TestTask.build_task).selectinload( - models.BuildTask.platform - ), - selectinload(models.TestTask.build_task).selectinload( - models.BuildTask.rpm_modules - ), - ) - .order_by(models.TestTask.id.asc()) - .limit(10) + updated_tasks = [] + test_tasks = await session.execute( + select(models.TestTask) + .where( + models.TestTask.status == TestTaskStatus.CREATED, ) - for task in test_tasks.scalars().all(): - platform = task.build_task.platform - module_info = next( - ( - i - for i in task.build_task.rpm_modules - if '-devel' not in i.name - ), - None, - ) - module_name = module_info.name if module_info else None - module_stream = module_info.stream if 
module_info else None - module_version = module_info.version if module_info else None - repositories = get_repos_for_test_task(task) - task.status = TestTaskStatus.STARTED - task.scheduled_at = datetime.datetime.utcnow() - test_configuration = task.build_task.ref.test_configuration - payload = { - 'bs_task_id': task.id, - 'runner_type': 'docker', - 'dist_name': platform.test_dist_name, - 'dist_version': platform.distr_version, - 'dist_arch': task.env_arch, - 'package_name': task.package_name, - 'package_version': ( - f'{task.package_version}-{task.package_release}' - if task.package_release - else task.package_version - ), - 'callback_href': f'/api/v1/tests/{task.id}/result/', - } - if module_name and module_stream and module_version: - payload.update( - { - 'module_name': module_name, - 'module_stream': module_stream, - 'module_version': module_version, - } - ) - if repositories: - payload['repositories'] = repositories - if test_configuration: - if test_configuration['tests'] is None: - test_configuration['tests'] = [] - payload['test_configuration'] = test_configuration - response.append(payload) - updated_tasks.append(task) - if updated_tasks: - session.add_all(updated_tasks) - await session.commit() + .with_for_update() + .options( + selectinload(models.TestTask.build_task) + .selectinload(models.BuildTask.build) + .selectinload(models.Build.repos), + selectinload(models.TestTask.build_task).selectinload( + models.BuildTask.ref + ), + selectinload(models.TestTask.build_task) + .selectinload(models.BuildTask.build) + .selectinload(models.Build.linked_builds), + selectinload(models.TestTask.build_task) + .selectinload(models.BuildTask.build) + .selectinload(models.Build.platform_flavors) + .selectinload(models.PlatformFlavour.repos), + selectinload(models.TestTask.build_task).selectinload( + models.BuildTask.platform + ), + selectinload(models.TestTask.build_task).selectinload( + models.BuildTask.rpm_modules + ), + ) + .order_by(models.TestTask.id.asc()) + .limit(10) + ) + for task in test_tasks.scalars().all(): + platform = task.build_task.platform + module_info = next( + (i for i in task.build_task.rpm_modules if '-devel' not in i.name), + None, + ) + module_name = module_info.name if module_info else None + module_stream = module_info.stream if module_info else None + module_version = module_info.version if module_info else None + repositories = get_repos_for_test_task(task) + task.status = TestTaskStatus.STARTED + task.scheduled_at = datetime.datetime.utcnow() + test_configuration = task.build_task.ref.test_configuration + payload = { + 'bs_task_id': task.id, + 'runner_type': 'docker', + 'dist_name': platform.test_dist_name, + 'dist_version': platform.distr_version, + 'dist_arch': task.env_arch, + 'package_name': task.package_name, + 'package_version': ( + f'{task.package_version}-{task.package_release}' + if task.package_release + else task.package_version + ), + 'callback_href': f'/api/v1/tests/{task.id}/result/', + } + if module_name and module_stream and module_version: + payload.update({ + 'module_name': module_name, + 'module_stream': module_stream, + 'module_version': module_version, + }) + if repositories: + payload['repositories'] = repositories + if test_configuration: + if test_configuration['tests'] is None: + test_configuration['tests'] = [] + payload['test_configuration'] = test_configuration + response.append(payload) + updated_tasks.append(task) + if updated_tasks: + session.add_all(updated_tasks) + await session.flush() return response @@ -177,23 +170,22 @@ async 
def __get_log_repository( async def create_test_tasks_for_build_id(db: AsyncSession, build_id: int): - async with db.begin(): - # We get all build_tasks with the same build_id - # and whose status is COMPLETED - build_task_ids = ( - ( - await db.execute( - select(models.BuildTask.id).where( - models.BuildTask.build_id == build_id, - models.BuildTask.status == BuildTaskStatus.COMPLETED, - ) + # We get all build_tasks with the same build_id + # and whose status is COMPLETED + build_task_ids = ( + ( + await db.execute( + select(models.BuildTask.id).where( + models.BuildTask.build_id == build_id, + models.BuildTask.status == BuildTaskStatus.COMPLETED, ) ) - .scalars() - .all() ) + .scalars() + .all() + ) - test_log_repository = await __get_log_repository(db, build_id) + test_log_repository = await __get_log_repository(db, build_id) for build_task_id in build_task_ids: await create_test_tasks(db, build_task_id, test_log_repository.id) @@ -219,79 +211,78 @@ async def create_test_tasks( build_task_id: int, repository_id: int, ): - async with db.begin(): - build_task_query = await db.execute( - select(models.BuildTask) - .where( - models.BuildTask.id == build_task_id, - ) - .options(selectinload(models.BuildTask.artifacts)), + build_task_query = await db.execute( + select(models.BuildTask) + .where( + models.BuildTask.id == build_task_id, ) - build_task = build_task_query.scalars().first() + .options(selectinload(models.BuildTask.artifacts)), + ) + build_task = build_task_query.scalars().first() - latest_revision_query = select( - func.max(models.TestTask.revision), - ).filter( - models.TestTask.build_task_id == build_task_id, - ) - result = await db.execute(latest_revision_query) - latest_revision = result.scalars().first() - new_revision = 1 - if latest_revision: - new_revision = latest_revision + 1 - - test_tasks = [] - pulp_packages = get_pulp_packages(build_task.artifacts) - for artifact in build_task.artifacts: - if artifact.type != 'rpm': - continue - artifact_info = pulp_packages.get(artifact.href) - if not artifact_info: - logging.error( - 'Cannot get information about artifact %s with href %s', - artifact.name, - artifact.href, - ) - continue - if artifact_info.arch == 'src': - continue - task = models.TestTask( - build_task_id=build_task_id, - package_name=artifact_info.name, - package_version=artifact_info.version, - env_arch=build_task.arch, - status=TestTaskStatus.CREATED, - revision=new_revision, - repository_id=repository_id, + latest_revision_query = select( + func.max(models.TestTask.revision), + ).filter( + models.TestTask.build_task_id == build_task_id, + ) + result = await db.execute(latest_revision_query) + latest_revision = result.scalars().first() + new_revision = 1 + if latest_revision: + new_revision = latest_revision + 1 + + test_tasks = [] + pulp_packages = get_pulp_packages(build_task.artifacts) + for artifact in build_task.artifacts: + if artifact.type != 'rpm': + continue + artifact_info = pulp_packages.get(artifact.href) + if not artifact_info: + logging.error( + 'Cannot get information about artifact %s with href %s', + artifact.name, + artifact.href, ) - if artifact_info.release: - task.package_release = artifact_info.release - test_tasks.append(task) - if test_tasks: - db.add_all(test_tasks) - await db.commit() + continue + if artifact_info.arch == 'src': + continue + task = models.TestTask( + build_task_id=build_task_id, + package_name=artifact_info.name, + package_version=artifact_info.version, + env_arch=build_task.arch, + status=TestTaskStatus.CREATED, + 
revision=new_revision, + repository_id=repository_id, + ) + if artifact_info.release: + task.package_release = artifact_info.release + test_tasks.append(task) + if test_tasks: + db.add_all(test_tasks) + await db.flush() async def restart_build_tests(db: AsyncSession, build_id: int): # Note that this functionality is triggered by frontend, # which only restarts tests for those builds that already # had passed the tests - async with db.begin(): - # Set cancel_testing to False just in case - await db.execute( - update(models.Build) - .where(models.Build.id == build_id) - .values(cancel_testing=False) - ) - query = ( - select(models.BuildTask) - .options(joinedload(models.BuildTask.test_tasks)) - .where(models.BuildTask.build_id == build_id) - ) + # Set cancel_testing to False just in case + await db.execute( + update(models.Build) + .where(models.Build.id == build_id) + .values(cancel_testing=False) + ) + query = ( + select(models.BuildTask) + .options(joinedload(models.BuildTask.test_tasks)) + .where(models.BuildTask.build_id == build_id) + ) - build_tasks = await db.execute(query) - build_tasks = build_tasks.scalars().unique().all() - test_log_repository = await __get_log_repository(db, build_id) + build_tasks = await db.execute(query) + build_tasks = build_tasks.scalars().unique().all() + test_log_repository = await __get_log_repository(db, build_id) + await db.flush() for build_task in build_tasks: if not build_task.test_tasks: continue @@ -315,78 +306,77 @@ async def restart_build_tests(db: AsyncSession, build_id: int): async def restart_build_task_tests(db: AsyncSession, build_task_id: int): - async with db.begin(): - build_task = ( - ( - await db.execute( - select(models.BuildTask) - .where(models.BuildTask.id == build_task_id) - .options( - selectinload(models.BuildTask.build).selectinload( - models.Build.repos - ) + build_task = ( + ( + await db.execute( + select(models.BuildTask) + .where(models.BuildTask.id == build_task_id) + .options( + selectinload(models.BuildTask.build).selectinload( + models.Build.repos ) ) ) - .scalars() - .first() ) - test_log_repository = next( - (i for i in build_task.build.repos if i.type == 'test_log'), - None, + .scalars() + .first() + ) + test_log_repository = next( + (i for i in build_task.build.repos if i.type == 'test_log'), + None, + ) + if not test_log_repository: + raise ValueError( + 'Cannot create test tasks: the log repository is not found' ) - if not test_log_repository: - raise ValueError( - 'Cannot create test tasks: the log repository is not found' - ) await create_test_tasks(db, build_task_id, test_log_repository.id) async def cancel_build_tests(db: AsyncSession, build_id: int): - async with db.begin(): - # Set cancel_testing to True in db - await db.execute( - update(models.Build) - .where(models.Build.id == build_id) - .values(cancel_testing=True) - ) + # Set cancel_testing to True in db + await db.execute( + update(models.Build) + .where(models.Build.id == build_id) + .values(cancel_testing=True) + ) - build_task_ids = ( - ( - await db.execute( - select(models.BuildTask.id).where( - models.BuildTask.build_id == build_id - ) + build_task_ids = ( + ( + await db.execute( + select(models.BuildTask.id).where( + models.BuildTask.build_id == build_id ) ) - .scalars() - .all() ) + .scalars() + .all() + ) - # Set TestTaskStatus.CANCELLED for those that are still - # with status TestTaskStatus.CREATED - await db.execute( - update(models.TestTask) - .where( - models.TestTask.status == TestTaskStatus.CREATED, - 
models.TestTask.build_task_id.in_(build_task_ids), - ) - .values(status=TestTaskStatus.CANCELLED) + # Set TestTaskStatus.CANCELLED for those that are still + # with status TestTaskStatus.CREATED + await db.execute( + update(models.TestTask) + .where( + models.TestTask.status == TestTaskStatus.CREATED, + models.TestTask.build_task_id.in_(build_task_ids), ) + .values(status=TestTaskStatus.CANCELLED) + ) - started_test_tasks_ids = ( - ( - await db.execute( - select(models.TestTask.id).where( - models.TestTask.status == TestTaskStatus.STARTED, - models.TestTask.build_task_id.in_(build_task_ids), - ) + started_test_tasks_ids = ( + ( + await db.execute( + select(models.TestTask.id).where( + models.TestTask.status == TestTaskStatus.STARTED, + models.TestTask.build_task_id.in_(build_task_ids), ) ) - .scalars() - .all() ) + .scalars() + .all() + ) + await db.flush() # Tell ALTS to cancel those with TestTaskStatus.STARTED. ALTS # will notify statuses back when its done cancelling tests if started_test_tasks_ids: diff --git a/alws/crud/test_repository.py b/alws/crud/test_repository.py index ec5f1f42c..4fba38f5c 100644 --- a/alws/crud/test_repository.py +++ b/alws/crud/test_repository.py @@ -108,10 +108,7 @@ async def create_package_mapping( ) new_package.test_repository = test_repository session.add(new_package) - if flush: - await session.flush() - else: - await session.commit() + await session.flush() await session.refresh(new_package) return new_package @@ -139,7 +136,7 @@ async def bulk_create_package_mapping( if (pkg.package_name, pkg.folder_name) not in existing_packages ] session.add_all(new_packages) - await session.commit() + await session.flush() async def create_repository( @@ -167,10 +164,7 @@ async def create_repository( repository = models.TestRepository(**payload.model_dump()) session.add(repository) - if flush: - await session.flush() - else: - await session.commit() + await session.flush() await session.refresh(repository) return repository @@ -190,7 +184,7 @@ async def update_repository( for field, value in payload.model_dump().items(): setattr(db_repo, field, value) session.add(db_repo) - await session.commit() + await session.flush() await session.refresh(db_repo) return db_repo @@ -200,7 +194,7 @@ async def delete_package_mapping(session: AsyncSession, package_id: int): if not db_package: raise DataNotFoundError(f"Package={package_id} doesn`t exist") await session.delete(db_package) - await session.commit() + await session.flush() async def bulk_delete_package_mapping( @@ -214,7 +208,7 @@ async def bulk_delete_package_mapping( models.PackageTestRepository.test_repository_id == repository_id, ) ) - await session.commit() + await session.flush() async def delete_repository(session: AsyncSession, repository_id: int): @@ -224,4 +218,4 @@ async def delete_repository(session: AsyncSession, repository_id: int): f"Test repository={repository_id} doesn`t exist" ) await session.delete(db_repo) - await session.commit() + await session.flush() diff --git a/alws/crud/user.py b/alws/crud/user.py index fee66f935..f205e1042 100644 --- a/alws/crud/user.py +++ b/alws/crud/user.py @@ -222,19 +222,16 @@ async def check_valuable_artifacts(user_id: int, db: AsyncSession): user_artifacts['team_membership'] = len(user.teams) valuable_artifacts = [ - artifact - for artifact in user_artifacts - if user_artifacts[artifact] >= 1 + artifact for artifact in user_artifacts if user_artifacts[artifact] >= 1 ] return valuable_artifacts async def remove_user(user_id: int, db: AsyncSession): - async with 
db.begin(): - user = await get_user(db, user_id=user_id) - if not user: - raise UserError(f'User with ID {user_id} does not exist') - valuable_artifacts = await check_valuable_artifacts(user_id, db) + user = await get_user(db, user_id=user_id) + if not user: + raise UserError(f'User with ID {user_id} does not exist') + valuable_artifacts = await check_valuable_artifacts(user_id, db) if valuable_artifacts: err = f"Can't delete the user {user.username} because he/she " @@ -274,23 +271,22 @@ async def update_user( if v != None: setattr(user, k, v) db.add(user) - await db.commit() + await db.flush() await db.refresh(user) async def get_user_roles(db: AsyncSession, user_id: int): - async with db.begin(): - user = ( - ( - await db.execute( - select(models.User) - .where(models.User.id == user_id) - .options(selectinload(models.User.roles)) - ) + user = ( + ( + await db.execute( + select(models.User) + .where(models.User.id == user_id) + .options(selectinload(models.User.roles)) ) - .scalars() - .first() ) + .scalars() + .first() + ) if not user: raise UserError(f'User with ID {user_id} does not exist') @@ -341,27 +337,24 @@ async def add_roles( roles_ids: typing.List[int], current_user_id: int, ): - async with db.begin(): - user = await get_user(db, user_id) + user = await get_user(db, user_id) - if not await can_edit_teams_roles(db, roles_ids, current_user_id): - raise PermissionDenied( - "The user has no permissions to edit teams user roles" - ) + if not await can_edit_teams_roles(db, roles_ids, current_user_id): + raise PermissionDenied( + "The user has no permissions to edit teams user roles" + ) - add_roles = ( - ( - await db.execute( - select(models.UserRole).where( - models.UserRole.id.in_(roles_ids) - ) - ) + add_roles = ( + ( + await db.execute( + select(models.UserRole).where(models.UserRole.id.in_(roles_ids)) ) - .scalars() - .all() ) - user.roles.extend(add_roles) - db.add(user) + .scalars() + .all() + ) + user.roles.extend(add_roles) + db.add(user) async def remove_roles( @@ -370,34 +363,32 @@ async def remove_roles( roles_ids: typing.List[int], current_user_id: int, ): - async with db.begin(): - user = await get_user(db, user_id) + user = await get_user(db, user_id) - if not await can_edit_teams_roles(db, roles_ids, current_user_id): - raise PermissionDenied( - "The user has no permissions to edit teams user roles" - ) + if not await can_edit_teams_roles(db, roles_ids, current_user_id): + raise PermissionDenied( + "The user has no permissions to edit teams user roles" + ) - await db.execute( - delete(models.UserRoleMapping).where( - models.UserRoleMapping.c.role_id.in_(roles_ids), - models.UserRoleMapping.c.user_id == user_id, - ) + await db.execute( + delete(models.UserRoleMapping).where( + models.UserRoleMapping.c.role_id.in_(roles_ids), + models.UserRoleMapping.c.user_id == user_id, ) + ) async def get_user_teams(db: AsyncSession, user_id: int): - async with db.begin(): - user = ( - ( - await db.execute( - select(models.User) - .where(models.User.id == user_id) - .options(selectinload(models.User.teams)) - ) + user = ( + ( + await db.execute( + select(models.User) + .where(models.User.id == user_id) + .options(selectinload(models.User.teams)) ) - .scalars() - .first() ) + .scalars() + .first() + ) response = [{'id': team.id, 'name': team.name} for team in user.teams] return response diff --git a/alws/database.py b/alws/database.py index 491958e1c..cf24630af 100644 --- a/alws/database.py +++ b/alws/database.py @@ -1,34 +1,11 @@ # -*- mode:python; coding:utf-8; -*- # author: 
Vyacheslav Potoropin # created: 2021-06-22 -from sqlalchemy import MetaData, create_engine -from sqlalchemy.ext.asyncio import ( - AsyncAttrs, - async_sessionmaker, - create_async_engine, -) -from sqlalchemy.orm import DeclarativeBase, scoped_session, sessionmaker -from sqlalchemy.pool import NullPool +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncAttrs +from sqlalchemy.orm import DeclarativeBase -from alws.config import settings - -__all__ = [ - 'Base', - 'Session', - 'SyncSession', - 'PulpAsyncSession', - 'PulpSession', - 'engine', -] - - -# ALBS db -DATABASE_URL = settings.database_url - -engine = create_async_engine(DATABASE_URL, poolclass=NullPool, echo_pool=True) -sync_engine = create_engine( - settings.sync_database_url, pool_pre_ping=True, pool_recycle=3600 -) +__all__ = ['Base', 'PulpBase'] class Base(AsyncAttrs, DeclarativeBase): @@ -36,25 +13,6 @@ class Base(AsyncAttrs, DeclarativeBase): metadata = MetaData() -sync_session_factory = sessionmaker(sync_engine, expire_on_commit=False) -Session = async_sessionmaker(engine, expire_on_commit=False) -SyncSession = scoped_session(sync_session_factory) - - # Pulp db class PulpBase(AsyncAttrs, DeclarativeBase): __allow_unmapped__ = True - - -pulp_async_engine = create_async_engine( - settings.pulp_async_database_url, poolclass=NullPool, echo_pool=True -) -PulpAsyncSession = async_sessionmaker( - pulp_async_engine, expire_on_commit=False -) - -pulp_engine = create_engine( - settings.pulp_database_url, pool_pre_ping=True, pool_recycle=3600 -) -pulp_session_factory = sessionmaker(pulp_engine, expire_on_commit=False) -PulpSession = scoped_session(pulp_session_factory) diff --git a/alws/dependencies.py b/alws/dependencies.py index ce88a751d..5c9858de8 100644 --- a/alws/dependencies.py +++ b/alws/dependencies.py @@ -1,53 +1,8 @@ -import asyncio -from contextlib import contextmanager - from redis import asyncio as aioredis -from sqlalchemy.orm import Session -from alws import database from alws.config import settings -__all__ = [ - 'get_async_session', - 'get_db', - 'get_pulp_db', - 'get_redis', -] - - -# Usually PostgreSQL supports up to 100 concurrent connections, -# so making semaphore a bit less to not hit that limit -DB_SEMAPHORE = asyncio.Semaphore(90) - - -# FIXME: `get_current_user` dependency causes a transaction -# to exist on a connection so we need a separate dependency for it for now. -# Remove this later when better approach is found. 
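These semaphore-guarded helpers being deleted below are replaced in two ways: background code uses `open_async_session`, and route handlers take their session from fastapi-sqla's `AsyncSessionDependency`, the same swap the router hunks later in this patch make. A minimal sketch of the route-side replacement (the endpoint itself is hypothetical, for illustration only):

    from fastapi import APIRouter, Depends
    from fastapi_sqla import AsyncSessionDependency
    from sqlalchemy.ext.asyncio import AsyncSession

    from alws.dependencies import get_async_db_key

    router = APIRouter()


    @router.get("/example")  # hypothetical endpoint
    async def example_handler(
        db: AsyncSession = Depends(
            AsyncSessionDependency(key=get_async_db_key())
        ),
    ):
        # fastapi-sqla opens the session for the request and commits or
        # rolls back around the response, so the handler never commits.
        return {"ok": True}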
-async def get_async_session() -> database.Session: - async with DB_SEMAPHORE: - async with database.Session() as session: - try: - yield session - finally: - await session.close() - - -async def get_db() -> database.Session: - async with DB_SEMAPHORE: - async with database.Session() as session: - try: - yield session - finally: - await session.close() - - -@contextmanager -def get_pulp_db() -> Session: - with database.PulpSession() as session: - try: - yield session - finally: - session.close() +__all__ = ['get_redis', 'get_async_db_key'] async def get_redis() -> aioredis.Redis: @@ -56,3 +11,7 @@ async def get_redis() -> aioredis.Redis: yield client finally: await client.close() + + +def get_async_db_key() -> str: + return "async" diff --git a/alws/dramatiq/build.py b/alws/dramatiq/build.py index 22f4f5302..e3a1cd244 100644 --- a/alws/dramatiq/build.py +++ b/alws/dramatiq/build.py @@ -1,13 +1,13 @@ import datetime import logging -from contextlib import asynccontextmanager from typing import Any, Dict import dramatiq +from fastapi_sqla import open_async_session, open_session from sqlalchemy import update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from sqlalchemy.orm import joinedload +from sqlalchemy.orm import Session, joinedload from sqlalchemy.sql.expression import func from alws import models @@ -20,8 +20,7 @@ ) from alws.crud import build_node as build_node_crud from alws.crud import test -from alws.database import SyncSession -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.dramatiq import event_loop from alws.errors import ( ArtifactConversionError, @@ -32,6 +31,7 @@ SrpmProvisionError, ) from alws.schemas import build_node_schema, build_schema +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.github_integration_helper import ( find_issues_by_repo_name, get_github_client, @@ -44,7 +44,7 @@ logger = logging.getLogger(__name__) -def _sync_fetch_build(db: SyncSession, build_id: int) -> models.Build: +def _sync_fetch_build(db: Session, build_id: int) -> models.Build: query = select(models.Build).where(models.Build.id == build_id) result = db.execute(query) return result.scalars().first() @@ -73,7 +73,7 @@ async def _start_build(build_id: int, build_request: build_schema.BuildCreate): module_build_index = {} if has_modules: - with SyncSession() as db, db.begin(): + with open_session() as db: platforms = ( db.execute( select(models.Platform).where( @@ -98,34 +98,29 @@ async def _start_build(build_id: int, build_request: build_schema.BuildCreate): db.flush() for platform in platforms: module_build_index[platform.name] = platform.module_build_index - db.commit() - db.close() - async with asynccontextmanager(get_db)() as db: - async with db.begin(): - build = await fetch_build(db, build_id) - planner = BuildPlanner( - db, - build, - is_secure_boot=build_request.is_secure_boot, - module_build_index=module_build_index, - logger=logger, - ) - await planner.init( - platforms=build_request.platforms, - platform_flavors=build_request.platform_flavors, - ) - for ref in build_request.tasks: - await planner.add_git_project(ref) - for linked_id in build_request.linked_builds: - linked_build = await fetch_build(db, linked_id) - if linked_build: - await planner.add_linked_builds(linked_build) - await planner.build_dependency_map() - await db.flush() - await planner.init_build_repos() - await db.commit() - await db.close() + async with open_async_session(key=get_async_db_key()) as db: + build = 
await fetch_build(db, build_id) + planner = BuildPlanner( + db, + build, + is_secure_boot=build_request.is_secure_boot, + module_build_index=module_build_index, + logger=logger, + ) + await planner.init( + platforms=build_request.platforms, + platform_flavors=build_request.platform_flavors, + ) + for ref in build_request.tasks: + await planner.add_git_project(ref) + for linked_id in build_request.linked_builds: + linked_build = await fetch_build(db, linked_id) + if linked_build: + await planner.add_linked_builds(linked_build) + await planner.build_dependency_map() + await db.flush() + await planner.init_build_repos() if settings.github_integration_enabled: try: @@ -160,7 +155,7 @@ async def _start_build(build_id: int, build_request: build_schema.BuildCreate): async def _build_done(request: build_node_schema.BuildDone): - async for db in get_db(): + async with open_async_session(key=get_async_db_key()) as db: try: await build_node_crud.safe_build_done(db, request) except Exception as e: @@ -185,7 +180,7 @@ async def _build_done(request: build_node_schema.BuildDone): build_task.error = str(e) build_task.status = BuildTaskStatus.FAILED await build_node_crud.fast_fail_other_tasks_by_ref(db, build_task) - await db.commit() + await db.flush() # We don't want to create the test tasks until all build tasks # of the same build_id are completed. @@ -219,52 +214,49 @@ async def _build_done(request: build_node_schema.BuildDone): .where(models.Build.id == build_id) .values(finished_at=datetime.datetime.utcnow()) ) - await db.commit() async def _get_build_id(db: AsyncSession, build_task_id: int) -> int: - async with db.begin(): - build_id = ( - ( - await db.execute( - select(models.BuildTask.build_id).where( - models.BuildTask.id == build_task_id - ) + build_id = ( + ( + await db.execute( + select(models.BuildTask.build_id).where( + models.BuildTask.id == build_task_id ) ) - .scalars() - .first() ) - return build_id + .scalars() + .first() + ) + return build_id async def _check_build_and_completed_tasks( db: AsyncSession, build_id: int ) -> bool: - async with db.begin(): - build_tasks = ( - await db.execute( - select(func.count()) - .select_from(models.BuildTask) - .where(models.BuildTask.build_id == build_id) - ) - ).scalar() + build_tasks = ( + await db.execute( + select(func.count()) + .select_from(models.BuildTask) + .where(models.BuildTask.build_id == build_id) + ) + ).scalar() - completed_tasks = ( - await db.execute( - select(func.count()) - .select_from(models.BuildTask) - .where( - models.BuildTask.build_id == build_id, - models.BuildTask.status.notin_([ - BuildTaskStatus.IDLE, - BuildTaskStatus.STARTED, - ]), - ) + completed_tasks = ( + await db.execute( + select(func.count()) + .select_from(models.BuildTask) + .where( + models.BuildTask.build_id == build_id, + models.BuildTask.status.notin_([ + BuildTaskStatus.IDLE, + BuildTaskStatus.STARTED, + ]), ) - ).scalar() + ) + ).scalar() - return completed_tasks == build_tasks + return completed_tasks == build_tasks async def _all_build_tasks_completed( @@ -283,6 +275,7 @@ async def _all_build_tasks_completed( ) def start_build(build_id: int, build_request: Dict[str, Any]): parsed_build = build_schema.BuildCreate(**build_request) + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_start_build(build_id, parsed_build)) @@ -301,4 +294,5 @@ def start_build(build_id: int, build_request: Dict[str, Any]): ) def build_done(request: Dict[str, Any]): parsed_build = build_node_schema.BuildDone(**request) + 
event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_build_done(parsed_build)) diff --git a/alws/dramatiq/errata.py b/alws/dramatiq/errata.py index ed31d3865..3b4482b08 100644 --- a/alws/dramatiq/errata.py +++ b/alws/dramatiq/errata.py @@ -3,12 +3,28 @@ import dramatiq from alws.constants import DRAMATIQ_TASK_TIMEOUT -from alws.crud.errata import bulk_errata_records_release, release_errata_record +from alws.crud.errata import ( + bulk_errata_records_release, + release_errata_record, +) from alws.dramatiq import event_loop +from alws.utils.fastapi_sqla_setup import setup_all __all__ = ["release_errata"] +async def _release_errata_record(record_id: str, platform_id: int, force: bool): + await release_errata_record( + record_id, + platform_id, + force, + ) + + +async def _bulk_errata_records_release(records_ids: typing.List[str]): + await bulk_errata_records_release(records_ids) + + @dramatiq.actor( max_retries=0, priority=0, @@ -16,8 +32,9 @@ time_limit=DRAMATIQ_TASK_TIMEOUT, ) def release_errata(record_id: str, platform_id: int, force: bool): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete( - release_errata_record( + _release_errata_record( record_id, platform_id, force, @@ -32,4 +49,5 @@ def release_errata(record_id: str, platform_id: int, force: bool): time_limit=DRAMATIQ_TASK_TIMEOUT, ) def bulk_errata_release(records_ids: typing.List[str]): - event_loop.run_until_complete(bulk_errata_records_release(records_ids)) + event_loop.run_until_complete(setup_all()) + event_loop.run_until_complete(_bulk_errata_records_release(records_ids)) diff --git a/alws/dramatiq/products.py b/alws/dramatiq/products.py index 94876ba75..dbb113768 100644 --- a/alws/dramatiq/products.py +++ b/alws/dramatiq/products.py @@ -4,6 +4,7 @@ from collections import defaultdict import dramatiq +from fastapi_sqla import open_async_session from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import selectinload @@ -11,8 +12,9 @@ from alws import models from alws.config import settings from alws.constants import DRAMATIQ_TASK_TIMEOUT, BuildTaskStatus -from alws.database import Session +from alws.dependencies import get_async_db_key from alws.dramatiq import event_loop +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.log_utils import setup_logger from alws.utils.pulp_client import PulpClient @@ -274,7 +276,7 @@ async def _perform_product_modification( build_id, product_id, ) - async with Session() as db, db.begin(): + async with open_async_session(key=get_async_db_key()) as db: db_product = ( ( await db.execute( @@ -322,49 +324,45 @@ async def _perform_product_modification( db_build.tasks, ) - await set_platform_for_products_repos(db=db, product=db_product) - await set_platform_for_build_repos(db=db, build=db_build) + await set_platform_for_products_repos(db=db, product=db_product) + await set_platform_for_build_repos(db=db, build=db_build) + + modify = await prepare_repo_modify_dict( + db_build, + db_product, + pulp_client, + modification, + pkgs_blacklist, + ) + tasks = [] + publish_tasks = [] + for key, value in modify.items(): + if modification == "add": + tasks.append( + pulp_client.modify_repository(add=value, repo_to=key) + ) + else: + tasks.append( + pulp_client.modify_repository(remove=value, repo_to=key) + ) + # We've changed products repositories to not invoke + # automatic publications, so now we need + # to manually publish them after modification + 
publish_tasks.append(pulp_client.create_rpm_publication(key)) + logger.debug('Adding packages to pulp repositories') + await asyncio.gather(*tasks) + logger.debug('Creating RPM publications for pulp repositories') + await asyncio.gather(*publish_tasks) - modify = await prepare_repo_modify_dict( - db_build, - db_product, - pulp_client, - modification, - pkgs_blacklist, - ) - tasks = [] - publish_tasks = [] - for key, value in modify.items(): if modification == "add": - tasks.append(pulp_client.modify_repository(add=value, repo_to=key)) + db_product.builds.append(db_build) else: - tasks.append( - pulp_client.modify_repository(remove=value, repo_to=key) - ) - # We've changed products repositories to not invoke - # automatic publications, so now we need - # to manually publish them after modification - publish_tasks.append(pulp_client.create_rpm_publication(key)) - logger.debug('Adding packages to pulp repositories') - await asyncio.gather(*tasks) - logger.debug('Creating RPM publications for pulp repositories') - await asyncio.gather(*publish_tasks) - - if modification == "add": - db_product.builds.append(db_build) - else: - db_product.builds.remove(db_build) - db.add_all( - [ + db_product.builds.remove(db_build) + db.add_all([ db_product, db_build, - ] - ) - try: - await db.commit() - except Exception: - logger.exception('Cannot commit changes:') - await db.rollback() + ]) + await db.flush() logger.info( 'Packages from the build %d were added to the product %d', build_id, @@ -383,6 +381,7 @@ def perform_product_modification( product_id: int, modification: str, ): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete( _perform_product_modification(build_id, product_id, modification) ) diff --git a/alws/dramatiq/releases.py b/alws/dramatiq/releases.py index 407a9e0a0..b9e035cd9 100644 --- a/alws/dramatiq/releases.py +++ b/alws/dramatiq/releases.py @@ -1,22 +1,24 @@ from contextlib import asynccontextmanager import dramatiq +from fastapi_sqla import open_async_session from alws.constants import DRAMATIQ_TASK_TIMEOUT from alws.crud import release as r_crud +from alws.dependencies import get_async_db_key from alws.dramatiq import event_loop -from alws.dependencies import get_db +from alws.utils.fastapi_sqla_setup import setup_all __all__ = ["execute_release_plan"] async def _commit_release(release_id, user_id): - async with asynccontextmanager(get_db)() as db: + async with open_async_session(key=get_async_db_key()) as db: await r_crud.commit_release(db, release_id, user_id) async def _revert_release(release_id, user_id): - async with asynccontextmanager(get_db)() as db: + async with open_async_session(key=get_async_db_key()) as db: await r_crud.revert_release(db, release_id, user_id) @@ -27,6 +29,7 @@ async def _revert_release(release_id, user_id): time_limit=DRAMATIQ_TASK_TIMEOUT, ) def execute_release_plan(release_id: int, user_id: int): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_commit_release(release_id, user_id)) @@ -37,4 +40,5 @@ def execute_release_plan(release_id: int, user_id: int): time_limit=DRAMATIQ_TASK_TIMEOUT, ) def revert_release(release_id: int, user_id: int): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_revert_release(release_id, user_id)) diff --git a/alws/dramatiq/sign_task.py b/alws/dramatiq/sign_task.py index bb387cca6..dd7699992 100644 --- a/alws/dramatiq/sign_task.py +++ b/alws/dramatiq/sign_task.py @@ -6,6 +6,7 @@ from alws.crud import sign_task from alws.dramatiq import event_loop from 
alws.schemas import sign_schema +from alws.utils.fastapi_sqla_setup import setup_all __all__ = ['complete_sign_task'] @@ -27,4 +28,5 @@ async def _complete_sign_task( time_limit=DRAMATIQ_TASK_TIMEOUT, ) def complete_sign_task(task_id: int, payload: typing.Dict[str, typing.Any]): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_complete_sign_task(task_id, payload)) diff --git a/alws/dramatiq/tests.py b/alws/dramatiq/tests.py index 01b930b03..f3aef1a88 100644 --- a/alws/dramatiq/tests.py +++ b/alws/dramatiq/tests.py @@ -2,19 +2,20 @@ import typing import dramatiq +from fastapi_sqla import open_async_session from alws.constants import DRAMATIQ_TASK_TIMEOUT, TestTaskStatus from alws.crud import test as t_crud -from alws.database import Session +from alws.dependencies import get_async_db_key from alws.dramatiq import event_loop from alws.schemas.test_schema import TestTaskResult - +from alws.utils.fastapi_sqla_setup import setup_all __all__ = ['complete_test_task'] async def _complete_test_task(task_id: int, task_result: TestTaskResult): - async with Session() as db: + async with open_async_session(key=get_async_db_key()) as db: try: logging.info('Start processing test task %s', task_id) await t_crud.complete_test_task(db, task_id, task_result) @@ -22,20 +23,24 @@ async def _complete_test_task(task_id: int, task_result: TestTaskResult): except Exception as e: logging.exception( 'Cannot set test task "%d" result, marking as failed.' - 'Error: %s', task_id, str(e)) - await db.rollback() + 'Error: %s', + task_id, + str(e), + ) await t_crud.update_test_task( - db, task_id, task_result, status=TestTaskStatus.FAILED) - finally: - await db.commit() + db, task_id, task_result, status=TestTaskStatus.FAILED + ) + raise e @dramatiq.actor( max_retries=0, priority=0, queue_name='tests', - time_limit=DRAMATIQ_TASK_TIMEOUT + time_limit=DRAMATIQ_TASK_TIMEOUT, ) def complete_test_task(task_id: int, payload: typing.Dict[str, typing.Any]): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete( - _complete_test_task(task_id, TestTaskResult(**payload))) + _complete_test_task(task_id, TestTaskResult(**payload)) + ) diff --git a/alws/dramatiq/user.py b/alws/dramatiq/user.py index 9b9ad738b..0c7d918f9 100644 --- a/alws/dramatiq/user.py +++ b/alws/dramatiq/user.py @@ -1,35 +1,41 @@ import dramatiq - +from fastapi_sqla import open_async_session from sqlalchemy import delete from sqlalchemy.future import select from alws import models from alws.constants import DRAMATIQ_TASK_TIMEOUT from alws.crud import build as build_crud -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.dramatiq import event_loop +from alws.utils.fastapi_sqla_setup import setup_all -__all__ = ['perform_user_removal'] +__all__ = ['perform_user_removal'] async def _perform_user_removal(user_id: int): - async for db in get_db(): - async with db.begin(): - # Remove builds - build_ids = (await db.execute( - select(models.Build.id).where( - models.Build.owner_id == user_id + async with open_async_session(key=get_async_db_key()) as db: + # Remove builds + build_ids = ( + ( + await db.execute( + select(models.Build.id).where( + models.Build.owner_id == user_id + ) ) - )).scalars().all() + ) + .scalars() + .all() + ) + await db.flush() for build_id in build_ids: await build_crud.remove_build_job(db, build_id) - async with db.begin(): - await db.execute(delete(models.User).where( - models.User.id == user_id)) + await db.execute(delete(models.User).where(models.User.id 
== user_id)) @dramatiq.actor(max_retries=0, priority=0, time_limit=DRAMATIQ_TASK_TIMEOUT) def perform_user_removal(user_id: int): + event_loop.run_until_complete(setup_all()) event_loop.run_until_complete(_perform_user_removal(user_id)) diff --git a/alws/models.py b/alws/models.py index 507cd05d8..1c07a1fe3 100644 --- a/alws/models.py +++ b/alws/models.py @@ -12,7 +12,11 @@ SQLAlchemyBaseAccessTokenTable, ) from sqlalchemy.dialects.postgresql import JSONB -from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy +from sqlalchemy.ext.associationproxy import ( + AssociationProxy, + association_proxy, +) +from sqlalchemy.ext.asyncio import create_async_engine from sqlalchemy.orm import ( Mapped, declarative_mixin, @@ -22,6 +26,7 @@ ) from sqlalchemy.sql import func +from alws.config import settings from alws.constants import ( ErrataPackageStatus, ErrataReferenceType, @@ -32,7 +37,7 @@ ReleaseStatus, SignStatus, ) -from alws.database import Base, engine +from alws.database import Base __all__ = [ "Build", @@ -245,9 +250,7 @@ class Platform(PermissionsMixin, Base): modularity: Mapped[Optional[Dict[str, Any]]] = mapped_column( JSONB, nullable=True ) - test_dist_name: Mapped[str] = mapped_column( - sqlalchemy.Text, nullable=False - ) + test_dist_name: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) name: Mapped[str] = mapped_column( sqlalchemy.Text, nullable=False, unique=True, index=True ) @@ -660,9 +663,7 @@ class SourceRpm(Base): nullable=False, index=True, ) - build: Mapped["Build"] = relationship( - "Build", back_populates="source_rpms" - ) + build: Mapped["Build"] = relationship("Build", back_populates="source_rpms") artifact_id: Mapped[int] = mapped_column( sqlalchemy.Integer, sqlalchemy.ForeignKey("build_artifacts.id"), @@ -681,9 +682,7 @@ class BinaryRpm(Base): build_id: Mapped[int] = mapped_column( sqlalchemy.Integer, sqlalchemy.ForeignKey("builds.id"), nullable=False ) - build: Mapped["Build"] = relationship( - "Build", back_populates="binary_rpms" - ) + build: Mapped["Build"] = relationship("Build", back_populates="binary_rpms") artifact_id: Mapped[int] = mapped_column( sqlalchemy.Integer, sqlalchemy.ForeignKey("build_artifacts.id"), @@ -1200,9 +1199,7 @@ class Release(PermissionsMixin, TeamMixin, TimeMixin, Base): nullable=False, ) product: Mapped["Product"] = relationship("Product") - plan: Mapped[Optional[Dict[str, Any]]] = mapped_column( - JSONB, nullable=True - ) + plan: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONB, nullable=True) status: Mapped[int] = mapped_column( sqlalchemy.Integer, default=ReleaseStatus.SCHEDULED ) @@ -1253,9 +1250,7 @@ class SignKey(PermissionsMixin, Base): sqlalchemy.Text, nullable=True ) keyid: Mapped[str] = mapped_column(sqlalchemy.String(16), unique=True) - fingerprint: Mapped[str] = mapped_column( - sqlalchemy.String(40), unique=True - ) + fingerprint: Mapped[str] = mapped_column(sqlalchemy.String(40), unique=True) public_url: Mapped[str] = mapped_column(sqlalchemy.Text) inserted: Mapped[datetime.datetime] = mapped_column( sqlalchemy.DateTime, default=datetime.datetime.utcnow() @@ -1385,9 +1380,7 @@ class PlatformFlavour(PermissionsMixin, Base): repos: Mapped[List["Repository"]] = relationship( "Repository", secondary=FlavourRepo ) - data: Mapped[Optional[Dict[str, Any]]] = mapped_column( - JSONB, nullable=True - ) + data: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONB, nullable=True) class NewErrataRecord(Base): @@ -1445,15 +1438,11 @@ class NewErrataRecord(Base): original_description: Mapped[str] 
= mapped_column( sqlalchemy.Text, nullable=False ) - title: Mapped[Optional[str]] = mapped_column( - sqlalchemy.Text, nullable=True - ) + title: Mapped[Optional[str]] = mapped_column(sqlalchemy.Text, nullable=True) oval_title: Mapped[Optional[str]] = mapped_column( sqlalchemy.Text, nullable=True ) - original_title: Mapped[str] = mapped_column( - sqlalchemy.Text, nullable=False - ) + original_title: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) contact_mail: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) status: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) version: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) @@ -1745,15 +1734,11 @@ class ErrataRecord(Base): original_description: Mapped[str] = mapped_column( sqlalchemy.Text, nullable=False ) - title: Mapped[Optional[str]] = mapped_column( - sqlalchemy.Text, nullable=True - ) + title: Mapped[Optional[str]] = mapped_column(sqlalchemy.Text, nullable=True) oval_title: Mapped[Optional[str]] = mapped_column( sqlalchemy.Text, nullable=True ) - original_title: Mapped[str] = mapped_column( - sqlalchemy.Text, nullable=False - ) + original_title: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) contact_mail: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) status: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) version: Mapped[str] = mapped_column(sqlalchemy.Text, nullable=False) @@ -2112,6 +2097,8 @@ class PerformanceStats(Base): async def create_tables(): + engine = create_async_engine(settings.database_url, echo_pool=True) + async with engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) diff --git a/alws/release_planner.py b/alws/release_planner.py index 6481321e7..24e4d30f9 100644 --- a/alws/release_planner.py +++ b/alws/release_planner.py @@ -103,17 +103,16 @@ async def revert_release( ) try: await self.revert_release_plan(release) - except Exception: - await self.db.rollback() - await self.db.refresh(release) + except Exception as e: message = f"Cannot revert release:\n{traceback.format_exc()}" release.status = ReleaseStatus.FAILED logging.exception("Cannot revert release: %d", release_id) + raise e # for updating release plan, we should use deepcopy release_plan = copy.deepcopy(release.plan) release_plan["last_log"] = message release.plan = release_plan - await self.db.commit() + await self.db.flush() logging.info("Successfully reverted release: %s", release_id) async def remove_packages_from_repositories( @@ -124,9 +123,7 @@ async def remove_packages_from_repositories( repo_ids_to_remove = [] for pkg_dict in release.plan.get("packages", []): pkg_href = pkg_dict.get("package", {}).get("artifact_href", "") - repo_ids = [ - repo["id"] for repo in pkg_dict.get("repositories", []) - ] + repo_ids = [repo["id"] for repo in pkg_dict.get("repositories", [])] if not pkg_href or not repo_ids: continue pkgs_to_remove.append(pkg_href) @@ -513,7 +510,7 @@ async def create_new_release( new_release.started_at = start new_release.finished_at = datetime.datetime.utcnow() self.db.add(new_release) - await self.db.commit() + await self.db.flush() await self.db.refresh(new_release) logging.info("New release %d successfully created", new_release.id) @@ -558,7 +555,7 @@ async def update_release( release.plan = new_plan release.finished_at = datetime.datetime.utcnow() self.db.add(release) - await self.db.commit() + await self.db.flush() await self.db.refresh(release) logging.info("Successfully updated release %d", release_id) return await 
self.get_final_release(release.id) @@ -623,7 +620,7 @@ async def commit_release( release.plan = release_plan release.finished_at = datetime.datetime.utcnow() self.db.add(release) - await self.db.commit() + await self.db.flush() await self.db.refresh(release) logging.info("Successfully committed release %d", release_id) release = await self.get_final_release(release_id) @@ -846,9 +843,7 @@ async def execute_release_plan( ) repo_module_index = IndexWrapper() if template: - repo_module_index = IndexWrapper.from_template( - template - ) + repo_module_index = IndexWrapper.from_template(template) prod_repo_modules_cache[repo_url] = repo_module_index module_info = module["module"] release_module = ModuleWrapper.from_template( @@ -952,9 +947,7 @@ async def revert_release_plan( models.NewErrataToALBSPackage.albs_artifact_id.in_( subquery ), - models.NewErrataToALBSPackage.pulp_href.in_( - pkgs_to_remove - ), + models.NewErrataToALBSPackage.pulp_href.in_(pkgs_to_remove), ) ) ) @@ -1336,8 +1329,7 @@ async def get_release_plan( if not settings.package_beholder_enabled: rpm_modules = [ - {"module": module, "repositories": []} - for module in rpm_modules + {"module": module, "repositories": []} for module in rpm_modules ] return await self.get_pulp_based_response( pulp_packages=pulp_packages, @@ -1358,9 +1350,7 @@ async def get_release_plan( if module["arch"] in weak_arches: module_arch_list.append(strong_arch) - platforms_list = base_platform.reference_platforms + [ - base_platform - ] + platforms_list = base_platform.reference_platforms + [base_platform] module_responses = await self._beholder_client.retrieve_responses( platforms_list, module_name=module_name, @@ -1714,9 +1704,7 @@ async def execute_release_plan( ) repo_module_index = IndexWrapper() if template: - repo_module_index = IndexWrapper.from_template( - template - ) + repo_module_index = IndexWrapper.from_template(template) prod_repo_modules_cache[repo_url] = repo_module_index if repo_name not in packages_to_repo_layout: packages_to_repo_layout[repo_name] = {} @@ -1832,9 +1820,7 @@ async def check_released_errata_packages( models.NewErrataToALBSPackage.pulp_href.in_(package_hrefs), ) ) - .options( - selectinload(models.NewErrataToALBSPackage.errata_package) - ) + .options(selectinload(models.NewErrataToALBSPackage.errata_package)) ) albs_pkgs = albs_pkgs.scalars().all() @@ -1858,7 +1844,7 @@ async def check_released_errata_packages( if errata_record_status != ErrataReleaseStatus.RELEASED: for albs_pkg in albs_pkgs: albs_pkg.status = ErrataPackageStatus.released - await self.db.commit() + await self.db.flush() @class_measure_work_time_async("update_release_plan") async def update_release_plan( diff --git a/alws/routers/build_node.py b/alws/routers/build_node.py index 2e606aee4..a7874c71e 100644 --- a/alws/routers/build_node.py +++ b/alws/routers/build_node.py @@ -3,6 +3,7 @@ import typing from fastapi import APIRouter, Depends, Response, status +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws import dramatiq @@ -10,7 +11,7 @@ from alws.config import settings from alws.constants import BuildTaskRefType, BuildTaskStatus from alws.crud import build_node -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.schemas import build_node_schema router = APIRouter( @@ -23,7 +24,7 @@ @router.post("/ping") async def ping( node_status: build_node_schema.Ping, - db: AsyncSession = Depends(get_db), + db: AsyncSession = 
Depends(AsyncSessionDependency(key=get_async_db_key())), ): if not node_status.active_tasks: return {} @@ -35,7 +36,7 @@ async def ping( async def build_done( build_done_: build_node_schema.BuildDone, response: Response, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): build_task = await build_node.get_build_task(db, build_done_.task_id) if BuildTaskStatus.is_finished(build_task.status): @@ -46,7 +47,7 @@ async def build_done( # won't rebuild task again and again while it's in the queue # in the future this probably should be handled somehow better build_task.ts = datetime.datetime.utcnow() + datetime.timedelta(hours=3) - await db.commit() + await db.flush() dramatiq.build_done.send(build_done_.model_dump()) return {"ok": True} @@ -57,7 +58,7 @@ async def build_done( ) async def get_task( request: build_node_schema.RequestTask, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): task = await build_node.get_available_build_task(db, request) if not task: @@ -140,14 +141,12 @@ async def get_task( module_build_options = { "definitions": { "_module_build": "1", - "modularitylabel": ":".join( - [ - module.name, - module.stream, - module.version, - module.context, - ] - ), + "modularitylabel": ":".join([ + module.name, + module.stream, + module.version, + module.context, + ]), } } response["platform"].add_mock_options(module_build_options) diff --git a/alws/routers/builds.py b/alws/routers/builds.py index 978178217..3262fee29 100644 --- a/alws/routers/builds.py +++ b/alws/routers/builds.py @@ -1,6 +1,7 @@ import typing from fastapi import APIRouter, Depends, HTTPException, status +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws import models @@ -9,7 +10,7 @@ from alws.crud import build_node from alws.crud import platform as platform_crud from alws.crud import platform_flavors as flavors_crud -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.errors import BuildError, DataNotFoundError from alws.schemas import build_schema @@ -29,7 +30,7 @@ async def create_build( build: build_schema.BuildCreate, user: models.User = Depends(get_current_user), - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await build_crud.create_build(db, build, user.id) @@ -56,7 +57,7 @@ async def get_builds_per_page( released: typing.Optional[bool] = None, signed: typing.Optional[bool] = None, is_running: typing.Optional[bool] = None, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await build_crud.get_builds( db=db, @@ -80,7 +81,7 @@ async def get_builds_per_page( @router.post('/get_module_preview/', response_model=build_schema.ModulePreview) async def get_module_preview( module_request: build_schema.ModulePreviewRequest, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): platform = await platform_crud.get_platform( db, @@ -100,7 +101,10 @@ async def get_module_preview( @public_router.get('/{build_id}/', response_model=build_schema.Build) -async def get_build(build_id: int, db: AsyncSession = Depends(get_db)): +async def get_build( + build_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): db_build = await build_crud.get_builds(db, 
build_id) if db_build is None: raise HTTPException( @@ -113,7 +117,7 @@ async def get_build(build_id: int, db: AsyncSession = Depends(get_db)): @router.patch('/{build_id}/restart-failed', status_code=status.HTTP_200_OK) async def restart_failed_build_items( build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await build_node.update_failed_build_items(db, build_id) @@ -121,7 +125,9 @@ async def restart_failed_build_items( @router.patch("/{build_id}/cancel", status_code=status.HTTP_200_OK) async def cancel_idle_build_items( build_id: int, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): await build_node.mark_build_tasks_as_cancelled(session, build_id) @@ -132,13 +138,16 @@ async def cancel_idle_build_items( ) async def parallel_restart_failed_build_items( build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await build_node.update_failed_build_items_in_parallel(db, build_id) @router.delete('/{build_id}/remove', status_code=status.HTTP_204_NO_CONTENT) -async def remove_build(build_id: int, db: AsyncSession = Depends(get_db)): +async def remove_build( + build_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): try: await build_crud.remove_build_job(db, build_id) except DataNotFoundError as exc: diff --git a/alws/routers/coprs.py b/alws/routers/coprs.py index 96af4de36..70ff88430 100644 --- a/alws/routers/coprs.py +++ b/alws/routers/coprs.py @@ -2,12 +2,13 @@ from fastapi import APIRouter, Depends, HTTPException, status from fastapi.responses import PlainTextResponse -from sqlalchemy import select, and_ +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy import and_, select +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload -from alws import database from alws import models -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.utils.copr import ( generate_repo_config, get_clean_copr_chroot, @@ -22,13 +23,17 @@ @copr_router.get('/api_3/project/search') async def search_repos( query: str, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ) -> typing.Dict: - query = select(models.Product).where( - models.Product.name == query, - ).options( - selectinload(models.Product.repositories), - selectinload(models.Product.owner), + query = ( + select(models.Product) + .where( + models.Product.name == query, + ) + .options( + selectinload(models.Product.repositories), + selectinload(models.Product.owner), + ) ) db_products = (await db.execute(query)).scalars().all() return {'items': make_copr_plugin_response(db_products)} @@ -37,13 +42,17 @@ async def search_repos( @copr_router.get('/api_3/project/list') async def list_repos( ownername: str, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ) -> typing.Dict: - query = select(models.Product).where( - models.Product.owner.has(username=ownername), - ).options( - selectinload(models.Product.repositories), - selectinload(models.Product.owner), + query = ( + select(models.Product) + .where( + models.Product.owner.has(username=ownername), + ) + .options( + selectinload(models.Product.repositories), + selectinload(models.Product.owner), + 
) ) db_products = (await db.execute(query)).scalars().all() return {'items': make_copr_plugin_response(db_products)} @@ -58,15 +67,19 @@ async def get_dnf_repo_config( name: str, platform: str, arch: str, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): chroot = f'{platform}-{arch}' clean_chroot = get_clean_copr_chroot(chroot) db_product = await db.execute( - select(models.Product).where(and_( - models.Product.name == name, - models.Product.owner.has(username=ownername), - )).options( + select(models.Product) + .where( + and_( + models.Product.name == name, + models.Product.owner.has(username=ownername), + ) + ) + .options( selectinload(models.Product.repositories), selectinload(models.Product.owner), ), @@ -75,15 +88,16 @@ async def get_dnf_repo_config( if not db_product: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Product {name} for user {ownername} is not found' + detail=f'Product {name} for user {ownername} is not found', ) for product_repo in db_product.repositories: if product_repo.debug or arch != product_repo.arch: continue if product_repo.name.lower().endswith(clean_chroot): return generate_repo_config( - product_repo, db_product.name, ownername) + product_repo, db_product.name, ownername + ) raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f"Didn't find matching repositories in {name} for chroot {chroot}" + detail=f"Didn't find matching repositories in {name} for chroot {chroot}", ) diff --git a/alws/routers/errata.py b/alws/routers/errata.py index 05fd00924..22ba98383 100644 --- a/alws/routers/errata.py +++ b/alws/routers/errata.py @@ -2,13 +2,13 @@ from fastapi import APIRouter, Depends, HTTPException, Query, status from fastapi.responses import PlainTextResponse +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws.auth import get_current_user -from alws.config import settings from alws.constants import ErrataReleaseStatus from alws.crud import errata as errata_crud -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.dramatiq import bulk_errata_release, release_errata from alws.schemas import errata_schema @@ -27,7 +27,7 @@ @router.post("/", response_model=errata_schema.CreateErrataResponse) async def create_errata_record( errata: errata_schema.BaseErrataRecord, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): record = await errata_crud.create_errata_record( db, @@ -40,7 +40,7 @@ async def create_errata_record( async def get_errata_record( errata_id: str, errata_platform_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): errata_record = await errata_crud.get_errata_record( db, @@ -59,7 +59,7 @@ async def get_errata_record( async def get_oval_xml( platform_name: str, only_released: bool = False, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): records = await errata_crud.get_oval_xml(db, platform_name, only_released) if not records: @@ -79,7 +79,7 @@ async def list_errata_records( platformId: Optional[int] = None, cveId: Optional[str] = None, status: Optional[ErrataReleaseStatus] = None, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await errata_crud.list_errata_records( 
        db,
@@ -100,7 +100,7 @@ async def list_errata_records(
 async def get_updateinfo_xml(
     record_id: str,
     platform_id: Optional[int] = None,
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     updateinfo_xml = await errata_crud.get_updateinfo_xml_from_pulp(
         db, record_id, platform_id
@@ -119,7 +119,7 @@ async def get_updateinfo_xml(
 @router.post("/update/", response_model=errata_schema.ErrataRecord)
 async def update_errata_record(
     errata: errata_schema.UpdateErrataRequest,
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     return await errata_crud.update_errata_record(db, errata)
@@ -129,7 +129,7 @@ async def update_errata_record(
 # See https://github.com/AlmaLinux/build-system/issues/207
 @router.get("/all/", response_model=List[errata_schema.CompactErrataRecord])
 async def list_all_errata_records(
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     records = await errata_crud.list_errata_records(db, compact=True)
     return [
@@ -147,7 +147,7 @@ async def list_all_errata_records(
 )
 async def update_package_status(
     packages: List[errata_schema.ChangeErrataPackageStatusRequest],
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     try:
         return {
@@ -165,7 +165,9 @@ async def release_errata_record(
     record_id: str,
     platform_id: int,
     force: bool = False,
-    session: AsyncSession = Depends(get_db),
+    session: AsyncSession = Depends(
+        AsyncSessionDependency(key=get_async_db_key())
+    ),
 ):
     db_record = await errata_crud.get_errata_record(
         session,
@@ -178,7 +180,7 @@
         return {"message": f"Record {record_id} already in progress"}
     db_record.release_status = ErrataReleaseStatus.IN_PROGRESS
     db_record.last_release_log = None
-    await session.commit()
+    await session.flush()
     release_errata.send(record_id, platform_id, force)
     return {
         "message": f"Release updateinfo record {record_id} has been started"
@@ -199,7 +201,9 @@ async def bulk_release_errata_records(records_ids: List[str]):
 @router.post('/reset-matched-packages')
 async def reset_matched_packages(
     record_id: str,
-    session: AsyncSession = Depends(get_db),
+    session: AsyncSession = Depends(
+        AsyncSessionDependency(key=get_async_db_key())
+    ),
 ):
     await errata_crud.reset_matched_errata_packages(record_id, session)
     return {'message': f'Packages for record {record_id} have been matched'}
diff --git a/alws/routers/platform_flavors.py b/alws/routers/platform_flavors.py
index 218d24af4..65b682688 100644
--- a/alws/routers/platform_flavors.py
+++ b/alws/routers/platform_flavors.py
@@ -1,24 +1,25 @@
 from typing import List
 from fastapi import APIRouter, Depends
-from alws import database
+from fastapi_sqla import AsyncSessionDependency
+from sqlalchemy.ext.asyncio import AsyncSession
 from alws.auth import get_current_user
-from alws.dependencies import get_db
-from alws.schemas import platform_flavors_schema as pf_schema
 from alws.crud import platform_flavors as pf_crud
+from alws.dependencies import get_async_db_key
+from alws.schemas import platform_flavors_schema as pf_schema
 router = APIRouter(
     prefix='/platform_flavors',
     tags=['platform_flavors'],
-    dependencies=[Depends(get_current_user)]
+    dependencies=[Depends(get_current_user)],
 )
 @router.post('/', response_model=pf_schema.FlavourResponse)
 async def create_flavour(
     flavour: pf_schema.CreateFlavour,
-    db: 
database.Session = Depends(get_db) + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await pf_crud.create_flavour(db, flavour) @@ -26,13 +27,13 @@ async def create_flavour( @router.patch('/', response_model=pf_schema.FlavourResponse) async def update_flavour( flavour: pf_schema.UpdateFlavour, - db: database.Session = Depends(get_db) + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await pf_crud.update_flavour(db, flavour) @router.get('/', response_model=List[pf_schema.FlavourResponse]) async def get_flavours( - db: database.Session = Depends(get_db) + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await pf_crud.list_flavours(db) diff --git a/alws/routers/platforms.py b/alws/routers/platforms.py index 7090b67cb..ed2d779a0 100644 --- a/alws/routers/platforms.py +++ b/alws/routers/platforms.py @@ -1,18 +1,19 @@ import typing from fastapi import APIRouter, Depends +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.auth import get_current_user -from alws.crud import platform as pl_crud, repository -from alws.dependencies import get_db +from alws.crud import platform as pl_crud +from alws.crud import repository +from alws.dependencies import get_async_db_key from alws.schemas import platform_schema - router = APIRouter( prefix='/platforms', tags=['platforms'], - dependencies=[Depends(get_current_user)] + dependencies=[Depends(get_current_user)], ) public_router = APIRouter( @@ -23,38 +24,47 @@ @router.post('/', response_model=platform_schema.Platform) async def create_platform( - platform: platform_schema.PlatformCreate, - db: database.Session = Depends(get_db) - ): + platform: platform_schema.PlatformCreate, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await pl_crud.create_platform(db, platform) @router.put('/', response_model=platform_schema.Platform) async def modify_platform( - platform: platform_schema.PlatformModify, - db: database.Session = Depends(get_db) - ): + platform: platform_schema.PlatformModify, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await pl_crud.modify_platform(db, platform) @public_router.get('/', response_model=typing.List[platform_schema.Platform]) -async def get_platforms(db: database.Session = Depends(get_db)): +async def get_platforms( + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await pl_crud.get_platforms(db) -@router.patch('/{platform_id}/add-repositories', - response_model=platform_schema.Platform) +@router.patch( + '/{platform_id}/add-repositories', response_model=platform_schema.Platform +) async def add_repositories_to_platform( - platform_id: int, repositories_ids: typing.List[int], - db: database.Session = Depends(get_db)): - return await repository.add_to_platform( - db, platform_id, repositories_ids) + platform_id: int, + repositories_ids: typing.List[int], + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): + return await repository.add_to_platform(db, platform_id, repositories_ids) -@router.patch('/{platform_id}/remove-repositories', - response_model=platform_schema.Platform) +@router.patch( + '/{platform_id}/remove-repositories', + response_model=platform_schema.Platform, +) async def remove_repositories_to_platform( - platform_id: int, repositories_ids: typing.List[int], - db: database.Session = 
Depends(get_db)): + platform_id: int, + repositories_ids: typing.List[int], + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await repository.remove_from_platform( - db, platform_id, repositories_ids) + db, platform_id, repositories_ids + ) diff --git a/alws/routers/products.py b/alws/routers/products.py index c55431c97..33bf456ff 100644 --- a/alws/routers/products.py +++ b/alws/routers/products.py @@ -6,11 +6,12 @@ HTTPException, status, ) +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws.auth import get_current_user from alws.crud import products, sign_task -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.models import User from alws.schemas import ( product_schema, @@ -39,7 +40,7 @@ async def get_products( pageNumber: Optional[int] = None, search_string: Optional[str] = None, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await products.get_products( db, page_number=pageNumber, search_string=search_string @@ -49,12 +50,11 @@ async def get_products( @public_router.post("/", response_model=product_schema.Product) async def create_product( product: product_schema.ProductCreate, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: User = Depends(get_current_user), ): - async with db.begin(): - db_product = await products.create_product(db, product) - await db.commit() + db_product = await products.create_product(db, product) + await db.flush() await db.refresh(db_product) # await sign_task.create_gen_key_task( # db=db, @@ -67,7 +67,7 @@ async def create_product( @public_router.get("/{product_id}/", response_model=product_schema.Product) async def get_product( product_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): db_product = await products.get_products(db, product_id=product_id) if db_product is None: @@ -85,7 +85,7 @@ async def get_product( async def add_to_product( product: str, build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: User = Depends(get_current_user), ): try: @@ -108,7 +108,7 @@ async def add_to_product( async def remove_from_product( product: str, build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: User = Depends(get_current_user), ): try: @@ -132,7 +132,7 @@ async def remove_from_product( ) async def remove_product( product_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: User = Depends(get_current_user), ): try: @@ -154,7 +154,7 @@ async def remove_product( ) async def create_gen_key_task( product_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: User = Depends(get_current_user), ): product = await products.get_products(db=db, product_id=product_id) diff --git a/alws/routers/releases.py b/alws/routers/releases.py index 50375a661..383c0137a 100644 --- a/alws/routers/releases.py +++ b/alws/routers/releases.py @@ -1,6 +1,7 @@ import typing from fastapi import APIRouter, Depends +from fastapi_sqla import AsyncSessionDependency from sqlalchemy import select, update from 
sqlalchemy.ext.asyncio import AsyncSession @@ -8,7 +9,7 @@ from alws.auth import get_current_user from alws.constants import ReleaseStatus from alws.crud import release as r_crud -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.dramatiq import execute_release_plan, revert_release from alws.schemas import release_schema @@ -37,7 +38,7 @@ async def get_releases( platform_id: typing.Optional[int] = None, status: typing.Optional[int] = None, package_name: typing.Optional[str] = None, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await r_crud.get_releases( db, @@ -52,7 +53,7 @@ async def get_releases( @public_router.get("/{release_id}/", response_model=release_schema.Release) async def get_release( release_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await r_crud.get_releases(db, release_id=release_id) @@ -60,7 +61,7 @@ async def get_release( @router.post("/new/", response_model=release_schema.Release) async def create_new_release( payload: release_schema.ReleaseCreate, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: models.User = Depends(get_current_user), ): release = await r_crud.create_release(db, user.id, payload) @@ -71,7 +72,7 @@ async def create_new_release( async def update_release( release_id: int, payload: release_schema.ReleaseUpdate, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: models.User = Depends(get_current_user), ): release = await r_crud.update_release(db, release_id, user.id, payload) @@ -84,18 +85,18 @@ async def update_release( ) async def commit_release( release_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: models.User = Depends(get_current_user), ): # it's ugly hack for updating release status before execution in background - async with db.begin(): - await db.execute( - update(models.Release) - .where( - models.Release.id == release_id, - ) - .values(status=ReleaseStatus.IN_PROGRESS) + await db.execute( + update(models.Release) + .where( + models.Release.id == release_id, ) + .values(status=ReleaseStatus.IN_PROGRESS) + ) + await db.flush() execute_release_plan.send(release_id, user.id) return {"message": "Release plan execution has been started"} @@ -106,18 +107,18 @@ async def commit_release( ) async def revert_db_release( release_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: models.User = Depends(get_current_user), ): # it's ugly hack for updating release status before execution in background - async with db.begin(): - await db.execute( - update(models.Release) - .where( - models.Release.id == release_id, - ) - .values(status=ReleaseStatus.IN_PROGRESS) + await db.execute( + update(models.Release) + .where( + models.Release.id == release_id, ) + .values(status=ReleaseStatus.IN_PROGRESS) + ) + await db.flush() revert_release.send(release_id, user.id) return {"message": "Release plan revert has been started"} @@ -128,7 +129,7 @@ async def revert_db_release( ) async def delete_release( release_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user: models.User = 
Depends(get_current_user), ): """ diff --git a/alws/routers/repositories.py b/alws/routers/repositories.py index 5f76ed300..4eecdb163 100644 --- a/alws/routers/repositories.py +++ b/alws/routers/repositories.py @@ -1,31 +1,37 @@ import typing from fastapi import APIRouter, Depends +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.auth import get_current_user from alws.crud import repository -from alws.dependencies import get_db -from alws.utils.exporter import fs_export_repository +from alws.dependencies import get_async_db_key from alws.schemas import repository_schema - +from alws.utils.exporter import fs_export_repository router = APIRouter( prefix='/repositories', tags=['repositories'], - dependencies=[Depends(get_current_user)] + dependencies=[Depends(get_current_user)], ) @router.get('/', response_model=typing.List[repository_schema.Repository]) -async def get_repositories(db: database.Session = Depends(get_db)): +async def get_repositories( + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await repository.get_repositories(db) -@router.get('/{repository_id}/', - response_model=typing.Union[None, repository_schema.Repository]) -async def get_repository(repository_id: int, - db: database.Session = Depends(get_db)): +@router.get( + '/{repository_id}/', + response_model=typing.Union[None, repository_schema.Repository], +) +async def get_repository( + repository_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): result = await repository.get_repositories(db, repository_id=repository_id) if result: return result[0] @@ -33,6 +39,8 @@ async def get_repository(repository_id: int, @router.post('/exports/', response_model=typing.List[str]) -async def filesystem_export_repository(repository_ids: typing.List[int], - db: database.Session = Depends(get_db)): +async def filesystem_export_repository( + repository_ids: typing.List[int], + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await fs_export_repository(repository_ids, db) diff --git a/alws/routers/roles.py b/alws/routers/roles.py index 6c2b45230..7357c678d 100644 --- a/alws/routers/roles.py +++ b/alws/routers/roles.py @@ -1,21 +1,21 @@ import typing from fastapi import APIRouter, Depends +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.auth import get_current_user from alws.crud import roles -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.schemas import role_schema - router = APIRouter( - prefix='/roles', - tags=['roles'], - dependencies=[Depends(get_current_user)] + prefix='/roles', tags=['roles'], dependencies=[Depends(get_current_user)] ) @router.get('/', response_model=typing.List[role_schema.Role]) -async def get_roles(db: database.Session = Depends(get_db)): +async def get_roles( + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await roles.get_roles(db) diff --git a/alws/routers/sign_key.py b/alws/routers/sign_key.py index 2b7cd5cab..28620524d 100644 --- a/alws/routers/sign_key.py +++ b/alws/routers/sign_key.py @@ -6,11 +6,12 @@ HTTPException, status, ) +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.auth import get_current_user from alws.crud import sign_key -from 
alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.errors import PlatformMissingError, SignKeyAlreadyExistsError from alws.schemas import sign_schema @@ -23,7 +24,7 @@ @router.get('/', response_model=typing.List[sign_schema.SignKey]) async def get_sign_keys( - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user=Depends(get_current_user), ): return await sign_key.get_sign_keys(db, user) @@ -35,7 +36,8 @@ async def get_sign_keys( status_code=status.HTTP_201_CREATED, ) async def create_sign_key( - payload: sign_schema.SignKeyCreate, db: database.Session = Depends(get_db) + payload: sign_schema.SignKeyCreate, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): try: return await sign_key.create_sign_key(db, payload) @@ -47,6 +49,6 @@ async def create_sign_key( async def modify_sign_key( sign_key_id: int, payload: sign_schema.SignKeyUpdate, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await sign_key.update_sign_key(db, sign_key_id, payload) diff --git a/alws/routers/sign_task.py b/alws/routers/sign_task.py index 9118ae209..0484bf278 100644 --- a/alws/routers/sign_task.py +++ b/alws/routers/sign_task.py @@ -4,12 +4,14 @@ import uuid from fastapi import APIRouter, Depends, WebSocket +from fastapi_sqla import AsyncSessionDependency from redis import asyncio as aioredis +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database, dramatiq +from alws import dramatiq from alws.auth import get_current_user from alws.crud import sign_task -from alws.dependencies import get_db, get_redis +from alws.dependencies import get_async_db_key, get_redis from alws.schemas import sign_schema router = APIRouter( @@ -26,7 +28,8 @@ @public_router.get('/', response_model=typing.List[sign_schema.SignTask]) async def get_sign_tasks( - build_id: int = None, db: database.Session = Depends(get_db) + build_id: int = None, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await sign_task.get_sign_tasks(db, build_id=build_id) @@ -34,7 +37,7 @@ async def get_sign_tasks( @router.post('/', response_model=sign_schema.SignTask) async def create_sign_task( payload: sign_schema.SignTaskCreate, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), user=Depends(get_current_user), ): return await sign_task.create_sign_task(db, payload, user.id) @@ -45,15 +48,13 @@ async def create_sign_task( response_model=typing.Union[dict, sign_schema.AvailableSignTask], ) async def get_available_sign_task( - payload: sign_schema.SignTaskGet, db: database.Session = Depends(get_db) + payload: sign_schema.SignTaskGet, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): result = await sign_task.get_available_sign_task(db, payload.key_ids) - if any( - [ - not result.get(item) - for item in ['build_id', 'id', 'keyid', 'packages'] - ] - ): + if any([ + not result.get(item) for item in ['build_id', 'id', 'keyid', 'packages'] + ]): return {} return result @@ -65,11 +66,11 @@ async def get_available_sign_task( async def complete_sign_task( sign_task_id: int, payload: sign_schema.SignTaskComplete, - db: database.Session = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): task = await sign_task.get_sign_task(db, sign_task_id) task.ts = 
datetime.datetime.utcnow() + datetime.timedelta(hours=2)
-    await db.commit()
+    await db.flush()
     dramatiq.sign_task.complete_sign_task.send(
         sign_task_id, payload.model_dump()
     )
@@ -130,7 +131,9 @@ async def iter_sync_sign_tasks(
     '/community/get_gen_sign_key_task/',
     response_model=typing.Union[dict, sign_schema.AvailableGenKeyTask],
 )
-async def get_avaiable_gen_key_task(db: database.Session = Depends(get_db)):
+async def get_available_gen_key_task(
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
+):
     gen_key_task = await sign_task.get_available_gen_key_task(db)
     if gen_key_task:
         return {
@@ -150,7 +153,7 @@ async def get_avaiable_gen_key_task(db: database.Session = Depends(get_db)):
 async def complete_gen_key_task(
     gen_key_task_id: int,
     payload: sign_schema.GenKeyTaskComplete,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     sign_key = await sign_task.complete_gen_key_task(
         gen_key_task_id=gen_key_task_id,
diff --git a/alws/routers/teams.py b/alws/routers/teams.py
index 3c488d45e..6230435f2 100644
--- a/alws/routers/teams.py
+++ b/alws/routers/teams.py
@@ -6,19 +6,19 @@
     HTTPException,
     status,
 )
+from fastapi_sqla import AsyncSessionDependency
+from sqlalchemy.ext.asyncio import AsyncSession
-from alws import database
 from alws.auth import get_current_superuser
 from alws.crud import teams
-from alws.dependencies import get_db
+from alws.dependencies import get_async_db_key
 from alws.errors import TeamError
 from alws.schemas import team_schema
-
 router = APIRouter(
     prefix='/teams',
     tags=['teams'],
-    dependencies=[Depends(get_current_superuser)]
+    dependencies=[Depends(get_current_superuser)],
 )
 public_router = APIRouter(
@@ -27,11 +27,15 @@
 )
-@public_router.get('/', response_model=typing.Union[
-    typing.List[team_schema.Team], team_schema.TeamResponse])
+@public_router.get(
+    '/',
+    response_model=typing.Union[
+        typing.List[team_schema.Team], team_schema.TeamResponse
+    ],
+)
 async def get_teams(
     pageNumber: int = None,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     return await teams.get_teams(db, page_number=pageNumber)
@@ -39,7 +43,7 @@
 @public_router.get('/{team_id}/', response_model=team_schema.Team)
 async def get_team(
     team_id: int,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     return await teams.get_teams(db, team_id=team_id)
@@ -48,7 +52,7 @@
 async def add_members(
     team_id: int,
     payload: team_schema.TeamMembersUpdate,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     try:
         db_team = await teams.update_members(db, payload, team_id, 'add')
@@ -64,7 +68,7 @@
 async def remove_members(
     team_id: int,
     payload: team_schema.TeamMembersUpdate,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     try:
         db_team = await teams.update_members(db, payload, team_id, 'remove')
@@ -79,7 +83,7 @@
 @router.post('/create/', response_model=team_schema.Team)
 async def create_team(
     payload: team_schema.TeamCreate,
-    db: database.Session = Depends(get_db),
+    db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())),
 ):
     try:
         db_team = await teams.create_team(db, payload)
@@ -94,7 +98,7 @@ 
@router.delete('/{team_id}/remove/', status_code=status.HTTP_202_ACCEPTED) async def remove_team( team_id: int, - db: database.Session = Depends(get_db) + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): try: await teams.remove_team(db, team_id) diff --git a/alws/routers/test_repositories.py b/alws/routers/test_repositories.py index 20789d78c..32361309a 100644 --- a/alws/routers/test_repositories.py +++ b/alws/routers/test_repositories.py @@ -1,11 +1,12 @@ import typing from fastapi import APIRouter, Depends, HTTPException, status +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws.auth import get_current_user from alws.crud import test_repository -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.errors import DataNotFoundError, TestRepositoryError from alws.schemas import test_repository_schema @@ -26,7 +27,9 @@ async def get_repositories( pageNumber: typing.Optional[int] = None, name: typing.Optional[str] = None, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): return await test_repository.get_repositories( session, @@ -41,7 +44,9 @@ async def get_repositories( ) async def get_repository( repository_id: int, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): return await test_repository.get_repositories( session, @@ -52,7 +57,9 @@ async def get_repository( @router.post('/create/', response_model=test_repository_schema.TestRepository) async def create_repository( payload: test_repository_schema.TestRepositoryCreate, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): try: db_repo = await test_repository.create_repository(session, payload) @@ -75,7 +82,9 @@ async def create_repository( async def update_test_repository( repository_id: int, payload: test_repository_schema.TestRepositoryUpdate, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): try: await test_repository.update_repository( @@ -96,7 +105,9 @@ async def update_test_repository( ) async def remove_test_repository( repository_id: int, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): try: await test_repository.delete_repository(session, repository_id) @@ -114,7 +125,9 @@ async def remove_test_repository( async def create_package_mapping( repository_id: int, payload: test_repository_schema.PackageTestRepositoryCreate, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): try: await test_repository.create_package_mapping( @@ -136,7 +149,9 @@ async def create_package_mapping( async def bulk_create_package_mapping( repository_id: int, payload: typing.List[test_repository_schema.PackageTestRepositoryCreate], - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): try: await test_repository.bulk_create_package_mapping( @@ -157,7 +172,9 @@ async def bulk_create_package_mapping( ) async def remove_package_mapping( package_id: int, - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + 
AsyncSessionDependency(key=get_async_db_key()) + ), ): try: await test_repository.delete_package_mapping(session, package_id) @@ -175,7 +192,9 @@ async def remove_package_mapping( async def bulk_delete_package_mapping( repository_id: int, package_ids: typing.List[int], - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): await test_repository.bulk_delete_package_mapping( session=session, diff --git a/alws/routers/tests.py b/alws/routers/tests.py index bc7a72b3d..98bb30aab 100644 --- a/alws/routers/tests.py +++ b/alws/routers/tests.py @@ -1,12 +1,13 @@ from typing import List from fastapi import APIRouter, Depends +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from alws import dramatiq from alws.auth import get_current_user from alws.crud import test -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.schemas import test_schema router = APIRouter( @@ -34,14 +35,18 @@ async def update_test_task_result( '/get_test_tasks/', response_model=List[test_schema.TestTaskPayload], ) -async def get_test_tasks(session: AsyncSession = Depends(get_db)): +async def get_test_tasks( + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), +): return await test.get_available_test_tasks(session) @router.put('/build/{build_id}/restart') async def restart_build_tests( build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): await test.restart_build_tests(db, build_id) return {'ok': True} @@ -50,7 +55,7 @@ async def restart_build_tests( @router.put('/build_task/{build_task_id}/restart') async def restart_build_task_tests( build_task_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): await test.restart_build_task_tests(db, build_task_id) return {'ok': True} @@ -59,7 +64,7 @@ async def restart_build_task_tests( @router.put('/build/{build_id}/cancel') async def cancel_build_tests( build_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): await test.cancel_build_tests(db, build_id) return {'ok': True} @@ -71,7 +76,7 @@ async def cancel_build_tests( ) async def get_latest_test_results( build_task_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await test.get_test_tasks_by_build_task(db, build_task_id) @@ -82,7 +87,7 @@ async def get_latest_test_results( ) async def get_test_logs( build_task_id: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await test.get_test_logs(build_task_id, db) @@ -94,7 +99,7 @@ async def get_test_logs( async def get_latest_test_results_by_revision( build_task_id: int, revision: int, - db: AsyncSession = Depends(get_db), + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), ): return await test.get_test_tasks_by_build_task( db, diff --git a/alws/routers/uploads.py b/alws/routers/uploads.py index 4bc71416e..3765c5e08 100644 --- a/alws/routers/uploads.py +++ b/alws/routers/uploads.py @@ -1,10 +1,11 @@ import typing from fastapi import APIRouter, Depends, Form, UploadFile +from fastapi_sqla import AsyncSessionDependency from sqlalchemy.ext.asyncio import AsyncSession from 
alws.auth import get_current_user -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.utils.uploader import MetadataUploader router = APIRouter( @@ -19,7 +20,9 @@ async def upload_repometada( modules: typing.Optional[UploadFile] = None, comps: typing.Optional[UploadFile] = None, repository: str = Form(...), - session: AsyncSession = Depends(get_db), + session: AsyncSession = Depends( + AsyncSessionDependency(key=get_async_db_key()) + ), ): msg = "" uploader = MetadataUploader(session, repository) diff --git a/alws/routers/users.py b/alws/routers/users.py index b802f8a9b..bc2ed3ee1 100644 --- a/alws/routers/users.py +++ b/alws/routers/users.py @@ -6,15 +6,15 @@ HTTPException, status, ) +from fastapi_sqla import AsyncSessionDependency +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.auth import get_current_superuser, get_current_user from alws.crud import user as user_crud -from alws.dependencies import get_db -from alws.errors import UserError, PermissionDenied -from alws.schemas import user_schema, role_schema +from alws.dependencies import get_async_db_key +from alws.errors import PermissionDenied, UserError from alws.models import User - +from alws.schemas import role_schema, user_schema router = APIRouter( prefix='/users', @@ -26,21 +26,22 @@ '/all_users', response_model=typing.List[user_schema.User], ) -async def get_all_users(db: database.Session = Depends(get_db)): +async def get_all_users( + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), +): return await user_crud.get_all_users(db) -@router.put( - '/{user_id}', - response_model=user_schema.UserOpResult - ) -async def modify_user(user_id: int, payload: user_schema.UserUpdate, - db: database.Session = Depends(get_db), - _=Depends(get_current_superuser) - ) -> user_schema.UserOpResult: +@router.put('/{user_id}', response_model=user_schema.UserOpResult) +async def modify_user( + user_id: int, + payload: user_schema.UserUpdate, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + _=Depends(get_current_superuser), +) -> user_schema.UserOpResult: try: - await user_crud.update_user(db, user_id, payload) - return user_schema.UserOpResult(success=True) + await user_crud.update_user(db, user_id, payload) + return user_schema.UserOpResult(success=True) except UserError as err: raise HTTPException( detail=str(err), @@ -49,14 +50,17 @@ async def modify_user(user_id: int, payload: user_schema.UserUpdate, @router.delete('/{user_id}/remove') -async def remove_user(user_id: int, db: database.Session = Depends(get_db), - _=Depends(get_current_superuser) - ) -> user_schema.UserOpResult: +async def remove_user( + user_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + _=Depends(get_current_superuser), +) -> user_schema.UserOpResult: try: await user_crud.remove_user(user_id, db) return user_schema.UserOpResult( success=True, - message=f'User with id {user_id} has been queued for removal') + message=f'User with id {user_id} has been queued for removal', + ) except UserError as err: raise HTTPException( detail=str(err), @@ -64,63 +68,61 @@ async def remove_user(user_id: int, db: database.Session = Depends(get_db), ) -@router.get( - '/{user_id}/roles', - response_model=typing.List[role_schema.Role] -) -async def get_user_roles(user_id: int, - db: database.Session = Depends(get_db), - _=Depends(get_current_user) - ): +@router.get('/{user_id}/roles', 
response_model=typing.List[role_schema.Role]) +async def get_user_roles( + user_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + _=Depends(get_current_user), +): return await user_crud.get_user_roles(db, user_id) -@router.patch( - '/{user_id}/roles/add', - response_model=user_schema.UserOpResult - ) -async def add_roles(user_id: int, roles_ids: typing.List[int], - db: database.Session = Depends(get_db), - current_user: User = Depends(get_current_user) - ) -> user_schema.UserOpResult: +@router.patch('/{user_id}/roles/add', response_model=user_schema.UserOpResult) +async def add_roles( + user_id: int, + roles_ids: typing.List[int], + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + current_user: User = Depends(get_current_user), +) -> user_schema.UserOpResult: try: await user_crud.add_roles(db, user_id, roles_ids, current_user.id) return user_schema.UserOpResult( success=True, - message=f'Successfully added roles {roles_ids} to {user_id}') + message=f'Successfully added roles {roles_ids} to {user_id}', + ) except (PermissionDenied, Exception) as exc: raise HTTPException( - detail=str(exc), - status_code=status.HTTP_400_BAD_REQUEST + detail=str(exc), status_code=status.HTTP_400_BAD_REQUEST ) @router.patch( - '/{user_id}/roles/remove', - response_model=user_schema.UserOpResult - ) -async def remove_roles(user_id: int, roles_ids: typing.List[int], - db: database.Session = Depends(get_db), - current_user: User = Depends(get_current_user) - ) -> user_schema.UserOpResult: + '/{user_id}/roles/remove', response_model=user_schema.UserOpResult +) +async def remove_roles( + user_id: int, + roles_ids: typing.List[int], + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + current_user: User = Depends(get_current_user), +) -> user_schema.UserOpResult: try: await user_crud.remove_roles(db, user_id, roles_ids, current_user.id) return user_schema.UserOpResult( success=True, - message=f'Successfully removed roles {roles_ids} from {user_id}') + message=f'Successfully removed roles {roles_ids} from {user_id}', + ) except (PermissionDenied, Exception) as exc: raise HTTPException( - detail=str(exc), - status_code=status.HTTP_400_BAD_REQUEST + detail=str(exc), status_code=status.HTTP_400_BAD_REQUEST ) @router.get( - '/{user_id}/teams', - response_model=typing.List[user_schema.UserTeam] + '/{user_id}/teams', response_model=typing.List[user_schema.UserTeam] ) -async def get_user_teams(user_id: int, - db: database.Session = Depends(get_db), - _=Depends(get_current_user) - ): +async def get_user_teams( + user_id: int, + db: AsyncSession = Depends(AsyncSessionDependency(key=get_async_db_key())), + _=Depends(get_current_user), +): return await user_crud.get_user_teams(db, user_id) diff --git a/alws/utils/exporter.py b/alws/utils/exporter.py index 57c62f499..301872e89 100644 --- a/alws/utils/exporter.py +++ b/alws/utils/exporter.py @@ -1,22 +1,25 @@ import os import typing -import aiohttp -import aiofiles import urllib from pathlib import Path +import aiofiles +import aiohttp from lxml.html import document_fromstring +from sqlalchemy.ext.asyncio import AsyncSession -from alws import database from alws.crud import repo_exporter -async def fs_export_repository(repository_ids: typing.List[int], - db: database.Session): +async def fs_export_repository( + repository_ids: typing.List[int], db: AsyncSession +): export_task = await repo_exporter.create_pulp_exporters_to_fs( - db, repository_ids) + db, repository_ids + ) export_data = 
await repo_exporter.execute_pulp_exporters_to_fs(
-        db, export_task)
+        db, export_task
+    )
     export_paths = list(export_data.keys())
     for repo_elem, repo_data in export_data.items():
         repo_url = urllib.parse.urljoin(repo_data, 'repodata/')
@@ -34,8 +37,7 @@ async def get_repodata_file_links(base_url: str):
         response.raise_for_status()
         content = await response.text()
         doc = document_fromstring(content)
-        children_urls = [base_url + a.get('href')
-                         for a in doc.xpath('//a')]
+        children_urls = [base_url + a.get('href') for a in doc.xpath('//a')]
     return children_urls
diff --git a/alws/utils/fastapi_sqla_setup.py b/alws/utils/fastapi_sqla_setup.py
new file mode 100644
index 000000000..883a30c5b
--- /dev/null
+++ b/alws/utils/fastapi_sqla_setup.py
@@ -0,0 +1,32 @@
+from fastapi import FastAPI
+from fastapi_sqla import setup
+from fastapi_sqla.async_sqla import startup as async_startup
+from fastapi_sqla.sqla import _DEFAULT_SESSION_KEY, startup
+
+# A standalone app whose only purpose is to run fastapi_sqla's setup()
+# once, when this module is imported.
+app = FastAPI()
+setup(app)
+
+# Session keys; these mirror the fastapi_sqla__<key>__* options in
+# alws.config.Settings (_DEFAULT_SESSION_KEY is configured by the bare
+# sqlalchemy_* options).
+sync_keys = ['pulp', _DEFAULT_SESSION_KEY]
+async_keys = ['async', 'pulp_async']
+
+
+async def setup_all():
+    # Initialize every sync and async engine; dramatiq actors and
+    # scripts call this before opening sessions outside the web app.
+    sync_setup()
+    await async_setup()
+
+
+async def async_setup():
+    for key in async_keys:
+        await async_startup(key)
+
+
+def sync_setup():
+    for key in sync_keys:
+        startup(key)
diff --git a/alws/utils/pulp_utils.py b/alws/utils/pulp_utils.py
index dc5eba0b9..c86b7ad47 100644
--- a/alws/utils/pulp_utils.py
+++ b/alws/utils/pulp_utils.py
@@ -1,11 +1,11 @@
 import typing
 import uuid
+from fastapi_sqla import open_session
 from sqlalchemy import select
+from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import joinedload, load_only
-from alws.database import PulpAsyncSession
-from alws.dependencies import get_pulp_db
 from alws.models import RpmModule
 from alws.pulp_models import (
     CoreArtifact,
@@ -35,10 +35,8 @@ def get_rpm_module_packages_from_repository(
     pkg_epochs: typing.Optional[typing.List[str]] = None,
 ) -> typing.List[RpmPackage]:
     result = []
-    repo_query = select(CoreRepository).where(
-        CoreRepository.pulp_id == repo_id
-    )
-    with get_pulp_db() as pulp_db:
+    repo_query = select(CoreRepository).where(CoreRepository.pulp_id == repo_id)
+    with open_session(key="pulp") as pulp_db:
         repo = pulp_db.execute(repo_query).scalars().first()
         repo_name = repo.name
@@ -118,7 +116,7 @@
         ])
     query = select(RpmPackage).where(*conditions)
-    with get_pulp_db() as pulp_db:
+    with open_session(key="pulp") as pulp_db:
         result = pulp_db.execute(query).scalars().all()
     return result
@@ -159,7 +157,7 @@
             )
         )
     )
-    with get_pulp_db() as pulp_db:
+    with open_session(key="pulp") as pulp_db:
         return pulp_db.execute(query).scalars().unique().all()
@@ -205,7 +203,7 @@
         conditions.append(RpmPackage.arch.in_(pkg_arches))
     query = select(RpmPackage).where(*conditions)
-    with get_pulp_db() as pulp_db:
+    with open_session(key="pulp") as pulp_db:
         return pulp_db.execute(query).scalars().all()
@@ -214,7 +212,8 @@ def get_rpm_packages_by_ids(
     pkg_fields: typing.List[typing.Any],
 ) -> typing.Dict[str, RpmPackage]:
     result = {}
-    with get_pulp_db() as pulp_db:
+    with open_session(key="pulp") as pulp_db:
+        pulp_db.expire_on_commit = False
         pulp_pkgs = (
             pulp_db.execute(
                 select(RpmPackage)
@@ -241,7 +240,7 @@ def get_rpm_packages_by_checksums(
     pkg_checksums: typing.List[str],
 ) -> typing.Dict[str, RpmPackage]:
     result = {}
-    with get_pulp_db() as pulp_db:
+    with open_session(key="pulp") as pulp_db:
         pulp_pkgs = (
             pulp_db.execute(
select(RpmPackage) @@ -265,7 +264,7 @@ def get_rpm_packages_by_checksums( async def get_module_from_pulp_db( - pulp_db: PulpAsyncSession, + pulp_db: AsyncSession, module: RpmModule, ) -> typing.Optional[RpmModulemd]: return ( diff --git a/alws/utils/uploader.py b/alws/utils/uploader.py index 78eb84bb2..0a9c8786d 100644 --- a/alws/utils/uploader.py +++ b/alws/utils/uploader.py @@ -8,13 +8,13 @@ from aiohttp.client_exceptions import ClientResponseError from fastapi import UploadFile, status +from fastapi_sqla import open_session from sqlalchemy import select, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload from alws import models from alws.config import settings -from alws.dependencies import get_pulp_db from alws.errors import UploadError from alws.pulp_models import CoreContent, RpmModulemd, RpmModulemdDefaults from alws.utils.modularity import IndexWrapper @@ -75,7 +75,7 @@ async def upload_modules( module_hrefs = [] defaults_hrefs = [] _index = IndexWrapper.from_template(module_content) - with get_pulp_db() as pulp_session: + with open_session('pulp') as pulp_session: for module in _index.iter_modules(): defaults_snippet = _index.get_module_defaults_as_str( module.name @@ -216,10 +216,9 @@ async def upload_modules( ) if href: defaults_hrefs.append(href) - pulp_session.commit() if db_modules and not dry_run: self.session.add_all(db_modules) - await self.session.commit() + await self.session.flush() # we need to update module if we update template in build repo re_result = re.search( # AlmaLinux-8-s390x-0000-debug-br @@ -248,7 +247,7 @@ async def upload_modules( for task in build_tasks: task.rpm_modules = db_modules self.session.add(task) - await self.session.commit() + await self.session.flush() final_additions = module_hrefs.copy() if defaults_hrefs: diff --git a/pyproject.toml b/pyproject.toml index 01d087d59..1f2ab3dc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ profile = "black" split_on_trailing_comma = true [tool.black] -line-length = 88 +line-length = 80 skip-string-normalization = true # see https://black.readthedocs.io/en/stable/the_black_code_style/future_style.html#preview-style preview = true @@ -11,7 +11,7 @@ enable-unstable-feature = ["hug_parens_with_braces_and_square_brackets"] extend-exclude = 'alws/alembic' [tool.pylint] -max-line-length = 88 +max-line-length = 80 # Minimum line length for functions/classes that require docstrings docstring-min-length = 50 diff --git a/requirements.txt b/requirements.txt index 352e29330..0d438b42a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,6 +10,9 @@ asyncpg==0.29.0 dramatiq[rabbitmq, watch]==1.17.0 email-validator>=2.0 # Can be updated only after migration to SQLAlchemy > 2.0 +# these 2 requirements are for fastapi-sqla +fastapi-sqla==3.1.2 +structlog==24.1.0 fastapi-users-db-sqlalchemy==6.0.1 fastapi-users[all]==13.0.0 fastapi==0.111.0 diff --git a/scripts/add_release_status_errata.py b/scripts/add_release_status_errata.py index c4074ad6a..bc9f9b3be 100644 --- a/scripts/add_release_status_errata.py +++ b/scripts/add_release_status_errata.py @@ -1,18 +1,17 @@ import logging -import typing - import os import sys +import typing sys.path.append(os.path.dirname(os.path.dirname(__file__))) +from fastapi_sqla import open_session from sqlalchemy import select from alws.constants import ErrataReleaseStatus -from alws.database import PulpSession, SyncSession from alws.models import ErrataRecord from alws.pulp_models import UpdateRecord - +from 
alws.utils.fastapi_sqla_setup import sync_setup
 
 logging.basicConfig(
     level=logging.INFO,
@@ -24,19 +23,23 @@ def main():
     logging.info("Start checking release status for ALBS errata records")
-    with PulpSession() as pulp_db, SyncSession() as albs_db, albs_db.begin():
+    sync_setup()
+    with open_session(key="pulp") as pulp_db, open_session() as albs_db:
         pulp_records: typing.List[UpdateRecord.id] = (
             pulp_db.execute(select(UpdateRecord.id)).scalars().all()
         )
-        for albs_record in albs_db.execute(select(ErrataRecord)).scalars().all():
+        for albs_record in (
+            albs_db.execute(select(ErrataRecord)).scalars().all()
+        ):
             albs_record: ErrataRecord
             if albs_record.id not in pulp_records:
                 albs_record.release_status = ErrataReleaseStatus.NOT_RELEASED
-                logging.info("Record %s marked as 'not released'", albs_record.id)
+                logging.info(
+                    "Record %s marked as 'not released'", albs_record.id
+                )
                 continue
             albs_record.release_status = ErrataReleaseStatus.RELEASED
             logging.info("Record %s marked as 'released'", albs_record.id)
-        albs_db.commit()
 
     logging.info("Checking ALBS errata records is complete")
 
diff --git a/scripts/add_teams_to_community_products.py b/scripts/add_teams_to_community_products.py
index 870a56b8f..32599c401 100644
--- a/scripts/add_teams_to_community_products.py
+++ b/scripts/add_teams_to_community_products.py
@@ -2,25 +2,40 @@
 import os
 import sys
 
+from fastapi_sqla import open_async_session
 from sqlalchemy.future import select
 from sqlalchemy.orm import selectinload
 
 sys.path.append(os.path.dirname(os.path.dirname(__file__)))
 
-from alws import database, models
+from alws import models
 from alws.crud.teams import create_team, create_team_roles
+from alws.dependencies import get_async_db_key
 from alws.schemas.team_schema import TeamCreate
+from alws.utils.fastapi_sqla_setup import setup_all
 
 
 async def main():
-    async with database.Session() as session, session.begin():
-        products = (await session.execute(select(models.Product).where(
-            models.Product.is_community.is_(True)).options(
-            selectinload(models.Product.team).selectinload(
-                models.Team.roles),
-            selectinload(models.Product.owner).selectinload(
-                models.User.roles),
-        ))).scalars().all()
+    await setup_all()
+    async with open_async_session(get_async_db_key()) as session:
+        products = (
+            (
+                await session.execute(
+                    select(models.Product)
+                    .where(models.Product.is_community.is_(True))
+                    .options(
+                        selectinload(models.Product.team).selectinload(
+                            models.Team.roles
+                        ),
+                        selectinload(models.Product.owner).selectinload(
+                            models.User.roles
+                        ),
+                    )
+                )
+            )
+            .scalars()
+            .all()
+        )
         add_items = []
         for product in products:
             print(f'Processing product "{product.name}"')
@@ -39,7 +54,6 @@ async def main():
                 print('Team is created successfully')
 
         session.add_all(add_items)
-        await session.commit()
 
 
 if __name__ == '__main__':
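
Note on the pattern used in the scripts above (editor's sketch, not part of the diff): each migrated script first calls `sync_setup()` or `await setup_all()` from `alws.utils.fastapi_sqla_setup` so fastapi-sqla binds its engines from settings, then opens sessions by key. Assuming fastapi-sqla's documented behavior, `open_session()` commits on clean exit and rolls back if the block raises, which is why the explicit `albs_db.commit()` / `await session.commit()` calls disappear throughout this diff. The `run()` helper below is hypothetical, for illustration only:

```python
# Minimal sketch of the sync pattern, assuming fastapi-sqla 3.x semantics:
# startup()/sync_setup() binds engines once; open_session() yields a
# session that commits on clean exit and rolls back on exception.
from fastapi_sqla import open_session
from sqlalchemy import text

from alws.utils.fastapi_sqla_setup import sync_setup


def run() -> None:
    sync_setup()  # bind the default (ALBS) and "pulp" engines once
    # The default key targets the ALBS DB; key="pulp" targets Pulp's DB.
    with open_session(key="pulp") as pulp_db, open_session() as albs_db:
        pulp_db.execute(text("SELECT 1"))
        albs_db.execute(text("SELECT 1"))
        # No explicit commit(): both sessions commit when the block exits.
```
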
format="%(asctime)s %(levelname)-8s %(message)s", @@ -27,7 +29,8 @@ async def main(): affected_updateinfos = {} - async with asynccontextmanager(get_db)() as session, session.begin(): + await setup_all() + async with open_async_session(get_async_db_key()) as session: records = await session.execute( select(ErrataRecord).where( ErrataRecord.original_description.like("%[rhel%") @@ -39,9 +42,9 @@ async def main(): ) record.original_description = debranded_description affected_updateinfos[record.id] = debranded_description - await session.commit() - with get_pulp_db() as pulp_session: - records = session.execute( + + with open_session(key="pulp") as pulp_session: + records = pulp_session.execute( select(UpdateRecord).where( UpdateRecord.id.in_(list(affected_errata_ids)), ) @@ -52,7 +55,6 @@ async def main(): ) record.description = affected_updateinfos[record.id] session.flush() - session.commit() if __name__ == "__main__": diff --git a/scripts/albs-1147.py b/scripts/albs-1147.py index 4c926fc36..075c99cb6 100644 --- a/scripts/albs-1147.py +++ b/scripts/albs-1147.py @@ -6,14 +6,13 @@ import os import sys -from contextlib import asynccontextmanager - -from sqlalchemy import select, or_, not_, distinct +from fastapi_sqla import open_async_session, open_session +from sqlalchemy import distinct, not_, or_, select from alws.config import settings -from alws.dependencies import get_pulp_db, get_db -from alws.utils.errata import debrand_description_and_title +from alws.dependencies import get_async_db_key from alws.utils import pulp_client +from alws.utils.errata import debrand_description_and_title sys.path.append(os.path.dirname(os.path.dirname(__file__))) @@ -22,6 +21,7 @@ CoreRepositoryContent, UpdateRecord, ) +from alws.utils.fastapi_sqla_setup import setup_all logging.basicConfig( format="%(message)s", @@ -48,15 +48,23 @@ def log_differences(original: str, new: str): start_context = max(0, i1 - 3) end_context = min(len(original_words), i2 + 3) - original_context = ' '.join(original_words[start_context:i1]) \ - + ' 【' + original_change + '】 ' \ - + ' '.join(original_words[i2:end_context]) + original_context = ( + ' '.join(original_words[start_context:i1]) + + ' 【' + + original_change + + '】 ' + + ' '.join(original_words[i2:end_context]) + ) start_context = max(0, j1 - 3) end_context = min(len(new_words), j2 + 3) - new_context = ' '.join(new_words[start_context:j1]) \ - + ' 【' + new_change + '】 ' \ - + ' '.join(new_words[j2:end_context]) + new_context = ( + ' '.join(new_words[start_context:j1]) + + ' 【' + + new_change + + '】 ' + + ' '.join(new_words[j2:end_context]) + ) log.info(f'... 
{original_context} ➔ {new_context}') @@ -78,13 +86,9 @@ async def main(write=False): 'lorax-templates-rhel', 'redhat-rpm-config', 'kmod-redhat-oracleasm', - 'rhel8 stream' - ] - search_parts = [ - 'rhel', - 'red hat', - 'redhat' + 'rhel8 stream', ] + search_parts = ['rhel', 'red hat', 'redhat'] affected_records = {} affected_records_content_ids = [] repos_to_publicate = set() @@ -92,29 +96,46 @@ async def main(write=False): pulp = pulp_client.PulpClient( settings.pulp_host, settings.pulp_user, settings.pulp_password ) + await setup_all() - with get_pulp_db() as session: + with open_session(key="pulp") as session: result = session.execute( select(UpdateRecord).where( or_( - *[UpdateRecord.description.ilike(f'%{part}%') for part in search_parts], - *[UpdateRecord.title.ilike(f'%{part}%') for part in search_parts], + *[ + UpdateRecord.description.ilike(f'%{part}%') + for part in search_parts + ], + *[ + UpdateRecord.title.ilike(f'%{part}%') + for part in search_parts + ], ), not_( or_( - *[UpdateRecord.description.ilike(f'%{part}%') for part in ignore_parts], - *[UpdateRecord.title.ilike(f'%{part}%') for part in ignore_parts], + *[ + UpdateRecord.description.ilike(f'%{part}%') + for part in ignore_parts + ], + *[ + UpdateRecord.title.ilike(f'%{part}%') + for part in ignore_parts + ], ) - ) + ), ) ) records = result.scalars().all() - log.info(f'Found {len(records)} records in Pulp\'s \'rpm_updaterecord\' table.') + log.info( + f'Found {len(records)} records in Pulp\'s \'rpm_updaterecord\' table.' + ) for record in records: log.info(f'{record.id} - {record.title}') debranded_title = debrand_description_and_title(record.title) - debranded_description = debrand_description_and_title(record.description) + debranded_description = debrand_description_and_title( + record.description + ) log_differences(record.title, debranded_title) log_differences(record.description, debranded_description) record.title = debranded_title @@ -125,16 +146,13 @@ async def main(write=False): affected_records[record.id] = { 'title': record.title, 'description': record.description, - 'content_ptr_id': record.content_ptr_id + 'content_ptr_id': record.content_ptr_id, } affected_records_content_ids.append(record.content_ptr_id) - query = ( - select(distinct(CoreRepositoryContent.repository_id)) - .where( - CoreRepositoryContent.version_removed_id.is_(None), - CoreRepositoryContent.content_id.in_(affected_records_content_ids) - ) + query = select(distinct(CoreRepositoryContent.repository_id)).where( + CoreRepositoryContent.version_removed_id.is_(None), + CoreRepositoryContent.content_id.in_(affected_records_content_ids), ) repos = session.execute(query).scalars().all() for repo in repos: @@ -143,17 +161,19 @@ async def main(write=False): if write: session.add_all(records) - session.commit() + session.flush() publication_tasks = [] for repo_href in repos_to_publicate: publication_tasks.append(pulp.create_rpm_publication(repo_href)) log.info("Publishing updated repos. This may take a while") await asyncio.gather(*publication_tasks) - log.info(f'{os.linesep * 2}Looking for records in almalinux\'s ' - f'\'errata_records\' table...') + log.info( + f'{os.linesep * 2}Looking for records in almalinux\'s ' + f'\'errata_records\' table...' 
+ ) - async with asynccontextmanager(get_db)() as session, session.begin(): + async with open_async_session(get_async_db_key()) as session: result = await session.execute( select(ErrataRecord).where( ErrataRecord.id.in_(list(affected_records.keys())) @@ -164,28 +184,41 @@ async def main(write=False): for record in records: log.info(f'{record.id} - {record.title or record.original_title}') - log_differences(record.original_title, affected_records[record.id]['title']) - log_differences(record.original_description, affected_records[record.id]['description']) + log_differences( + record.original_title, affected_records[record.id]['title'] + ) + log_differences( + record.original_description, + affected_records[record.id]['description'], + ) record.original_title = affected_records[record.id]['title'] - record.original_description = affected_records[record.id]['description'] + record.original_description = affected_records[record.id][ + 'description' + ] record.updated_date = datetime.datetime.utcnow() if write: session.add_all(records) - await session.commit() + await session.flush() def confirm(): - confirmation = input(f"WARNING: Are you sure you want to write changes? {os.linesep}" - "This may cause issues if you haven't run a dry check. (y/N): ") + confirmation = input( + f"WARNING: Are you sure you want to write changes? {os.linesep}" + "This may cause issues if you haven't run a dry check. (y/N): " + ) if confirmation.lower() != 'y': print("Write operation cancelled.") exit(1) if __name__ == "__main__": - parser = argparse.ArgumentParser(description='This script fixes errata branding issues from Pulp\'s perspective') - parser.add_argument('--write', action='store_true', help='Allow write changes to database') + parser = argparse.ArgumentParser( + description='This script fixes errata branding issues from Pulp\'s perspective' + ) + parser.add_argument( + '--write', action='store_true', help='Allow write changes to database' + ) args = parser.parse_args() if args.write: confirm() diff --git a/scripts/albs-682-fixes.py b/scripts/albs-682-fixes.py index c3aa5e002..100592053 100644 --- a/scripts/albs-682-fixes.py +++ b/scripts/albs-682-fixes.py @@ -20,25 +20,24 @@ # import asyncio import logging +import os +import re +import sys import typing +import urllib.parse import yaml -from sqlalchemy import select, delete +from fastapi_sqla import open_session +from sqlalchemy import delete, select from sqlalchemy.orm import selectinload -import sys -import os -import re -import urllib.parse - sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws.utils import pulp_client -from alws.database import PulpSession, SyncSession from alws.config import settings -from alws.pulp_models import UpdateRecord, UpdatePackage from alws.models import ErrataRecord, Platform, Repository - +from alws.pulp_models import UpdatePackage, UpdateRecord +from alws.utils import pulp_client +from alws.utils.fastapi_sqla_setup import setup_all logging.basicConfig( format="%(asctime)s %(levelname)-8s %(message)s", @@ -55,7 +54,7 @@ async def prepare_albs_errata_cache(): logging.info("Collecting errata records from ALBS DB") - with SyncSession() as albs_db: + with open_session() as albs_db: albs_records = albs_db.execute( select(ErrataRecord).options(selectinload(ErrataRecord.platform)) ) @@ -101,7 +100,7 @@ async def delete_unmatched_packages(): pulp = pulp_client.PulpClient( settings.pulp_host, settings.pulp_user, settings.pulp_password ) - with PulpSession() as pulp_db, SyncSession() as albs_db, pulp_db.begin(): 
+ with open_session(key="pulp") as pulp_db, open_session() as albs_db: albs_records = albs_db.execute( select(ErrataRecord) # we shouldn't delete unmatched packages @@ -133,14 +132,15 @@ async def delete_unmatched_packages(): ) ) pulp_db.flush() - pulp_db.commit() async def delete_pulp_advisory(pulp_href_id): - with PulpSession() as pulp_db, pulp_db.begin(): + with open_session(key="pulp") as pulp_db: pulp_record = ( pulp_db.execute( - select(UpdateRecord).where(UpdateRecord.content_ptr_id == pulp_href_id) + select(UpdateRecord).where( + UpdateRecord.content_ptr_id == pulp_href_id + ) ) .scalars() .first() @@ -173,7 +173,8 @@ async def get_repo_latest_version( repo_version = await pulp.get_repo_latest_version(repo.pulp_href) except Exception: logging.exception( - "Cannot get latest repo_version for %s", f"{repo.name}-{repo.arch}" + "Cannot get latest repo_version for %s", + f"{repo.name}-{repo.arch}", ) return repo, repo_version @@ -210,7 +211,7 @@ def update_result(response: typing.List[dict]): break return result - with SyncSession() as albs_db: + with open_session() as albs_db: db_platforms: typing.List[Platform] = ( albs_db.execute( select(Platform) @@ -228,9 +229,11 @@ def update_result(response: typing.List[dict]): platform = "AlmaLinux-9" opposite_platform = "AlmaLinux-8" - latest_repo_versions = await asyncio.gather( - *(get_repo_latest_version(repo) for repo in db_platform.repos if repo) - ) + latest_repo_versions = await asyncio.gather(*( + get_repo_latest_version(repo) + for repo in db_platform.repos + if repo + )) for repo, repo_href in latest_repo_versions: if not repo or not repo_href: continue @@ -255,7 +258,7 @@ def update_result(response: typing.List[dict]): async def delete_wrong_packages(): logging.info("Deleting wrong packages from advisories") - with PulpSession() as pulp_db, SyncSession() as albs_db, pulp_db.begin(): + with open_session(key="pulp") as pulp_db, open_session() as albs_db: albs_records = albs_db.execute(select(ErrataRecord)) for albs_record in albs_records.scalars().all(): distr_version = albs_record.platform.distr_version @@ -265,7 +268,9 @@ async def delete_wrong_packages(): pulp_records = ( pulp_db.execute( - select(UpdateRecord).where(UpdateRecord.id == albs_record.id) + select(UpdateRecord).where( + UpdateRecord.id == albs_record.id + ) ) .scalars() .all() @@ -283,23 +288,21 @@ async def delete_wrong_packages(): f"albs_record_release '{albs_record_release}'" ) pulp_db.delete(pkg) - pulp_db.commit() async def delete_packages_prefix(): logging.info("Removing 'Packages/' prefix from filenames") - with PulpSession() as pulp_db, pulp_db.begin(): + with open_session(key="pulp") as pulp_db: advisory_pkgs = pulp_db.execute(select(UpdatePackage)).scalars().all() for pkg in advisory_pkgs: if pkg.filename.startswith("Packages/"): logging.info( - "Removing 'Packages/' prefix from %s filename", pkg.filename + "Removing 'Packages/' prefix from %s filename", + pkg.filename, ) pkg.filename = pkg.filename.replace("Packages/", "") - pulp_db.commit() - async def update_pulp_repo(repo, pulp): repo_name = repo["name"] + "-" + repo["arch"] @@ -316,7 +319,9 @@ async def update_pulp_repos(): pulp = pulp_client.PulpClient( settings.pulp_host, settings.pulp_user, settings.pulp_password ) - platforms = yaml.safe_load(open("reference_data/platforms.yaml", "r").read()) + platforms = yaml.safe_load( + open("reference_data/platforms.yaml", "r").read() + ) tasks = [] for platform in platforms: for repo in platform["repositories"]: @@ -327,6 +332,7 @@ async def update_pulp_repos(): 
async def main(): + await setup_all() pulp_client.PULP_SEMAPHORE = asyncio.Semaphore(10) await prepare_albs_errata_cache() diff --git a/scripts/albs-952_fill_errata_module.py b/scripts/albs-952_fill_errata_module.py index 4f37838d5..db8254cb6 100644 --- a/scripts/albs-952_fill_errata_module.py +++ b/scripts/albs-952_fill_errata_module.py @@ -4,26 +4,31 @@ import sys from contextlib import asynccontextmanager +from fastapi_sqla import open_async_session from sqlalchemy.future import select + sys.path.append(os.path.dirname(os.path.dirname(__file__))) from alws import models -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key +from alws.utils.fastapi_sqla_setup import setup_all + async def main(): module_regex = re.compile('Module ([\d\w\-\_]+:[\d\.\w]+) is enabled') updated_records = [] - async with asynccontextmanager(get_db)() as db, db.begin(): - errata_records = (await db.execute( - select(models.ErrataRecord))).scalars().all() + await setup_all() + async with open_async_session(key=get_async_db_key()) as db: + errata_records = ( + (await db.execute(select(models.ErrataRecord))).scalars().all() + ) for record in errata_records: match = module_regex.findall(str(record.original_criteria)) if match: record.module = match[0] updated_records.append(record) db.add_all(updated_records) - await db.commit() if __name__ == '__main__': diff --git a/scripts/bootstrap_permissions.py b/scripts/bootstrap_permissions.py index a5c768241..81cc2aa85 100644 --- a/scripts/bootstrap_permissions.py +++ b/scripts/bootstrap_permissions.py @@ -2,12 +2,14 @@ import os import sys +from fastapi_sqla import open_async_session from sqlalchemy import update +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws import database, models +from alws import models from alws.constants import ( DEFAULT_PRODUCT, DEFAULT_TEAM, @@ -15,11 +17,13 @@ ) from alws.crud.products import create_product from alws.crud.teams import create_team +from alws.dependencies import get_async_db_key from alws.schemas.product_schema import ProductCreate from alws.schemas.team_schema import TeamCreate +from alws.utils.fastapi_sqla_setup import setup_all -async def ensure_system_user_exists(session: database.Session) -> models.User: +async def ensure_system_user_exists(session: AsyncSession) -> models.User: user = ( ( await session.execute( @@ -46,7 +50,8 @@ async def ensure_system_user_exists(session: database.Session) -> models.User: async def main(): - async with database.Session() as db, db.begin(): + await setup_all() + async with open_async_session(get_async_db_key()) as db: system_user = await ensure_system_user_exists(db) alma_team = await create_team( session=db, diff --git a/scripts/bootstrap_repositories.py b/scripts/bootstrap_repositories.py index e80c3824a..75539d310 100644 --- a/scripts/bootstrap_repositories.py +++ b/scripts/bootstrap_repositories.py @@ -8,12 +8,14 @@ import logging import yaml +from fastapi_sqla import open_async_session from syncer import sync -from alws import database from alws.crud import platform as pl_crud from alws.crud import repository as repo_crud +from alws.dependencies import get_async_db_key from alws.schemas import platform_schema, remote_schema, repository_schema +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.pulp_client import PulpClient REPO_CACHE = {} @@ -74,7 +76,7 @@ async def get_repository( production: bool, logger: logging.Logger, ): - 
async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: if production: repo_payload = repo_info.copy() repo_payload.pop("remote_url") @@ -121,11 +123,9 @@ async def get_repository( repository = await repo_crud.update_repository( db=db, repository_id=repository.id, - payload=repository_schema.RepositoryUpdate( - **{ - 'pulp_href': repo_href, - } - ), + payload=repository_schema.RepositoryUpdate(**{ + 'pulp_href': repo_href, + }), ) else: @@ -142,7 +142,7 @@ async def get_repository( async def get_remote(repo_info: dict, remote_sync_policy: str): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: remote_payload = repo_info.copy() remote_payload["name"] = f'{repo_info["name"]}-{repo_info["arch"]}' remote_payload.pop("type", None) @@ -157,7 +157,7 @@ async def get_remote(repo_info: dict, remote_sync_policy: str): async def update_remote(remote_id, remote_data: dict): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: return await repo_crud.update_repository_remote( db=db, remote_id=remote_id, @@ -166,21 +166,21 @@ async def update_remote(remote_id, remote_data: dict): async def update_platform(platform_data: dict): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: await pl_crud.modify_platform( db, platform_schema.PlatformModify(**platform_data) ) async def update_repository(repo_id: int, repo_data: dict): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: await repo_crud.update_repository( db, repo_id, repository_schema.RepositoryUpdate(**repo_data) ) async def get_repositories_for_update(platform_name: str): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: return await repo_crud.get_repositories_by_platform_name( db, platform_name ) @@ -191,7 +191,7 @@ async def add_repositories_to_platform( ): platform_name = platform_data.get("name") platform_instance = None - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: for platform in await pl_crud.get_platforms( db, is_reference=platform_data.get("is_reference", False) ): @@ -224,6 +224,8 @@ def main(): pulp_client = PulpClient(pulp_host, pulp_user, pulp_password) + sync(setup_all()) + for platform_data in platforms_data: if args.only_update: sync(update_platform(platform_data)) @@ -271,7 +273,10 @@ def main(): if repo: distro = sync(pulp_client.get_rpm_distro(repo_name)) if repo and distro: - REPO_CACHE[repo_name] = (distro["base_url"], repo["pulp_href"]) + REPO_CACHE[repo_name] = ( + distro["base_url"], + repo["pulp_href"], + ) for repo_info in repositories_data: logger.info( @@ -323,9 +328,7 @@ def main(): ) ) if args.no_sync: - logger.info( - "Synchronization from remote is disabled, skipping" - ) + logger.info("Synchronization from remote is disabled, skipping") continue try: logger.info("Syncing %s from %s...", repository, remote) diff --git a/scripts/compare_beta_to_stable.py b/scripts/compare_beta_to_stable.py index 58f92dcd2..b033a427b 100644 --- a/scripts/compare_beta_to_stable.py +++ b/scripts/compare_beta_to_stable.py @@ -7,26 +7,26 @@ import sys import typing +from fastapi_sqla import open_async_session from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload sys.path.append(os.path.dirname(os.path.dirname(__file__))) +from 
alws.config import settings +from alws.dependencies import get_async_db_key from alws.models import ( Platform, PlatformFlavour, Product, Repository, ) -from alws.config import settings -from alws.database import Session from alws.utils import pulp_client from alws.utils.debuginfo import is_debuginfo +from alws.utils.fastapi_sqla_setup import setup_all - -SUPPORTED_ARCHES = ("src", "aarch64", "i686", "ppc64le", - "s390x", "x86_64") +SUPPORTED_ARCHES = ("src", "aarch64", "i686", "ppc64le", "s390x", "x86_64") MODEL_MAPPING = { "flavor": PlatformFlavour, "platform": Platform, @@ -37,35 +37,46 @@ def parse_args(): parser = argparse.ArgumentParser() parser.add_argument( - "-s", "--stable-platform", type=str, required=True, - help="Stable platform name to compare beta with" + "-s", + "--stable-platform", + type=str, + required=True, + help="Stable platform name to compare beta with", ) parser.add_argument( - "-b", "--beta-entity", type=str, required=True, - help="Beta entity name to compare stable with" + "-b", + "--beta-entity", + type=str, + required=True, + help="Beta entity name to compare stable with", ) parser.add_argument( - "--beta-type", choices=("flavor", "product"), type=str, + "--beta-type", + choices=("flavor", "product"), + type=str, default="flavor", - help="Beta entity type to query for repositories" + help="Beta entity type to query for repositories", ) parser.add_argument( - "-a", "--arch", - choices=SUPPORTED_ARCHES, type=str, default="", - help="Check a specific architecture only" + "-a", + "--arch", + choices=SUPPORTED_ARCHES, + type=str, + default="", + help="Check a specific architecture only", ) parser.add_argument( - "-v", "--verbose", action="store_true", - help="Enable verbose output" + "-v", "--verbose", action="store_true", help="Enable verbose output" ) return parser.parse_args() async def get_repositories( - session: AsyncSession, - entity_name: str, - model_type: str = "platform", - arch: typing.Optional[str] = None) -> typing.List[Repository]: + session: AsyncSession, + entity_name: str, + model_type: str = "platform", + arch: typing.Optional[str] = None, +) -> typing.List[Repository]: model = MODEL_MAPPING[model_type] entity_class_name = model.__class__.__name__ @@ -81,35 +92,41 @@ async def get_repositories( f"from the provided entity: {entity_class_name}" ) entity = ( - await session.execute(select(model).where( - model.name == entity_name).options( - selectinload(repositories_field) - )) - ).scalars().first() + ( + await session.execute( + select(model) + .where(model.name == entity_name) + .options(selectinload(repositories_field)) + ) + ) + .scalars() + .first() + ) if not entity: - raise ValueError(f"Cannot find {entity_class_name.lower()} " - f"with name {entity_name}") + raise ValueError( + f"Cannot find {entity_class_name.lower()} " + f"with name {entity_name}" + ) if arch: - return [r for r in getattr(entity, field_name, []) - if r.arch == arch] + return [r for r in getattr(entity, field_name, []) if r.arch == arch] return list(getattr(entity, field_name, [])) class PackagesComparator: def __init__(self): self.pulp = pulp_client.PulpClient( - settings.pulp_host, settings.pulp_user, - settings.pulp_password + settings.pulp_host, settings.pulp_user, settings.pulp_password ) async def retrieve_all_packages_from_repo( - self, - repository: Repository, - arch: typing.Optional[str] = None + self, repository: Repository, arch: typing.Optional[str] = None ) -> typing.Tuple[str, typing.List[typing.Dict[str, str]]]: - logging.info("Getting packages from 
repository %s %s", - repository.name, repository.arch) + logging.info( + "Getting packages from repository %s %s", + repository.name, + repository.arch, + ) search_params = {} if arch: if arch == "src": @@ -121,26 +138,28 @@ async def retrieve_all_packages_from_repo( include_fields=["sha256", "location_href"], **search_params, ) - logging.info("Got all packages from repository %s %s", - repository.name, repository.arch) + logging.info( + "Got all packages from repository %s %s", + repository.name, + repository.arch, + ) return repository.name, packages async def get_packages_list( - self, - repositories: typing.List[Repository], - arch: typing.Optional[str] = None + self, + repositories: typing.List[Repository], + arch: typing.Optional[str] = None, ) -> typing.Tuple[typing.Set[str], typing.Set[str]]: tasks = [] for repository in repositories: if repository.pulp_href: tasks.append( - self.retrieve_all_packages_from_repo( - repository, arch=arch - ) + self.retrieve_all_packages_from_repo(repository, arch=arch) ) else: - logging.warning("Repository %s does not have Pulp HREF", - str(repository)) + logging.warning( + "Repository %s does not have Pulp HREF", str(repository) + ) packages = await asyncio.gather(*tasks) debuginfo_packages = [] @@ -149,8 +168,9 @@ async def get_packages_list( logging.debug("All packages: %s", pprint.pformat(packages)) for repo_name, packages in packages: - package_names = [p["location_href"].split("/")[-1] - for p in packages] + package_names = [ + p["location_href"].split("/")[-1] for p in packages + ] packages_list = ( debuginfo_packages if is_debuginfo(repo_name) @@ -161,33 +181,39 @@ async def get_packages_list( return set(usual_packages), set(debuginfo_packages) async def run(self, args): - async with Session() as session: + async with open_async_session(key=get_async_db_key()) as session: stable_repositories = await get_repositories( session, args.stable_platform, arch=args.arch ) beta_repositories = await get_repositories( - session, args.beta_entity, model_type=args.beta_type, - arch=args.arch + session, + args.beta_entity, + model_type=args.beta_type, + arch=args.arch, ) stable_usual_packages, stable_debuginfo_packages = ( - await self.get_packages_list( - stable_repositories, arch=args.arch) + await self.get_packages_list(stable_repositories, arch=args.arch) ) beta_usual_packages, beta_debuginfo_packages = ( - await self.get_packages_list( - beta_repositories, arch=args.arch) + await self.get_packages_list(beta_repositories, arch=args.arch) ) usual_diff = set(stable_usual_packages) & set(beta_usual_packages) - debuginfo_diff = set(stable_debuginfo_packages) & set(beta_debuginfo_packages) + debuginfo_diff = set(stable_debuginfo_packages) & set( + beta_debuginfo_packages + ) if usual_diff: - logging.error("Beta packages have intersections " - "with stable: %s", pprint.pformat(usual_diff)) + logging.error( + "Beta packages have intersections " "with stable: %s", + pprint.pformat(usual_diff), + ) if debuginfo_diff: - logging.error("Beta debuginfo packages have intersections " - "with stable: %s", pprint.pformat(debuginfo_diff)) + logging.error( + "Beta debuginfo packages have intersections " "with stable: %s", + pprint.pformat(debuginfo_diff), + ) async def main(): @@ -202,6 +228,7 @@ async def main(): logging.FileHandler(f"stable_beta_comparator.{current_ts}.log"), ], ) + await setup_all() comparator = PackagesComparator() await comparator.run(args) diff --git a/scripts/errata_fix_script.py b/scripts/errata_fix_script.py index 1422f9341..6c8bb29b8 100644 --- 
a/scripts/errata_fix_script.py +++ b/scripts/errata_fix_script.py @@ -1,21 +1,21 @@ import asyncio -from contextlib import asynccontextmanager import logging import re import typing +import createrepo_c as cr import yaml +from fastapi_sqla import open_async_session, open_session from sqlalchemy import select -import createrepo_c as cr +from sqlalchemy.orm import Session -from alws.utils import pulp_client -from alws.database import PulpSession, SyncSession from alws.config import settings -from alws.constants import ErrataReferenceType, ErrataPackageStatus -from alws.pulp_models import UpdateRecord, UpdatePackage +from alws.constants import ErrataPackageStatus, ErrataReferenceType +from alws.dependencies import get_async_db_key from alws.models import ErrataRecord, ErrataReference -from alws.dependencies import get_db - +from alws.pulp_models import UpdatePackage, UpdateRecord +from alws.utils import pulp_client +from alws.utils.fastapi_sqla_setup import setup_all logging.basicConfig( level=logging.DEBUG, @@ -35,7 +35,7 @@ async def update_pulp_repo(repo, pulp): async def prepare_albs_packages_cache( - albs_db: SyncSession, + albs_db: Session, pulp: pulp_client.PulpClient, record_id: str, ) -> typing.Dict[str, typing.Any]: @@ -53,10 +53,13 @@ async def prepare_albs_packages_cache( 'sha256', 'checksum_type', ] - albs_record: typing.Optional[ErrataRecord] = albs_db.execute( - select(ErrataRecord) - .where(ErrataRecord.id == record_id) - ).scalars().first() + albs_record: typing.Optional[ErrataRecord] = ( + albs_db.execute( + select(ErrataRecord).where(ErrataRecord.id == record_id) + ) + .scalars() + .first() + ) if not albs_record: return albs_packages_cache logging.info('Collecting pulp_packages for record %s', record_id) @@ -71,9 +74,7 @@ async def prepare_albs_packages_cache( pulp_pkg['reboot_suggested'] = ( albs_package.errata_package.reboot_suggested ) - location_href = re.sub( - r'^Packages/', '', pulp_pkg['location_href'] - ) + location_href = re.sub(r'^Packages/', '', pulp_pkg['location_href']) pulp_pkg['location_href'] = location_href albs_packages_cache[location_href] = pulp_pkg return albs_packages_cache @@ -87,11 +88,12 @@ async def update_pulp_db(): albs_packages_cache = {} latest_record_id = '' logging.info('updating pulp db records') - with PulpSession() as pulp_db, SyncSession() as albs_db: - records: typing.List[UpdateRecord] = pulp_db.execute( - select(UpdateRecord) - .order_by(UpdateRecord.id) - ).scalars().all() + with open_session(key="pulp") as pulp_db, open_session() as albs_db: + records: typing.List[UpdateRecord] = ( + pulp_db.execute(select(UpdateRecord).order_by(UpdateRecord.id)) + .scalars() + .all() + ) for record in records: logging.info("Processing errata %s", record.id) if latest_record_id != record.id: @@ -99,8 +101,9 @@ async def update_pulp_db(): albs_db, pulp, record.id ) if not albs_packages_cache: - logging.info('Skipping record %s, there is no ErrataRecord', - record.id) + logging.info( + 'Skipping record %s, there is no ErrataRecord', record.id + ) continue collection = record.collections[0] collection_arch = re.search( @@ -113,10 +116,7 @@ async def update_pulp_db(): for location_href, pkg in albs_packages_cache.items() if pkg['arch'] in (collection_arch, 'noarch') } - errata_packages = { - pkg.filename - for pkg in collection.packages - } + errata_packages = {pkg.filename for pkg in collection.packages} missing_filenames = albs_packages.difference(errata_packages) if not missing_filenames: logging.info('Errata %s is ok', record.id) @@ -132,20 +132,23 
@@ async def update_pulp_db(): filename, ) pulp_pkg['sha256'] = pulp_pkg['pkgId'] - collection.packages.append(UpdatePackage( - name=pulp_pkg['name'], - filename=pulp_pkg['location_href'], - arch=pulp_pkg['arch'], - version=pulp_pkg['version'], - release=pulp_pkg['release'], - epoch=str(pulp_pkg['epoch']), - reboot_suggested=pulp_pkg['reboot_suggested'], - src=pulp_pkg['rpm_sourcerpm'], - sum=pulp_pkg['sha256'], - sum_type=cr.checksum_type('sha256'), - )) - logging.info('Added package %s for errata %s', - filename, record.id) + collection.packages.append( + UpdatePackage( + name=pulp_pkg['name'], + filename=pulp_pkg['location_href'], + arch=pulp_pkg['arch'], + version=pulp_pkg['version'], + release=pulp_pkg['release'], + epoch=str(pulp_pkg['epoch']), + reboot_suggested=pulp_pkg['reboot_suggested'], + src=pulp_pkg['rpm_sourcerpm'], + sum=pulp_pkg['sha256'], + sum_type=cr.checksum_type('sha256'), + ) + ) + logging.info( + 'Added package %s for errata %s', filename, record.id + ) logging.info('Start checking record %s references', record.id) for reference in record.references: if ( @@ -156,7 +159,6 @@ async def update_pulp_db(): reference.title = reference.ref_id logging.info('Fixed ref_title for ref_id %s', reference.ref_id) latest_record_id = record.id - pulp_db.commit() logging.info('pulp db records updated') platforms = yaml.safe_load( open("reference_data/platforms.yaml", "r").read() @@ -173,14 +175,11 @@ async def update_pulp_db(): async def update_albs_db(): logging.info("Update albs db started") - query = ( - select(ErrataReference) - .where( - ErrataReference.ref_type != ErrataReferenceType.bugzilla, - ErrataReference.title == '', - ) + query = select(ErrataReference).where( + ErrataReference.ref_type != ErrataReferenceType.bugzilla, + ErrataReference.title == '', ) - async with asynccontextmanager(get_db)() as db, db.begin(): + async with open_async_session(key=get_async_db_key()) as db: for reference in (await db.execute(query)).scalars().all(): reference.title = reference.ref_id logging.info("Update albs db is done") @@ -192,6 +191,7 @@ async def main(): update_pulp_db(), update_albs_db(), ] + await setup_all() await asyncio.gather(*tasks) diff --git a/scripts/errata_pkgs_matcher.py b/scripts/errata_pkgs_matcher.py index 105f93517..01fdbebd3 100644 --- a/scripts/errata_pkgs_matcher.py +++ b/scripts/errata_pkgs_matcher.py @@ -5,13 +5,14 @@ import sys from contextlib import asynccontextmanager +from fastapi_sqla import open_async_session from sqlalchemy import select from sqlalchemy.orm import joinedload sys.path.append(os.path.dirname(os.path.dirname(__file__))) from alws.constants import ErrataPackageStatus -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.models import ( BuildTask, BuildTaskArtifact, @@ -20,6 +21,7 @@ ErrataToALBSPackage, ) from alws.pulp_models import RpmPackage +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.parsing import clean_release, parse_rpm_nevra from alws.utils.pulp_utils import ( get_rpm_packages_by_ids, @@ -47,7 +49,8 @@ async def main( advisory_id, ) added = not_found = 0 - async with asynccontextmanager(get_db)() as db, db.begin(): + await setup_all() + async with open_async_session(key=get_async_db_key()) as db: advisory = ( ( await db.execute( @@ -144,7 +147,6 @@ async def main( ) not_found += 1 db.add_all(new_entites) - await db.commit() logging.info('Total: added %d, not found %d', added, not_found) diff --git a/scripts/fix_releases_products.py b/scripts/fix_releases_products.py 
index f5caad50a..e4eacbe94 100644
--- a/scripts/fix_releases_products.py
+++ b/scripts/fix_releases_products.py
@@ -1,8 +1,8 @@
 import asyncio
 import os
 import sys
-from contextlib import asynccontextmanager
 
+from fastapi_sqla import open_async_session
 from sqlalchemy import update
 from sqlalchemy.future import select
 
@@ -10,20 +10,33 @@
 
 from alws import models
 from alws.constants import DEFAULT_PRODUCT
-from alws.dependencies import get_db
+from alws.dependencies import get_async_db_key
+from alws.utils.fastapi_sqla_setup import setup_all
 
 
 async def main():
-    async with asynccontextmanager(get_db)() as db, db.begin():
-        product_id = (await db.execute(select(models.Product.id).where(
-            models.Product.name == DEFAULT_PRODUCT))).scalar()
+    await setup_all()
+    async with open_async_session(get_async_db_key()) as db:
+        product_id = (
+            await db.execute(
+                select(models.Product.id).where(
+                    models.Product.name == DEFAULT_PRODUCT
+                )
+            )
+        ).scalar()
         # Assign all previous releases to AlmaLinux product
-        await db.execute(update(models.Release).where(
-            models.Release.product_id.is_(None)).values(product_id=product_id))
+        await db.execute(
+            update(models.Release)
+            .where(models.Release.product_id.is_(None))
+            .values(product_id=product_id)
+        )
         # Set is_community flag for AlmaLinux product to False
         # to use usual release logic
-        await db.execute(update(models.Product).where(
-            models.Product.name == DEFAULT_PRODUCT).values(is_community=False))
+        await db.execute(
+            update(models.Product)
+            .where(models.Product.name == DEFAULT_PRODUCT)
+            .values(is_community=False)
+        )
 
 
 if __name__ == '__main__':
diff --git a/scripts/generate_errata_title.py b/scripts/generate_errata_title.py
index b2b5f7f2a..53bc47a39 100644
--- a/scripts/generate_errata_title.py
+++ b/scripts/generate_errata_title.py
@@ -2,29 +2,34 @@
 import os
 import re
 import sys
-from contextlib import asynccontextmanager
 
+from fastapi_sqla import open_async_session
 from sqlalchemy.future import select
+
 
 sys.path.append(os.path.dirname(os.path.dirname(__file__)))
 
 from alws import models
-from alws.dependencies import get_db
+from alws.dependencies import get_async_db_key
+from alws.utils.fastapi_sqla_setup import setup_all
 
 
 async def main():
     severity_regex = re.compile('^(Important|Critical|Moderate|Low): ')
     updated_records = []
-    async with asynccontextmanager(get_db)() as db, db.begin():
-        errata_records = (await db.execute(
-            select(models.ErrataRecord))).scalars().all()
+    await setup_all()
+    async with open_async_session(key=get_async_db_key()) as db:
+        errata_records = (
+            (await db.execute(select(models.ErrataRecord))).scalars().all()
+        )
         for record in errata_records:
             clean_title = severity_regex.sub('', record.original_title)
             record.title = f'{record.id}: {clean_title} ({record.severity})'
-            record.oval_title = f'{record.id}: {clean_title} ({record.severity})'
+            record.oval_title = (
+                f'{record.id}: {clean_title} ({record.severity})'
+            )
             updated_records.append(record)
         db.add_all(updated_records)
-        await db.commit()
 
 
 if __name__ == '__main__':
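
The async scripts above all follow the same shape: await `setup_all()` once before any session is opened, then rely on `open_async_session()` to commit when the block exits cleanly (hence the removed `await db.commit()` lines, with `flush()` kept only where errors should surface mid-transaction). A minimal sketch of that shape, not part of the diff; the `run()` helper is hypothetical and the commit-on-exit behavior is fastapi-sqla's documented contract:

```python
# Minimal sketch of the async pattern, assuming open_async_session()
# commits on clean exit and rolls back when the block raises.
import asyncio

from fastapi_sqla import open_async_session

from alws.dependencies import get_async_db_key
from alws.utils.fastapi_sqla_setup import setup_all


async def run() -> None:
    await setup_all()  # must complete before any session is opened
    async with open_async_session(key=get_async_db_key()) as db:
        db.add_all([])  # ORM work goes here
        await db.flush()  # optional: surface constraint errors early
    # Commit happens here, on clean exit; an exception triggers rollback.


if __name__ == "__main__":
    asyncio.run(run())
```
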
get_db
-from alws.auth.dependencies import get_user_db
+from alws.auth.dependencies import get_async_db_key, get_user_db
 from alws.auth.user_manager import get_user_manager
 from alws.models import User
+from alws.utils.fastapi_sqla_setup import setup_all
 
 
 def parse_args():
     parser = argparse.ArgumentParser()
-    parser.add_argument('-e', '--email', required=False, type=str,
-                        default=None,
-                        help='User e-mail')
-    parser.add_argument('-u', '--username', required=False, type=str,
-                        default=None,
-                        help='User e-mail')
-    parser.add_argument('-s', '--secret', required=True, type=str,
-                        help='JWT secret')
+    parser.add_argument(
+        '-e',
+        '--email',
+        required=False,
+        type=str,
+        default=None,
+        help='User e-mail',
+    )
+    parser.add_argument(
+        '-u',
+        '--username',
+        required=False,
+        type=str,
+        default=None,
+        help='Username',
+    )
+    parser.add_argument(
+        '-s', '--secret', required=True, type=str, help='JWT secret'
+    )
     return parser.parse_args(sys.argv[1:])
 
 
 async def gen_token(secret: str, email: str = None, username: str = None):
+    await setup_all()
+
     strategy = JWTStrategy(secret, lifetime_seconds=1 * 31557600)
-    get_async_session_context = contextlib.asynccontextmanager(get_db)
+    get_async_session_context = lambda: open_async_session(key=get_async_db_key())
     get_user_db_context = contextlib.asynccontextmanager(get_user_db)
     get_user_manager_context = contextlib.asynccontextmanager(get_user_manager)
     async with get_async_session_context() as session:
@@ -41,7 +55,11 @@ async def gen_token(secret: str, email: str = None, username: str = None):
             conds.append(User.username == username)
         if not email and not username:
             raise ValueError("Need to specify either email or username")
-        user = (await session.execute(select(User).where(*conds))).scalars().first()
+        user = (
+            (await session.execute(select(User).where(*conds)))
+            .scalars()
+            .first()
+        )
         async with get_user_db_context(session) as user_db:
             async with get_user_manager_context(user_db):
                 res = await strategy.write_token(user)
@@ -50,8 +68,13 @@ def main():
     arguments = parse_args()
-    asyncio.run(gen_token(arguments.secret, email=arguments.email,
-                          username=arguments.username))
+    asyncio.run(
+        gen_token(
+            arguments.secret,
+            email=arguments.email,
+            username=arguments.username,
+        )
+    )
 
 
 if __name__ == '__main__':
diff --git a/scripts/manage_flavours.py b/scripts/manage_flavours.py
index fd74c5530..5f02a910b 100644
--- a/scripts/manage_flavours.py
+++ b/scripts/manage_flavours.py
@@ -7,12 +7,14 @@
 import logging
 
 import yaml
+from fastapi_sqla import open_async_session
 from syncer import sync
 
-from alws import database
 from alws.crud import platform_flavors as pf_crud
 from alws.crud import repository as repo_crud
+from alws.dependencies import get_async_db_key
 from alws.schemas import platform_flavors_schema, repository_schema
+from alws.utils.fastapi_sqla_setup import setup_all
 
 
 def parse_args():
@@ -62,7 +64,7 @@ def parse_args():
 
 
 async def update_flavour(flavour_data: dict, logger: logging.Logger):
-    async with database.Session() as db:
+    async with 
open_async_session(key=get_async_db_key()) as db: flavour_names_in_config = [e.get('name') for e in flavours_data] flavours_in_db = await pf_crud.list_flavours(db) @@ -116,7 +118,7 @@ async def prune_flavours( async def add_flavor(flavor_data: dict, logger: logging.Logger): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: flavour = await pf_crud.find_flavour_by_name(db, flavor_data["name"]) if flavour: logger.error("Flavor %s is already added", flavor_data["name"]) @@ -134,10 +136,13 @@ def main(): else: logging.basicConfig(level=logging.INFO) config_path = os.path.expanduser(os.path.expandvars(args.config)) + with open(config_path, "rt") as f: loader = yaml.Loader(f) flavours_data = loader.get_data() + sync(setup_all()) + if args.prune: logger.info("Start to prune") sync(prune_flavours(flavours_data, logger, args.yes)) diff --git a/scripts/manage_users.py b/scripts/manage_users.py index 0ff1965dd..1b5abd5e3 100644 --- a/scripts/manage_users.py +++ b/scripts/manage_users.py @@ -2,8 +2,8 @@ import asyncio import os import sys -from contextlib import asynccontextmanager +from fastapi_sqla import open_async_session from sqlalchemy import delete from sqlalchemy.future import select from sqlalchemy.orm import selectinload @@ -11,61 +11,115 @@ sys.path.append(os.path.dirname(os.path.dirname(__file__))) from alws import models -from alws.crud import ( - actions as action_crud, - roles as role_crud, - teams as team_crud, user as user_crud, -) -from alws.dependencies import get_db +from alws.crud import actions as action_crud +from alws.crud import roles as role_crud +from alws.crud import teams as team_crud +from alws.crud import user as user_crud +from alws.dependencies import get_async_db_key from alws.perms.roles import RolesList +from alws.utils.fastapi_sqla_setup import setup_all def parse_args(): role_names = [r.name for r in RolesList] parser = argparse.ArgumentParser( 'manage_users', - description='Script to manage users and roles for them') - parser.add_argument('-e', '--email', required=True, type=str, - help='User e-mail') - parser.add_argument('-t', '--team-name', required=False, type=str, - help='Team name') - parser.add_argument('-a', '--add-role', required=False, action='append', - dest='add_roles', type=str, help='Add role(s)', - choices=role_names) - parser.add_argument('-r', '--remove-role', required=False, action='append', - dest='remove_roles', type=str, help='Remove role(s)', - choices=role_names) - parser.add_argument('-v', '--verify', required=False, action='store_true', - help='Verify user') - parser.add_argument('-d', '--deactivate', required=False, - action='store_true', help='Deactivate user') - parser.add_argument('-S', '--superuser', required=False, - action='store_true', help='Make user a superuser') - parser.add_argument('-u', '--usual-user', required=False, - action='store_true', help='Make user a usual one') - parser.add_argument('-f', '--fix', required=False, action='store_true', - help='Fix roles and actions') + description='Script to manage users and roles for them', + ) + parser.add_argument( + '-e', '--email', required=True, type=str, help='User e-mail' + ) + parser.add_argument( + '-t', '--team-name', required=False, type=str, help='Team name' + ) + parser.add_argument( + '-a', + '--add-role', + required=False, + action='append', + dest='add_roles', + type=str, + help='Add role(s)', + choices=role_names, + ) + parser.add_argument( + '-r', + '--remove-role', + required=False, + action='append', + 
dest='remove_roles', + type=str, + help='Remove role(s)', + choices=role_names, + ) + parser.add_argument( + '-v', + '--verify', + required=False, + action='store_true', + help='Verify user', + ) + parser.add_argument( + '-d', + '--deactivate', + required=False, + action='store_true', + help='Deactivate user', + ) + parser.add_argument( + '-S', + '--superuser', + required=False, + action='store_true', + help='Make user a superuser', + ) + parser.add_argument( + '-u', + '--usual-user', + required=False, + action='store_true', + help='Make user a usual one', + ) + parser.add_argument( + '-f', + '--fix', + required=False, + action='store_true', + help='Fix roles and actions', + ) return parser.parse_args() async def main() -> int: arguments = parse_args() - async with asynccontextmanager(get_db)() as db, db.begin(): - user = (await db.execute(select(models.User).where( - models.User.email == arguments.email).options( - selectinload(models.User.roles), - selectinload(models.User.oauth_accounts), - ))).scalars().first() + await setup_all() + async with open_async_session(key=get_async_db_key()) as db: + user = ( + ( + await db.execute( + select(models.User) + .where(models.User.email == arguments.email) + .options( + selectinload(models.User.roles), + selectinload(models.User.oauth_accounts), + ) + ) + ) + .scalars() + .first() + ) if not user: - raise ValueError(f'No such user in the system: ' - f'{arguments.email}') + raise ValueError( + f'No such user in the system: ' f'{arguments.email}' + ) if arguments.verify and arguments.deactivate: raise ValueError('Cannot both activate and deactivate user') if arguments.superuser and arguments.usual_user: - raise ValueError('Cannot both make user a superuser ' - 'and usual one') + raise ValueError( + 'Cannot both make user a superuser ' 'and usual one' + ) if arguments.fix: await action_crud.ensure_all_actions_exist(db) @@ -86,42 +140,73 @@ async def main() -> int: if arguments.usual_user: await user_crud.make_usual_user(user.id, db) - if (arguments.add_roles or - arguments.remove_roles) and not arguments.team_name: + if ( + arguments.add_roles or arguments.remove_roles + ) and not arguments.team_name: print('Cannot assign roles without team specified, exiting') return 1 - team = (await db.execute(select(models.Team).where( - models.Team.name == arguments.team_name))).scalars().first() + team = ( + ( + await db.execute( + select(models.Team).where( + models.Team.name == arguments.team_name + ) + ) + ) + .scalars() + .first() + ) if not team: - raise ValueError(f'No such team in the system: ' - f'{arguments.team_name}') + raise ValueError( + f'No such team in the system: ' f'{arguments.team_name}' + ) add_roles = None remove_roles = None if arguments.add_roles: - add_roles = (await db.execute(select(models.UserRole).where( - models.UserRole.name.in_( - [team_crud.get_team_role_name(team.name, r) - for r in arguments.add_roles])) - )).scalars().all() + add_roles = ( + ( + await db.execute( + select(models.UserRole).where( + models.UserRole.name.in_([ + team_crud.get_team_role_name(team.name, r) + for r in arguments.add_roles + ]) + ) + ) + ) + .scalars() + .all() + ) if arguments.remove_roles: - remove_roles = (await db.execute(select(models.UserRole).where( - models.UserRole.name.in_( - [team_crud.get_team_role_name(team.name, r) - for r in arguments.remove_roles])) - )).scalars().all() + remove_roles = ( + ( + await db.execute( + select(models.UserRole).where( + models.UserRole.name.in_([ + team_crud.get_team_role_name(team.name, r) + for r in 
arguments.remove_roles + ]) + ) + ) + ) + .scalars() + .all() + ) if add_roles: user.roles.extend(add_roles) if remove_roles: roles_ids = [r.id for r in remove_roles] - await db.execute(delete(models.UserRoleMapping).where( - models.UserRoleMapping.c.role_id.in_(roles_ids), - models.UserRoleMapping.c.user_id == user.id - )) + await db.execute( + delete(models.UserRoleMapping).where( + models.UserRoleMapping.c.role_id.in_(roles_ids), + models.UserRoleMapping.c.user_id == user.id, + ) + ) db.add(user) return 0 diff --git a/scripts/migrate_old_distros.py b/scripts/migrate_old_distros.py index d65367c52..80c682d69 100644 --- a/scripts/migrate_old_distros.py +++ b/scripts/migrate_old_distros.py @@ -4,57 +4,58 @@ sys.path.append(os.path.dirname(os.path.dirname(__file__))) +from fastapi_sqla import open_session from sqlalchemy import select from sqlalchemy.orm import selectinload from alws import models -from alws.database import SyncSession +from alws.utils.fastapi_sqla_setup import sync_setup def migrate_old_records(): logging.basicConfig(level=logging.DEBUG) - with SyncSession() as session: - with session.begin(): - if not hasattr(models, 'Distribution'): - logging.debug('Distribution model already deleted') - return - items_to_insert = [] - old_db_records = ( - session.execute( - select(models.Distribution).options( - selectinload(models.Distribution.builds), - selectinload(models.Distribution.platforms), - selectinload(models.Distribution.repositories), - selectinload(models.Distribution.owner), - selectinload(models.Distribution.team), - ), - ) - .scalars() - .all() + with open_session() as session: + if not hasattr(models, 'Distribution'): + logging.debug('Distribution model already deleted') + return + items_to_insert = [] + old_db_records = ( + session.execute( + select(models.Distribution).options( + selectinload(models.Distribution.builds), + selectinload(models.Distribution.platforms), + selectinload(models.Distribution.repositories), + selectinload(models.Distribution.owner), + selectinload(models.Distribution.team), + ), ) - logging.debug("Total distributions: %s", len(old_db_records)) - for old_db_record in old_db_records: - logging.debug("Proccessing distribution with id=%s", - old_db_record.id) - new_db_record = models.Product( - name=old_db_record.name, - owner=old_db_record.owner, - team=old_db_record.team, - title=old_db_record.name, - ) - for attr in ("builds", "platforms", "repositories"): - old_db_collection = getattr(old_db_record, attr, []) - new_db_collection = getattr(new_db_record, attr, []) - for coll_item in old_db_collection: - new_db_collection.append(coll_item) - items_to_insert.append(new_db_record) - logging.debug( - "Proccessing distribution with id=%s is finished", - old_db_record.id, - ) - session.add_all(items_to_insert) - session.commit() + .scalars() + .all() + ) + logging.debug("Total distributions: %s", len(old_db_records)) + for old_db_record in old_db_records: + logging.debug( + "Proccessing distribution with id=%s", old_db_record.id + ) + new_db_record = models.Product( + name=old_db_record.name, + owner=old_db_record.owner, + team=old_db_record.team, + title=old_db_record.name, + ) + for attr in ("builds", "platforms", "repositories"): + old_db_collection = getattr(old_db_record, attr, []) + new_db_collection = getattr(new_db_record, attr, []) + for coll_item in old_db_collection: + new_db_collection.append(coll_item) + items_to_insert.append(new_db_record) + logging.debug( + "Proccessing distribution with id=%s is finished", + old_db_record.id, + ) 
+ session.add_all(items_to_insert) if __name__ == '__main__': + sync_setup() migrate_old_records() diff --git a/scripts/migrate_pulp_modularity.py b/scripts/migrate_pulp_modularity.py index bb7857ad4..89c918202 100644 --- a/scripts/migrate_pulp_modularity.py +++ b/scripts/migrate_pulp_modularity.py @@ -13,18 +13,18 @@ import requests import yaml from aiohttp import ClientResponseError +from fastapi_sqla import open_session from hawkey import NEVRA from requests.auth import HTTPBasicAuth from sqlalchemy import select -from alws.database import PulpSession +sys.path.append(os.path.dirname(os.path.dirname(__file__))) + from alws.pulp_models import RpmModulemd, RpmModulemdPackages +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.parsing import parse_rpm_nevra from alws.utils.pulp_client import PulpClient -sys.path.append(os.path.dirname(os.path.dirname(__file__))) - - ROOT_FOLDER = '/srv/pulp/media/' @@ -221,12 +221,10 @@ def filter_debug_and_src_artifacts(artifacts: list[NEVRA]) -> list[NEVRA]: return [ artifact for artifact in artifacts - if all( - [ - artifact.arch != 'src', - 'debug' not in artifact.name, - ] - ) + if all([ + artifact.arch != 'src', + 'debug' not in artifact.name, + ]) ] @@ -351,6 +349,7 @@ async def main(): logging.StreamHandler(), ], ) + await setup_all() time1 = time.time() step = 100 pulp_host = os.environ["PULP_HOST"] @@ -378,23 +377,21 @@ async def main(): logging.info('Artifact %s by path %s', j + i, artifact) migrated_modules.extend( result - for results in await asyncio.gather( - *( - process_module_data( - pulp_client, - j + i, - artifact, - len(artifacts), - args.pulp_storage_path, - args.dry_run, - ) - for j, artifact in enumerate(artifacts[0 + i : i + step]) + for results in await asyncio.gather(*( + process_module_data( + pulp_client, + j + i, + artifact, + len(artifacts), + args.pulp_storage_path, + args.dry_run, ) - ) + for j, artifact in enumerate(artifacts[0 + i : i + step]) + )) for result in results ) - with PulpSession() as pulp_db, pulp_db.begin(): + with open_session(key="pulp") as pulp_db: query = select(RpmModulemd) result = pulp_db.execute(query).scalars().all() all_modules = {i.nsvca: i for i in result} @@ -406,8 +403,7 @@ async def main(): modulemd_packages = ( pulp_db.execute( select(RpmModulemdPackages).where( - RpmModulemdPackages.modulemd_id - == module.content_ptr_id + RpmModulemdPackages.modulemd_id == module.content_ptr_id ) ) .scalars() @@ -417,7 +413,6 @@ async def main(): for modulemd_package in modulemd_packages: pulp_db.delete(modulemd_package) pulp_db.delete(module) - pulp_db.commit() logging.info('Total time: %s', time.time() - time1) diff --git a/scripts/move_logs_to_new_repos.py b/scripts/move_logs_to_new_repos.py index cda7f4a4c..8a25c094f 100644 --- a/scripts/move_logs_to_new_repos.py +++ b/scripts/move_logs_to_new_repos.py @@ -11,11 +11,12 @@ sys.path.append(os.path.dirname(os.path.dirname(__file__))) import requests +from fastapi_sqla import open_async_session, open_session from sqlalchemy import delete, select from sqlalchemy.orm import selectinload from alws.config import settings -from alws.dependencies import get_db, get_pulp_db +from alws.dependencies import get_async_db_key from alws.models import ( Build, BuildTask, @@ -29,6 +30,7 @@ CoreRepositoryContent, ) from alws.utils import pulp_client +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.file_utils import hash_content logging.basicConfig( @@ -52,6 +54,8 @@ async def main(): settings.pulp_password, ) + await setup_all() + def 
get_log_names_from_repo(repo: Repository): result = {} response = requests.get(repo.url) @@ -93,8 +97,8 @@ async def safe_delete(href: str): href, ) - async with asynccontextmanager(get_db)() as session: - with get_pulp_db() as pulp_session: + async with open_async_session(key=get_async_db_key()) as session: + with open_session(key="pulp") as pulp_session: builds = ( ( await session.execute( @@ -311,7 +315,6 @@ async def safe_delete(href: str): Repository.id.in_(repo_ids_to_remove), ) ) - await session.commit() if __name__ == "__main__": diff --git a/scripts/noarch_checker.py b/scripts/noarch_checker.py index a8d5b9e57..86ab26f6c 100644 --- a/scripts/noarch_checker.py +++ b/scripts/noarch_checker.py @@ -1,6 +1,5 @@ import argparse import asyncio -from contextlib import asynccontextmanager import datetime import logging import os @@ -8,16 +7,18 @@ import typing import urllib.parse +from fastapi_sqla import open_async_session from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import Query, selectinload sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws.models import Platform, Product, Repository from alws.config import settings -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key +from alws.models import Platform, Product, Repository from alws.utils import pulp_client +from alws.utils.fastapi_sqla_setup import setup_all class NoarchProcessor: @@ -97,7 +98,8 @@ async def copy_noarch_packages_from_source( destination_repo_href: str, ) -> None: self.logger.info( - 'Collecting packages from: "%s"', destination_repo_name, + 'Collecting packages from: "%s"', + destination_repo_name, ) destination_repo_packages = await self.retrieve_all_packages_from_repo( await self.pulp.get_repo_latest_version(destination_repo_href) @@ -109,28 +111,37 @@ async def copy_noarch_packages_from_source( replace_msg = 'Package "%s" replaced in "%s" repo from "%s" repo' if self.only_check: add_msg = 'Package "%s" can be added from "%s" repo into "%s" repo' - replace_msg = 'Package "%s" can be replaced in "%s" repo from "%s" repo' + replace_msg = ( + 'Package "%s" can be replaced in "%s" repo from "%s" repo' + ) for package_dict in source_repo_packages: pkg_name = package_dict["name"] pkg_version = package_dict["version"] pkg_release = package_dict["release"] is_modular = ".module_el" in pkg_release full_name = f"{pkg_name}-{pkg_version}-{pkg_release}.noarch.rpm" - compared_pkg = next(( - pkg for pkg in destination_repo_packages - if all(( - pkg["name"] == pkg_name, - pkg["version"] == pkg_version, - pkg["release"] == pkg_release, - )) - ), None) + compared_pkg = next( + ( + pkg + for pkg in destination_repo_packages + if all(( + pkg["name"] == pkg_name, + pkg["version"] == pkg_version, + pkg["release"] == pkg_release, + )) + ), + None, + ) if compared_pkg is None and not self.only_replace: if is_modular or self.show_diff: continue if not self.only_check: packages_to_add.append(package_dict["pulp_href"]) self.logger.info( - add_msg, full_name, source_repo_name, destination_repo_name + add_msg, + full_name, + source_repo_name, + destination_repo_name, ) continue if ( @@ -142,8 +153,10 @@ async def copy_noarch_packages_from_source( packages_to_remove.append(compared_pkg["pulp_href"]) packages_to_add.append(package_dict["pulp_href"]) self.logger.info( - replace_msg, full_name, - destination_repo_name, source_repo_name + replace_msg, + full_name, + destination_repo_name, + source_repo_name, ) if packages_to_add and not 
self.only_check: @@ -165,7 +178,8 @@ async def prepare_and_execute_async_tasks( for source_repo_name, repo_data in source_repo_dict.items(): repo_href, source_is_debug = repo_data self.logger.info( - 'Collecting packages from: "%s"', source_repo_name, + 'Collecting packages from: "%s"', + source_repo_name, ) source_repo_packages = await self.retrieve_all_packages_from_repo( await self.pulp.get_repo_latest_version(repo_href), @@ -174,12 +188,14 @@ async def prepare_and_execute_async_tasks( dest_repo_href, dest_repo_is_debug = dest_repo_data if source_is_debug != dest_repo_is_debug: continue - tasks.append(self.copy_noarch_packages_from_source( - source_repo_name=source_repo_name, - source_repo_packages=source_repo_packages, - destination_repo_name=dest_repo_name, - destination_repo_href=dest_repo_href, - )) + tasks.append( + self.copy_noarch_packages_from_source( + source_repo_name=source_repo_name, + source_repo_packages=source_repo_packages, + destination_repo_name=dest_repo_name, + destination_repo_href=dest_repo_href, + ) + ) self.logger.info('Start checking noarch packages in repos') await asyncio.gather(*tasks) self.logger.info('Checking noarch packages in repos is done') @@ -191,20 +207,12 @@ def get_model_query(self, is_dest=False) -> Query: raise ValueError(f"Wrong model type: {model_type}") if model_type == "platform": model = Platform - conditions = ( - Platform.name == obj_name, - ) - options = ( - selectinload(Platform.repos), - ) + conditions = (Platform.name == obj_name,) + options = (selectinload(Platform.repos),) if model_type == "product": model = Product - conditions = ( - Product.name == obj_name, - ) - options = ( - selectinload(Product.repositories), - ) + conditions = (Product.name == obj_name,) + options = (selectinload(Product.repositories),) return select(model).where(*conditions).options(*options) @staticmethod @@ -327,7 +335,8 @@ def parse_args(): async def main(): args = parse_args() pulp_client.PULP_SEMAPHORE = asyncio.Semaphore(10) - async with asynccontextmanager(get_db)() as session: + await setup_all() + async with open_async_session(key=get_async_db_key()) as session: processor = NoarchProcessor( session=session, source_obj_name=args.source, diff --git a/scripts/packages_exporter.py b/scripts/packages_exporter.py index d92b41176..4c98843e8 100644 --- a/scripts/packages_exporter.py +++ b/scripts/packages_exporter.py @@ -21,6 +21,7 @@ import rpm import sentry_sdk import sqlalchemy +from fastapi_sqla import open_async_session # Required for generating RSS from feedgen.feed import FeedGenerator @@ -32,9 +33,10 @@ sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws import database, models +from alws import models from alws.config import settings from alws.constants import SignStatusEnum +from alws.dependencies import get_async_db_key from alws.utils.errata import ( extract_errata_metadata, extract_errata_metadata_modern, @@ -45,6 +47,7 @@ merge_errata_records_modern, ) from alws.utils.exporter import download_file, get_repodata_file_links +from alws.utils.fastapi_sqla_setup import setup_all from alws.utils.osv import export_errata_to_osv from alws.utils.pulp_client import PulpClient @@ -304,7 +307,7 @@ async def get_exporter_data( repo_exporter_dict["publication_href"] = publication_href return fs_exporter_href, repo_exporter_dict - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: query = select(models.Repository).where( models.Repository.id.in_(repository_ids) ) @@ -317,9 +320,7 @@ async def 
get_exporter_data(
         return list(dict(results).values())
 
-    async def sign_repomd_xml(
-        self, path_to_file: str, key_id: str, token: str
-    ):
+    async def sign_repomd_xml(self, path_to_file: str, key_id: str, token: str):
         endpoint = "sign"
         result = {"asc_content": None, "error": None}
         try:
@@ -582,7 +583,7 @@ async def export_repos_from_pulp(
                 selectinload(models.Platform.sign_keys),
             )
         )
-        async with database.Session() as db:
+        async with open_async_session(key=get_async_db_key()) as db:
             db_platforms = await db.execute(query)
             db_platforms = db_platforms.scalars().all()
 
@@ -637,7 +638,7 @@ async def export_repos_from_release(
         self.logger.info(
             "Start exporting packages from release id=%s", release_id
         )
-        async with database.Session() as db:
+        async with open_async_session(key=get_async_db_key()) as db:
             db_release = await db.execute(
                 select(models.Release).where(models.Release.id == release_id)
             )
@@ -776,6 +777,7 @@ def repo_post_processing(exporter: Exporter, repo_path: str):
 def main():
     args = parse_args()
     init_sentry()
+    sync(setup_all())
 
     platforms_dict = {}
     key_id_by_platform = None
@@ -948,9 +950,7 @@ def main():
                             platform_errata_cache[platform]["modern_cache"],
                         )
                     )
-                    with open(
-                        os.path.join(platform_path, "errata.rss"), "w"
-                    ) as fd:
+                    with open(os.path.join(platform_path, "errata.rss"), "w") as fd:
                         fd.write(rss)
                 exporter.logger.debug(f"RSS generation for {platform} is done")
             except Exception:
diff --git a/scripts/remove_unnecessary_versions_of_repositories.py b/scripts/remove_unnecessary_versions_of_repositories.py
index 5e894bf3e..e3d6a2fb1 100644
--- a/scripts/remove_unnecessary_versions_of_repositories.py
+++ b/scripts/remove_unnecessary_versions_of_repositories.py
@@ -18,25 +18,29 @@
 # REMEMBER: Do Pulp storage backup before running the script in
 # production just in case anything goes wrong.
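+# NOTE: this script now opens database sessions through fastapi-sqla, which
+# requires the engines to be registered first; main() below calls
+# `await setup_all()` before any database access.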
# +import argparse import asyncio +import datetime +import logging import os +import re import sys -import argparse import typing -import re -import logging import urllib.parse -import datetime + +from fastapi_sqla import open_async_session from sqlalchemy.future import select + sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws import database from alws import models from alws.crud import build as build_crud -from alws.utils.pulp_client import PulpClient +from alws.dependencies import get_async_db_key from alws.errors import ( BuildError, DataNotFoundError, ) +from alws.utils.fastapi_sqla_setup import setup_all +from alws.utils.pulp_client import PulpClient def parse_args(): @@ -57,7 +61,7 @@ def parse_args(): # Get old unsigned, unreleased and unrelated builds async def get_old_unsigned_builds(logger: logging.Logger): - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: build_dependency = select( models.BuildDependency.c.build_dependency ).scalar_subquery() @@ -83,7 +87,7 @@ async def get_old_unsigned_builds(logger: logging.Logger): async def remove_builds(builds: list, logger: logging.Logger): for build in builds: - async with database.Session() as db: + async with open_async_session(key=get_async_db_key()) as db: try: logger.debug("Delete build with id: %s", build.id) await build_crud.remove_build_job(db, build.id) @@ -116,7 +120,7 @@ async def remove_unnecessary_versions( result = await pulp_client.request("PATCH", endpoint, json=params) logger.debug( "Create task to back retain repo versions to %s", - params["retain_repo_versions"] + params["retain_repo_versions"], ) logger.debug(result) @@ -165,6 +169,7 @@ async def main(): logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) + await setup_all() pulp_client = PulpClient(pulp_host, pulp_user, pulp_password) logger.info("Get old unsigned builds") builds = await get_old_unsigned_builds(logger) diff --git a/scripts/upload_repository_metadata.py b/scripts/upload_repository_metadata.py index 7e79ddeac..91f294083 100644 --- a/scripts/upload_repository_metadata.py +++ b/scripts/upload_repository_metadata.py @@ -6,9 +6,10 @@ from io import BytesIO from fastapi import UploadFile +from fastapi_sqla import open_async_session from syncer import sync -from alws.dependencies import get_db +from alws.dependencies import get_async_db_key from alws.utils.uploader import MetadataUploader @@ -43,7 +44,7 @@ async def main(): os.path.abspath(os.path.expanduser(args.comps_file)), 'rt' ) as f: comps_content = UploadFile(BytesIO(f.read().encode('utf-8'))) - async with asynccontextmanager(get_db)() as session: + async with open_async_session(get_async_db_key()) as session: uploader = MetadataUploader(session, args.repo_name) await uploader.process_uploaded_files( module_content, comps_content, dry_run=args.dry_run diff --git a/scripts/wrong_cas_hash_fixes.py b/scripts/wrong_cas_hash_fixes.py index 6c075403b..d20280c18 100644 --- a/scripts/wrong_cas_hash_fixes.py +++ b/scripts/wrong_cas_hash_fixes.py @@ -3,32 +3,37 @@ import sys import uuid +from fastapi_sqla import open_session from sqlalchemy import select - sys.path.append(os.path.dirname(os.path.dirname(__file__))) -from alws.database import PulpSession, SyncSession from alws.models import Build, BuildTask, BuildTaskArtifact from alws.pulp_models import CoreArtifact, CoreContentArtifact +from alws.utils.fastapi_sqla_setup import sync_setup def main(): + sync_setup() + first_subq = ( select(Build.id) # 
commit date that brings wrong cas_hashes - .where(Build.created_at >= datetime.datetime(2022, 10, 30)) - .scalar_subquery() + .where( + Build.created_at >= datetime.datetime(2022, 10, 30) + ).scalar_subquery() ) second_subq = ( - select(BuildTask.id).where(BuildTask.build_id.in_(first_subq)).scalar_subquery() + select(BuildTask.id) + .where(BuildTask.build_id.in_(first_subq)) + .scalar_subquery() ) query = select(BuildTaskArtifact).where( BuildTaskArtifact.build_task_id.in_(second_subq), BuildTaskArtifact.cas_hash.is_not(None), BuildTaskArtifact.type == "rpm", ) - with SyncSession() as session, PulpSession() as pulp_session, session.begin(): + with open_session() as session, open_session(key="pulp") as pulp_session: alma_artifacts_mapping = {} for artifact in session.execute(query).scalars().all(): key = uuid.UUID(artifact.href.split("/")[-2]) @@ -53,7 +58,6 @@ def main(): if alma_artifact.cas_hash == pulp_artifact.sha256: continue alma_artifact.cas_hash = pulp_artifact.sha256 - session.commit() if __name__ == "__main__": diff --git a/tests/fixtures/builds.py b/tests/fixtures/builds.py index 498d49b16..fb9981153 100644 --- a/tests/fixtures/builds.py +++ b/tests/fixtures/builds.py @@ -2,8 +2,10 @@ import typing import pytest +from fastapi_sqla import open_async_session from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.session import Session from alws.crud.build import create_build, get_builds from alws.dramatiq.build import _start_build @@ -948,66 +950,76 @@ def build_payload() -> typing.Dict[str, typing.Any]: @pytest.mark.anyio @pytest.fixture async def modular_build( - session: AsyncSession, + async_session: AsyncSession, modular_build_payload: dict, ) -> typing.AsyncIterable[Build]: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**modular_build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @pytest.fixture async def virt_modular_build( - session: AsyncSession, + async_session: AsyncSession, virt_build_payload: dict, ) -> typing.AsyncIterable: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**virt_build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @pytest.fixture async def ruby_modular_build( - session: AsyncSession, + async_session: AsyncSession, ruby_build_payload: dict, ) -> typing.AsyncIterable: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**ruby_build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @pytest.fixture async def subversion_modular_build( - session: AsyncSession, + async_session: AsyncSession, subversion_build_payload: dict, ) -> typing.AsyncIterable: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**subversion_build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @pytest.fixture async def llvm_modular_build( - session: AsyncSession, + async_session: AsyncSession, llvm_build_payload: dict, ) -> typing.AsyncIterable: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**llvm_build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @@ -1015,20 +1027,22 @@ async def llvm_modular_build( async def regular_build( base_platform, 
base_product, - session: AsyncSession, + async_session: AsyncSession, build_payload: dict, ) -> typing.AsyncIterable[Build]: - yield await create_build( - session, + build = await create_build( + async_session, BuildCreate(**build_payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() + yield build @pytest.mark.anyio @pytest.fixture async def regular_build_with_user_product( - session: AsyncSession, + async_session: AsyncSession, build_payload: dict, create_build_rpm_repo, create_log_repo, @@ -1037,7 +1051,7 @@ async def regular_build_with_user_product( payload = copy.deepcopy(build_payload) user_product_id = ( ( - await session.execute( + await async_session.execute( select(Product.id).where(Product.is_community.is_(True)) ) ) @@ -1046,12 +1060,13 @@ async def regular_build_with_user_product( ) payload['product_id'] = user_product_id build = await create_build( - session, + async_session, BuildCreate(**payload), user_id=ADMIN_USER_ID, ) + await async_session.commit() await _start_build(build.id, BuildCreate(**payload)) - yield await get_builds(session, build_id=build.id) + yield await get_builds(async_session, build_id=build.id) @pytest.fixture @@ -1077,16 +1092,16 @@ async def func(arg, arg2): @pytest.mark.anyio @pytest.fixture async def build_for_release( - session: AsyncSession, + async_session: AsyncSession, regular_build: Build, ) -> typing.AsyncIterable[Build]: - yield await get_builds(session, build_id=regular_build.id) + yield await get_builds(async_session, build_id=regular_build.id) @pytest.mark.anyio @pytest.fixture async def modular_build_for_release( - session: AsyncSession, + async_session: AsyncSession, modular_build: Build, ) -> typing.AsyncIterable[Build]: - yield await get_builds(session, build_id=modular_build.id) + yield await get_builds(async_session, build_id=modular_build.id) diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 0ac99627b..092a53274 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -1,40 +1,41 @@ import os import typing from contextlib import asynccontextmanager +from unittest.mock import patch import pytest -from sqlalchemy import insert, select +from fastapi_sqla import open_async_session +from sqlalchemy import delete, insert, select from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm.session import sessionmaker from sqlalchemy.pool import NullPool from alws import models from alws.config import settings from alws.database import Base +from alws.dependencies import get_async_db_key +from alws.utils.fastapi_sqla_setup import setup_all from tests.constants import ADMIN_USER_ID, CUSTOM_USER_ID -engine = create_async_engine( - os.getenv('DATABASE_URL', settings.test_database_url), - poolclass=NullPool, - echo_pool=True, -) - -async def get_session(): - async with AsyncSession( - engine, - expire_on_commit=False, - ) as sess: - try: - yield sess - finally: - await sess.close() +@pytest.fixture +def async_session_factory(): + """Fastapi-sqla async_session_factory() fixture overload, disabling expire_on_commit.""" + return sessionmaker(class_=AsyncSession, expire_on_commit=False) @pytest.mark.anyio @pytest.fixture -async def session() -> typing.AsyncIterator[AsyncSession]: - async with asynccontextmanager(get_session)() as db_session: - yield db_session +async def async_session( + async_sqla_connection, + async_session_factory, + async_sqla_reflection, + # patch_new_engine +): + """Fastapi-sqla async_session() fixture overload.""" + session = 
async_session_factory(bind=async_sqla_connection) + yield session + await session.close() def get_user_data(): @@ -56,7 +57,7 @@ def get_user_data(): ] -async def create_user(data: dict): +async def create_user(async_session: AsyncSession, data: dict): data = { "id": data["id"], "username": data["username"], @@ -64,23 +65,69 @@ async def create_user(data: dict): "is_superuser": data["is_superuser"], "is_verified": data["is_verified"], } - async with asynccontextmanager(get_session)() as db_session: - user = await db_session.execute( - select(models.User).where(models.User.id == data["id"]), - ) - if user.scalars().first(): - return - await db_session.execute(insert(models.User).values(**data)) - await db_session.commit() + user = await async_session.execute( + select(models.User).where(models.User.id == data["id"]), + ) + if user.scalars().first(): + return + await async_session.execute(insert(models.User).values(**data)) + await async_session.commit() @pytest.mark.anyio @pytest.fixture(scope="module", autouse=True) async def create_tables(): + engine = create_async_engine( + os.getenv('DATABASE_URL', settings.fastapi_sqla__async__sqlalchemy_url), + poolclass=NullPool, + echo_pool=True, + ) async with engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) - for user_data in get_user_data(): - await create_user(user_data) + + await setup_all() + async with open_async_session(get_async_db_key()) as async_session: + for user_data in get_user_data(): + await create_user(async_session, user_data) yield + async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) + + +@pytest.fixture +def sqla_modules(): + from alws.models import ( # noqa + Build, + BuildTask, + ErrataRecord, + NewErrataRecord, + Platform, + SignKey, + SignTask, + Team, + TestRepository, + User, + UserAccessToken, + UserAction, + UserOauthAccount, + UserRole, + ) + + +@pytest.fixture(scope="session") +def db_url(): + """Fastapi-sqla fixture. Sync database url.""" + return settings.sqlalchemy_url + + +@pytest.fixture(scope="session") +def async_sqlalchemy_url(): + """Fastapi-sqla fixture. Async database url.""" + return settings.fastapi_sqla__async__sqlalchemy_url + + +@pytest.fixture(scope="session") +def alembic_ini_path(): + """Fastapi-sqla fixture. 
Path for alembic.ini file.""" + return "./alws/alembic.ini" diff --git a/tests/fixtures/dramatiq.py b/tests/fixtures/dramatiq.py index 667f075d1..80f35428c 100644 --- a/tests/fixtures/dramatiq.py +++ b/tests/fixtures/dramatiq.py @@ -136,7 +136,7 @@ def prepare_build_done_payload( @pytest.mark.anyio @pytest.fixture async def build_done( - session: AsyncSession, + async_session: AsyncSession, regular_build: Build, start_build, create_entity, @@ -144,11 +144,11 @@ async def build_done( mock_get_pulp_packages, get_packages_info_from_pulp, ): - build = await get_builds(db=session, build_id=regular_build.id) - await session.close() + build = await get_builds(db=async_session, build_id=regular_build.id) + await async_session.close() for build_task in build.tasks: await safe_build_done( - session, + async_session, BuildDone( **prepare_build_done_payload( build_task.id, @@ -159,17 +159,19 @@ async def build_done( ) ), ) - build = await get_builds(db=session, build_id=regular_build.id) + await async_session.commit() + build = await get_builds(db=async_session, build_id=regular_build.id) for build_task in build.tasks: assert build_task.status == BuildTaskStatus.COMPLETED - await session.close() - await test.create_test_tasks_for_build_id(session, build.id) + await async_session.close() + await test.create_test_tasks_for_build_id(async_session, build.id) + await async_session.commit() @pytest.mark.anyio @pytest.fixture async def modular_build_done( - session: AsyncSession, + async_session: AsyncSession, modular_build: Build, start_modular_build, create_entity, @@ -177,11 +179,11 @@ async def modular_build_done( get_repo_modules_yaml, get_repo_modules, ): - build = await get_builds(db=session, build_id=modular_build.id) - await session.close() + build = await get_builds(db=async_session, build_id=modular_build.id) + await async_session.close() for build_task in build.tasks: await safe_build_done( - session, + async_session, BuildDone( **prepare_build_done_payload( build_task.id, @@ -192,12 +194,13 @@ async def modular_build_done( ) ), ) + await async_session.commit() @pytest.mark.anyio @pytest.fixture async def virt_build_done( - session: AsyncSession, + async_session: AsyncSession, virt_modular_build: Build, modify_repository, start_modular_virt_build, @@ -206,8 +209,8 @@ async def virt_build_done( get_repo_virt_modules_yaml, get_repo_modules, ): - build = await get_builds(db=session, build_id=virt_modular_build.id) - await session.close() + build = await get_builds(db=async_session, build_id=virt_modular_build.id) + await async_session.close() for build_task in build.tasks: status = "done" packages = [] @@ -244,7 +247,7 @@ async def virt_build_done( status = "excluded" await safe_build_done( - session, + async_session, BuildDone( **prepare_build_done_payload( build_task.id, @@ -253,12 +256,13 @@ async def virt_build_done( ) ), ) + await async_session.commit() @pytest.mark.anyio @pytest.fixture async def ruby_build_done( - session: AsyncSession, + async_session: AsyncSession, ruby_modular_build: Build, modify_repository, start_modular_ruby_build, @@ -267,8 +271,8 @@ async def ruby_build_done( get_repo_ruby_modules_yaml, get_repo_modules, ): - build = await get_builds(db=session, build_id=ruby_modular_build.id) - await session.close() + build = await get_builds(db=async_session, build_id=ruby_modular_build.id) + await async_session.close() for build_task in build.tasks: packages = [ "ruby-3.1.2-141.module_el8.1.0+8+503f6fbd.src.rpm", @@ -288,15 +292,16 @@ async def ruby_build_done( 
"rubygem-pg-doc-3.3.7-141.module_el8.1.0+8+503f6fbd.noarch.rpm", ] await safe_build_done( - session, + async_session, BuildDone(**prepare_build_done_payload(build_task.id, packages)), ) + await async_session.commit() @pytest.mark.anyio @pytest.fixture async def subversion_build_done( - session: AsyncSession, + async_session: AsyncSession, subversion_modular_build: Build, modify_repository, start_modular_subversion_build, @@ -305,8 +310,10 @@ async def subversion_build_done( get_repo_subversion_modules_yaml, get_repo_modules, ): - build = await get_builds(db=session, build_id=subversion_modular_build.id) - await session.close() + build = await get_builds( + db=async_session, build_id=subversion_modular_build.id + ) + await async_session.close() for build_task in build.tasks: packages = [ "subversion-1.10.2-5.module_el8.6.0+3347+66c1e1d6.src.rpm", @@ -317,15 +324,16 @@ async def subversion_build_done( f"subversion-ruby-1.10.2-5.module_el8.6.0+3347+66c1e1d6.{build_task.arch}.rpm", ] await safe_build_done( - session, + async_session, BuildDone(**prepare_build_done_payload(build_task.id, packages)), ) + await async_session.commit() @pytest.mark.anyio @pytest.fixture async def llvm_build_done( - session: AsyncSession, + async_session: AsyncSession, llvm_modular_build: Build, modify_repository, start_modular_llvm_build, @@ -334,8 +342,8 @@ async def llvm_build_done( get_repo_llvm_modules_yaml, get_repo_modules, ): - build = await get_builds(db=session, build_id=llvm_modular_build.id) - await session.close() + build = await get_builds(db=async_session, build_id=llvm_modular_build.id) + await async_session.close() for build_task in build.tasks: packages = [] if "python" in build_task.ref.url: @@ -349,6 +357,7 @@ async def llvm_build_done( f"llvm-13.0.1-1.module+el8.6.0+14118+d530a951.{build_task.arch}.rpm", ] await safe_build_done( - session, + async_session, BuildDone(**prepare_build_done_payload(build_task.id, packages)), ) + await async_session.commit() diff --git a/tests/fixtures/errata.py b/tests/fixtures/errata.py index 3fa0d3592..7345e24ce 100644 --- a/tests/fixtures/errata.py +++ b/tests/fixtures/errata.py @@ -41,21 +41,19 @@ def errata_create_payload(request) -> typing.Dict[str, typing.Any]: "objects": None, "states": None, "variables": None, - "references": [ - { - "href": f"https://access.redhat.com/errata/{orig_id}", - "ref_id": orig_id, - "ref_type": "rhsa", - "title": orig_id, - "cve": { - "id": "CVE-2022-21618", - "cvss3": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:N", - "cwe": "CWE-120", - "impact": "Moderate", - "public": "2022-10-18T20:00:00Z", - }, - } - ], + "references": [{ + "href": f"https://access.redhat.com/errata/{orig_id}", + "ref_id": orig_id, + "ref_type": "rhsa", + "title": orig_id, + "cve": { + "id": "CVE-2022-21618", + "cvss3": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:N", + "cwe": "CWE-120", + "impact": "Moderate", + "public": "2022-10-18T20:00:00Z", + }, + }], "packages": [ { "name": "usbguard", @@ -107,13 +105,16 @@ def func(*args, **kwargs): @pytest.mark.anyio @pytest.fixture async def create_errata( - session: AsyncSession, + async_session: AsyncSession, errata_create_payload: typing.Dict[str, typing.Any], ): await create_errata_record( - session, + async_session, BaseErrataRecord(**errata_create_payload), ) + await async_session.commit() + yield + await async_session.rollback() @pytest.fixture @@ -135,497 +136,495 @@ def pulp_updateinfos(): "release": "0", "rights": "Copyright 2022 AlmaLinux OS", "pushcount": "1", - "pkglist": [ - { - "name": 
"almalinux-8-for-x86_64-appstream-rpms__8_7_default", - "shortname": "almalinux-8-for-x86_64-appstream-rpms__8_7_default", - "module": { + "pkglist": [{ + "name": "almalinux-8-for-x86_64-appstream-rpms__8_7_default", + "shortname": "almalinux-8-for-x86_64-appstream-rpms__8_7_default", + "module": { + "arch": "x86_64", + "name": "container-tools", + "stream": "rhel8", + "context": "20125149", + "version": 8070020221108190354, + }, + "packages": [ + { "arch": "x86_64", - "name": "container-tools", - "stream": "rhel8", - "context": "20125149", - "version": 8070020221108190354, - }, - "packages": [ - { - "arch": "x86_64", - "epoch": "0", - "filename": "crun-1.5-1.module_el8.6.0+3336+00d107d5.x86_64.rpm", - "name": "crun", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+3336+00d107d5", - "src": "crun-1.5-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "887feb275fc7f5e6453802e86b44d283620ed4dd25d5f92ff3fc247d7a646131", - "sum_type": "sha256", - "version": "1.5", - }, - { - "arch": "noarch", - "epoch": "2", - "filename": "container-selinux-2.189.0-1.module_el8.7.0+3407+95aa0ca9.noarch.rpm", - "name": "container-selinux", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "container-selinux-2.189.0-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "3f4c1c731d4bd48fd9f34818a4a62d0bec71212b3c6b9a91bb84cc8e3eab2bb0", - "sum_type": "sha256", - "version": "2.189.0", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "udica", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3344+484dae7b", - "src": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "1868b279d5d378ca40e974fd5a2ae4b89801acc2fdee204e45e830f79c97f742", - "sum_type": "sha256", - "version": "0.2.6", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "toolbox-tests-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.x86_64.rpm", - "name": "toolbox-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "0.6.module_el8.6.0+3070+1510fbd1", - "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.src.rpm", - "sum": "df0f3e5589cb03efc3ae680492d348c86630b7784587bfaf7650c150a0af873d", - "sum_type": "sha256", - "version": "0.0.99.3", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.x86_64.rpm", - "name": "toolbox", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "0.6.module_el8.6.0+3070+1510fbd1", - "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.src.rpm", - "sum": "399a604cd826021dad30ca36bfc8559dbe8020ae86f23dc28ef6fbd5cf9d2eb7", - "sum_type": "sha256", - "version": "0.0.99.3", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "slirp4netns-1.2.0-2.module_el8.6.0+3070+1510fbd1.x86_64.rpm", - "name": "slirp4netns", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.6.0+3070+1510fbd1", - "src": "slirp4netns-1.2.0-2.module_el8.6.0+3070+1510fbd1.src.rpm", - "sum": "52d1d9b87f954bc58c4dd85f64aa89d481f797e50b733d71de653c77cb7f1362", - "sum_type": "sha256", - "version": "1.2.0", - }, - { - "arch": "x86_64", - "epoch": "2", - "filename": 
"skopeo-tests-1.9.3-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "skopeo-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "615d441934cb88946d5eb112d7ed6af989040dcf4d4556756fa0d26e7d1915c9", - "sum_type": "sha256", - "version": "1.9.3", - }, - { - "arch": "x86_64", - "epoch": "2", - "filename": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "skopeo", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "2853fdb7dca73b9eada3617fc089d05e896a1b3a1b30d57c9b394a0312a265d8", - "sum_type": "sha256", - "version": "1.9.3", - }, - { - "arch": "x86_64", - "epoch": "1", - "filename": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "runc", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "a396c3d8f9a60fba334339601131dabf5883659f2d235cb7f395ef7315e7dd27", - "sum_type": "sha256", - "version": "1.1.4", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "python3-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "python3-podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "python-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "c0d14ac37714db0b672f6aae7f687f1a9f478e993bf56968011897a4b4493911", - "sum_type": "sha256", - "version": "4.2.1", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "python3-criu-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", - "name": "python3-criu", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+2751+06427ca3", - "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "f08f386c12dcfff2595514fbf41e2489520615bbdd59ad1568ca47a15d49f23b", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-tests-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "abb3d2983848324d712c942e5b19d2a14db1fabae0b889075bf38fc6b10a1a6b", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-remote-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman-remote", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "f8ded2321277b7b6ca19be3286c3a8ce9c3989c96146f06b544056bf00507ea8", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-plugins-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman-plugins", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": 
"podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "a81b73330847ddf8944e78b5753aa459355a6562b764131728d7c2f232e0c538", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-gvproxy-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman-gvproxy", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "7b7e1d7dbbb427169ebd58c9681e224b433ece44cef4ddff925983cb3923e6d7", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "noarch", - "epoch": "3", - "filename": "podman-docker-4.2.0-4.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "podman-docker", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "91c0f8ae74773578b71f55a980354fa51283c2276fcab65254744c010ee790bd", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-catatonit-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman-catatonit", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "8030e9d0197d08c80124087d43b863df97998c998e0ad1176a646c17a0b778f7", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "daf283359ee1a4676a969a1afbc4894d72be5b071be12c99fb6185da90c74cff", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.x86_64.rpm", - "name": "oci-seccomp-bpf-hook", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+3336+00d107d5", - "src": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.src.rpm", - "sum": "6bc0e6f1429d6297d3b5fd74e38a6ccee11b9dec5b207549352f7431bdf6f898", - "sum_type": "sha256", - "version": "1.2.6", - }, - { - "arch": "x86_64", - "epoch": "2", - "filename": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "netavark", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "7.module_el8.7.0+3344+484dae7b", - "src": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "82c889b9fbd4bec98c5f1c5d06de8e4f964354c5a5ab7503d5a3a321acbfe9d9", - "sum_type": "sha256", - "version": "1.1.0", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "libslirp-devel-4.4.0-1.module_el8.6.0+2877+8e437bf5.x86_64.rpm", - "name": "libslirp-devel", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+2877+8e437bf5", - "src": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "2bda7ff20959fc2c6a059846f63836c69a3871794cd7b5154866ecc6a4545b0c", - "sum_type": "sha256", - "version": "4.4.0", - }, - { - "arch": "x86_64", - "epoch": 
"0", - "filename": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.x86_64.rpm", - "name": "libslirp", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+2877+8e437bf5", - "src": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "e79d04839688384f66c8053a605f5b73e43b256bdb77d4027031ebc8909aacd3", - "sum_type": "sha256", - "version": "4.4.0", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "fuse-overlayfs-1.9-1.module_el8.6.0+3070+1510fbd1.x86_64.rpm", - "name": "fuse-overlayfs", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+3070+1510fbd1", - "src": "fuse-overlayfs-1.9-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "140933a1038eb7af6360bfd7d7dd5d8d6b0be0e353454f6b9f41cf0b5693c468", - "sum_type": "sha256", - "version": "1.9", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "criu-libs-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", - "name": "criu-libs", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+2751+06427ca3", - "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "04ae80cdd853a5d49d6e648d4fcf0dc73bb25704d7afc24ac3af76b06786b4b1", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "criu-devel-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", - "name": "criu-devel", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+2751+06427ca3", - "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "b79b30367b06961e7514f46ea8b4aa82399fdadd2d9ba7a818d2481fe7677349", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "criu-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", - "name": "criu", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+2751+06427ca3", - "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "60d54bb32f236d7d7029a02278af814324dabcf0992f1ac61f47970e47ceb403", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "x86_64", - "epoch": "0", - "filename": "crit-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", - "name": "crit", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+2751+06427ca3", - "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", - "sum": "4425c86fd1af6b6dc0c8caafbac77caa852b68765d87a84e9eeaa1eeebbfa636", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "x86_64", - "epoch": "2", - "filename": "containers-common-1-43.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "containers-common", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "43.module_el8.7.0+3344+484dae7b", - "src": "containers-common-1-43.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "3c8879f6916c905dfa27219103e19872acbb6a8481c151f09f137a471c02a727", - "sum_type": "sha256", - "version": "1", - }, - { - "arch": "x86_64", - "epoch": "1", - "filename": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3070+1510fbd1.x86_64.rpm", - "name": "containernetworking-plugins", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+3070+1510fbd1", - "src": 
"containernetworking-plugins-1.1.1-3.module_el8.6.0+3070+1510fbd1.src.rpm", - "sum": "2be734dd2f2fc4797b8a535a26d39fac03a435bae3be7e02e8ebd94e103fd2bf", - "sum_type": "sha256", - "version": "1.1.1", - }, - { - "arch": "x86_64", - "epoch": "3", - "filename": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "conmon", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "973a8421e6da1bd524e0beb00c76157146f8a772572c735b1c5b29bd04aa3af8", - "sum_type": "sha256", - "version": "2.1.4", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "cockpit-podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "2118d185c4a6cdffcc184c92a7b2b93c5ff0bec02f37be22a685a97bcf783631", - "sum_type": "sha256", - "version": "53", - }, - { - "arch": "x86_64", - "epoch": "1", - "filename": "buildah-tests-1.27.2-2.module_el8.7.0+3348+f3135399.x86_64.rpm", - "name": "buildah-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.7.0+3348+f3135399", - "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", - "sum": "da6d6f363dd0128bd9518712a915b1f1984b581433f02a5239cbffa2d3e5521b", - "sum_type": "sha256", - "version": "1.27.2", - }, - { - "arch": "x86_64", - "epoch": "1", - "filename": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.x86_64.rpm", - "name": "buildah", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.7.0+3348+f3135399", - "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", - "sum": "c7828c855fbe97d201e9a97698ab06ed8c8ccbc24089af3c46936fb4a3e185aa", - "sum_type": "sha256", - "version": "1.27.2", - }, - { - "arch": "x86_64", - "epoch": "2", - "filename": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.x86_64.rpm", - "name": "aardvark-dns", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "5.module_el8.7.0+3344+484dae7b", - "src": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "3876ae39058fa4e3c5543f79626c092401d55e94dc57068c982c8c51f682ca67", - "sum_type": "sha256", - "version": "1.1.0", - }, - ], - } - ], + "epoch": "0", + "filename": "crun-1.5-1.module_el8.6.0+3336+00d107d5.x86_64.rpm", + "name": "crun", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+3336+00d107d5", + "src": "crun-1.5-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "887feb275fc7f5e6453802e86b44d283620ed4dd25d5f92ff3fc247d7a646131", + "sum_type": "sha256", + "version": "1.5", + }, + { + "arch": "noarch", + "epoch": "2", + "filename": "container-selinux-2.189.0-1.module_el8.7.0+3407+95aa0ca9.noarch.rpm", + "name": "container-selinux", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "container-selinux-2.189.0-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "3f4c1c731d4bd48fd9f34818a4a62d0bec71212b3c6b9a91bb84cc8e3eab2bb0", + "sum_type": "sha256", + "version": "2.189.0", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": 
"udica-0.2.6-3.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "udica", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3344+484dae7b", + "src": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "1868b279d5d378ca40e974fd5a2ae4b89801acc2fdee204e45e830f79c97f742", + "sum_type": "sha256", + "version": "0.2.6", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "toolbox-tests-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.x86_64.rpm", + "name": "toolbox-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "0.6.module_el8.6.0+3070+1510fbd1", + "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.src.rpm", + "sum": "df0f3e5589cb03efc3ae680492d348c86630b7784587bfaf7650c150a0af873d", + "sum_type": "sha256", + "version": "0.0.99.3", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.x86_64.rpm", + "name": "toolbox", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "0.6.module_el8.6.0+3070+1510fbd1", + "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3070+1510fbd1.src.rpm", + "sum": "399a604cd826021dad30ca36bfc8559dbe8020ae86f23dc28ef6fbd5cf9d2eb7", + "sum_type": "sha256", + "version": "0.0.99.3", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "slirp4netns-1.2.0-2.module_el8.6.0+3070+1510fbd1.x86_64.rpm", + "name": "slirp4netns", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.6.0+3070+1510fbd1", + "src": "slirp4netns-1.2.0-2.module_el8.6.0+3070+1510fbd1.src.rpm", + "sum": "52d1d9b87f954bc58c4dd85f64aa89d481f797e50b733d71de653c77cb7f1362", + "sum_type": "sha256", + "version": "1.2.0", + }, + { + "arch": "x86_64", + "epoch": "2", + "filename": "skopeo-tests-1.9.3-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "skopeo-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "615d441934cb88946d5eb112d7ed6af989040dcf4d4556756fa0d26e7d1915c9", + "sum_type": "sha256", + "version": "1.9.3", + }, + { + "arch": "x86_64", + "epoch": "2", + "filename": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "skopeo", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "2853fdb7dca73b9eada3617fc089d05e896a1b3a1b30d57c9b394a0312a265d8", + "sum_type": "sha256", + "version": "1.9.3", + }, + { + "arch": "x86_64", + "epoch": "1", + "filename": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "runc", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "a396c3d8f9a60fba334339601131dabf5883659f2d235cb7f395ef7315e7dd27", + "sum_type": "sha256", + "version": "1.1.4", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": "python3-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "python3-podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": 
"python-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "c0d14ac37714db0b672f6aae7f687f1a9f478e993bf56968011897a4b4493911", + "sum_type": "sha256", + "version": "4.2.1", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "python3-criu-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", + "name": "python3-criu", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+2751+06427ca3", + "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "f08f386c12dcfff2595514fbf41e2489520615bbdd59ad1568ca47a15d49f23b", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "podman-tests-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "abb3d2983848324d712c942e5b19d2a14db1fabae0b889075bf38fc6b10a1a6b", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "podman-remote-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman-remote", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "f8ded2321277b7b6ca19be3286c3a8ce9c3989c96146f06b544056bf00507ea8", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "podman-plugins-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman-plugins", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "a81b73330847ddf8944e78b5753aa459355a6562b764131728d7c2f232e0c538", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "podman-gvproxy-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman-gvproxy", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "7b7e1d7dbbb427169ebd58c9681e224b433ece44cef4ddff925983cb3923e6d7", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "noarch", + "epoch": "3", + "filename": "podman-docker-4.2.0-4.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "podman-docker", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "91c0f8ae74773578b71f55a980354fa51283c2276fcab65254744c010ee790bd", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "podman-catatonit-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman-catatonit", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "8030e9d0197d08c80124087d43b863df97998c998e0ad1176a646c17a0b778f7", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "3", + 
"filename": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "daf283359ee1a4676a969a1afbc4894d72be5b071be12c99fb6185da90c74cff", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.x86_64.rpm", + "name": "oci-seccomp-bpf-hook", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+3336+00d107d5", + "src": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.src.rpm", + "sum": "6bc0e6f1429d6297d3b5fd74e38a6ccee11b9dec5b207549352f7431bdf6f898", + "sum_type": "sha256", + "version": "1.2.6", + }, + { + "arch": "x86_64", + "epoch": "2", + "filename": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "netavark", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "7.module_el8.7.0+3344+484dae7b", + "src": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "82c889b9fbd4bec98c5f1c5d06de8e4f964354c5a5ab7503d5a3a321acbfe9d9", + "sum_type": "sha256", + "version": "1.1.0", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "libslirp-devel-4.4.0-1.module_el8.6.0+2877+8e437bf5.x86_64.rpm", + "name": "libslirp-devel", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+2877+8e437bf5", + "src": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "2bda7ff20959fc2c6a059846f63836c69a3871794cd7b5154866ecc6a4545b0c", + "sum_type": "sha256", + "version": "4.4.0", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.x86_64.rpm", + "name": "libslirp", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+2877+8e437bf5", + "src": "libslirp-4.4.0-1.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "e79d04839688384f66c8053a605f5b73e43b256bdb77d4027031ebc8909aacd3", + "sum_type": "sha256", + "version": "4.4.0", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "fuse-overlayfs-1.9-1.module_el8.6.0+3070+1510fbd1.x86_64.rpm", + "name": "fuse-overlayfs", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+3070+1510fbd1", + "src": "fuse-overlayfs-1.9-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "140933a1038eb7af6360bfd7d7dd5d8d6b0be0e353454f6b9f41cf0b5693c468", + "sum_type": "sha256", + "version": "1.9", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "criu-libs-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", + "name": "criu-libs", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+2751+06427ca3", + "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "04ae80cdd853a5d49d6e648d4fcf0dc73bb25704d7afc24ac3af76b06786b4b1", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "criu-devel-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", + "name": "criu-devel", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+2751+06427ca3", + "src": 
"criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "b79b30367b06961e7514f46ea8b4aa82399fdadd2d9ba7a818d2481fe7677349", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "criu-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", + "name": "criu", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+2751+06427ca3", + "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "60d54bb32f236d7d7029a02278af814324dabcf0992f1ac61f47970e47ceb403", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "x86_64", + "epoch": "0", + "filename": "crit-3.15-3.module_el8.6.0+2751+06427ca3.x86_64.rpm", + "name": "crit", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+2751+06427ca3", + "src": "criu-3.15-3.module_el8.6.0+2877+8e437bf5.src.rpm", + "sum": "4425c86fd1af6b6dc0c8caafbac77caa852b68765d87a84e9eeaa1eeebbfa636", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "x86_64", + "epoch": "2", + "filename": "containers-common-1-43.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "containers-common", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "43.module_el8.7.0+3344+484dae7b", + "src": "containers-common-1-43.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "3c8879f6916c905dfa27219103e19872acbb6a8481c151f09f137a471c02a727", + "sum_type": "sha256", + "version": "1", + }, + { + "arch": "x86_64", + "epoch": "1", + "filename": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3070+1510fbd1.x86_64.rpm", + "name": "containernetworking-plugins", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+3070+1510fbd1", + "src": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3070+1510fbd1.src.rpm", + "sum": "2be734dd2f2fc4797b8a535a26d39fac03a435bae3be7e02e8ebd94e103fd2bf", + "sum_type": "sha256", + "version": "1.1.1", + }, + { + "arch": "x86_64", + "epoch": "3", + "filename": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "conmon", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "973a8421e6da1bd524e0beb00c76157146f8a772572c735b1c5b29bd04aa3af8", + "sum_type": "sha256", + "version": "2.1.4", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "cockpit-podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "2118d185c4a6cdffcc184c92a7b2b93c5ff0bec02f37be22a685a97bcf783631", + "sum_type": "sha256", + "version": "53", + }, + { + "arch": "x86_64", + "epoch": "1", + "filename": "buildah-tests-1.27.2-2.module_el8.7.0+3348+f3135399.x86_64.rpm", + "name": "buildah-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.7.0+3348+f3135399", + "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", + "sum": "da6d6f363dd0128bd9518712a915b1f1984b581433f02a5239cbffa2d3e5521b", + "sum_type": "sha256", + "version": "1.27.2", + }, + { + "arch": "x86_64", + "epoch": "1", + "filename": 
"buildah-1.27.2-2.module_el8.7.0+3348+f3135399.x86_64.rpm", + "name": "buildah", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.7.0+3348+f3135399", + "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", + "sum": "c7828c855fbe97d201e9a97698ab06ed8c8ccbc24089af3c46936fb4a3e185aa", + "sum_type": "sha256", + "version": "1.27.2", + }, + { + "arch": "x86_64", + "epoch": "2", + "filename": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.x86_64.rpm", + "name": "aardvark-dns", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "5.module_el8.7.0+3344+484dae7b", + "src": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "3876ae39058fa4e3c5543f79626c092401d55e94dc57068c982c8c51f682ca67", + "sum_type": "sha256", + "version": "1.1.0", + }, + ], + }], "references": [ { "href": "https://errata.almalinux.org/8/ALSA-2022-7822.html", @@ -682,497 +681,495 @@ def pulp_updateinfos(): "release": "0", "rights": "Copyright 2022 AlmaLinux OS", "pushcount": "1", - "pkglist": [ - { - "name": "almalinux-8-for-s390x-appstream-rpms__8_7_default", - "shortname": "almalinux-8-for-s390x-appstream-rpms__8_7_default", - "module": { + "pkglist": [{ + "name": "almalinux-8-for-s390x-appstream-rpms__8_7_default", + "shortname": "almalinux-8-for-s390x-appstream-rpms__8_7_default", + "module": { + "arch": "s390x", + "name": "container-tools", + "stream": "rhel8", + "context": "20125149", + "version": 8070020221108190354, + }, + "packages": [ + { + "arch": "noarch", + "epoch": "2", + "filename": "container-selinux-2.189.0-1.module_el8.7.0+3407+95aa0ca9.noarch.rpm", + "name": "container-selinux", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "container-selinux-2.189.0-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "3f4c1c731d4bd48fd9f34818a4a62d0bec71212b3c6b9a91bb84cc8e3eab2bb0", + "sum_type": "sha256", + "version": "2.189.0", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "udica", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3344+484dae7b", + "src": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "1868b279d5d378ca40e974fd5a2ae4b89801acc2fdee204e45e830f79c97f742", + "sum_type": "sha256", + "version": "0.2.6", + }, + { "arch": "s390x", - "name": "container-tools", - "stream": "rhel8", - "context": "20125149", - "version": 8070020221108190354, - }, - "packages": [ - { - "arch": "noarch", - "epoch": "2", - "filename": "container-selinux-2.189.0-1.module_el8.7.0+3407+95aa0ca9.noarch.rpm", - "name": "container-selinux", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "container-selinux-2.189.0-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "3f4c1c731d4bd48fd9f34818a4a62d0bec71212b3c6b9a91bb84cc8e3eab2bb0", - "sum_type": "sha256", - "version": "2.189.0", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "udica", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3344+484dae7b", - "src": "udica-0.2.6-3.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": 
"1868b279d5d378ca40e974fd5a2ae4b89801acc2fdee204e45e830f79c97f742", - "sum_type": "sha256", - "version": "0.2.6", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "toolbox-tests-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.s390x.rpm", - "name": "toolbox-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "0.6.module_el8.6.0+3128+1510fbd1", - "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.src.rpm", - "sum": "915e61d58e88be3f80dae6065e51f522ac53c230c96ddfd9396c7b0a30d67e7e", - "sum_type": "sha256", - "version": "0.0.99.3", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.s390x.rpm", - "name": "toolbox", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "0.6.module_el8.6.0+3128+1510fbd1", - "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.src.rpm", - "sum": "77f2a4f87aaf23c10877b58fdf4f10c94e59b11548f1085abf85419049fc43b7", - "sum_type": "sha256", - "version": "0.0.99.3", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "slirp4netns-1.2.0-2.module_el8.6.0+3128+1510fbd1.s390x.rpm", - "name": "slirp4netns", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.6.0+3128+1510fbd1", - "src": "slirp4netns-1.2.0-2.module_el8.6.0+3128+1510fbd1.src.rpm", - "sum": "badb290a72081a08ee84125fad9e55ef35ae9f5afb6bb0b06e9aab628ab70bcd", - "sum_type": "sha256", - "version": "1.2.0", - }, - { - "arch": "s390x", - "epoch": "2", - "filename": "skopeo-tests-1.9.3-1.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "skopeo-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "fed44373bf544a56e7bbae05d14b5409351929323e650fae1de6d67069277604", - "sum_type": "sha256", - "version": "1.9.3", - }, - { - "arch": "s390x", - "epoch": "2", - "filename": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "skopeo", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "0c2e471825f9a1e8efdacdde36de9a55f8f1bcf2479b73f77128996f062eddaf", - "sum_type": "sha256", - "version": "1.9.3", - }, - { - "arch": "s390x", - "epoch": "1", - "filename": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "runc", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "533384a92eaf1feb65434ec5dbad4c9ba501383482642277aed4825598a0fa1e", - "sum_type": "sha256", - "version": "1.1.4", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "python3-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "python3-podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "python-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "c0d14ac37714db0b672f6aae7f687f1a9f478e993bf56968011897a4b4493911", - "sum_type": "sha256", - "version": "4.2.1", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "python3-criu-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": 
"python3-criu", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3407+95aa0ca9", - "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "caf0c9cad2a7ce540719cef8131f61df93e037a1abaef0248183a670e9bf50fa", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-tests-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "8dda932cdace7da45a13da6d1447bd388be02a050801f6b7f0412f684dc8f043", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-remote-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman-remote", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "9040d2767200e72276813ce701d492989246ebb9fe07dfe0f2b6816892c0cc47", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-plugins-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman-plugins", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "c3eebcc8da7b41bedc9b5d63b55b765cac166bb787a48d714be04a812285ac70", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-gvproxy-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman-gvproxy", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "b294e7104260f648a7dfd53fbab4ff6f3f56a9b7ae0b0188aee172b8b366d12c", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "noarch", - "epoch": "3", - "filename": "podman-docker-4.2.0-4.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "podman-docker", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "91c0f8ae74773578b71f55a980354fa51283c2276fcab65254744c010ee790bd", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-catatonit-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman-catatonit", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "2a59e44dbdb99537610fc6de683f5423df21639b7c99b58fb436758f040cf36d", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "4.module_el8.7.0+3344+484dae7b", - "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": 
"93cf35a8f1b0c31bafbc52cb6e1dfca144946a16ff2662289d404267e6a1010e", - "sum_type": "sha256", - "version": "4.2.0", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.s390x.rpm", - "name": "oci-seccomp-bpf-hook", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.6.0+3336+00d107d5", - "src": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.src.rpm", - "sum": "ff517da5901cb54cae20b21c3197080bd049549d534105b70318edcb3cc8ae8b", - "sum_type": "sha256", - "version": "1.2.6", - }, - { - "arch": "s390x", - "epoch": "2", - "filename": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "netavark", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "7.module_el8.7.0+3344+484dae7b", - "src": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "310b3167cf4ec0675072162edd89e70d16737e8e2570b532f3b08ac17b488092", - "sum_type": "sha256", - "version": "1.1.0", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "libslirp-devel-4.4.0-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "libslirp-devel", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "libslirp-4.4.0-1.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "fc314bbef4e4807252caff16892c2d3fe260b828899c933719c618b9dfb07443", - "sum_type": "sha256", - "version": "4.4.0", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "libslirp-4.4.0-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "libslirp", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "libslirp-4.4.0-1.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "e3cf6bbda53346ef0244a58972b3a341818eb28b2d8ebf604953aed23ff228a0", - "sum_type": "sha256", - "version": "4.4.0", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "fuse-overlayfs-1.9-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "fuse-overlayfs", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "fuse-overlayfs-1.9-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "da0d165707a50722031a02e4fc918f80f5c2bf71dbcf6b3654f774ed41bbe933", - "sum_type": "sha256", - "version": "1.9", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "crun-1.5-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "crun", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3407+95aa0ca9", - "src": "crun-1.5-1.module_el8.7.0+3344+5bcd850f.src.rpm", - "sum": "5e8afc3b2aaaf1d20fc3f403370440335684c8a68ac9ca32c1647d7e4d63b5dc", - "sum_type": "sha256", - "version": "1.5", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "criu-libs-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "criu-libs", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3407+95aa0ca9", - "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "46a7f577f55a6e9a2da9d31ef02030ce687a7664516772ee9aca3d805ba91a90", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "criu-devel-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "criu-devel", - 
"reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3407+95aa0ca9", - "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "ab919af780bfdb79571a0f29bfe39aa9e76b34f49d63b6529f15def1ce4e9d93", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "criu-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "criu", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3407+95aa0ca9", - "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "f4d794e8544cab3266b44e64daacabcbb9999da62623e4b15e2b328f63c7ffb5", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "s390x", - "epoch": "0", - "filename": "crit-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", - "name": "crit", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.7.0+3407+95aa0ca9", - "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", - "sum": "8d693fabb52e50d93dc0d9a39aa7fb39a096557a7c3c7d2b81ed9da97a3aa687", - "sum_type": "sha256", - "version": "3.15", - }, - { - "arch": "s390x", - "epoch": "2", - "filename": "containers-common-1-43.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "containers-common", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "43.module_el8.7.0+3344+484dae7b", - "src": "containers-common-1-43.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "2046fd821dd3f637a9cc925cce010df4c8419deb4ef0ad7cce5c02887dd5e7f9", - "sum_type": "sha256", - "version": "1", - }, - { - "arch": "s390x", - "epoch": "1", - "filename": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3128+1510fbd1.s390x.rpm", - "name": "containernetworking-plugins", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "3.module_el8.6.0+3128+1510fbd1", - "src": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3128+1510fbd1.src.rpm", - "sum": "f6b836a5fad0abe8a79ee5c244c7c9a5a82f84039fb5a5f919516de72620db4f", - "sum_type": "sha256", - "version": "1.1.1", - }, - { - "arch": "s390x", - "epoch": "3", - "filename": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "conmon", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "f836a926d7d85dd57d45fa60655aa528ecd057f2b4c949c607612fb07f34e539", - "sum_type": "sha256", - "version": "2.1.4", - }, - { - "arch": "noarch", - "epoch": "0", - "filename": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.noarch.rpm", - "name": "cockpit-podman", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "1.module_el8.7.0+3344+484dae7b", - "src": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "2118d185c4a6cdffcc184c92a7b2b93c5ff0bec02f37be22a685a97bcf783631", - "sum_type": "sha256", - "version": "53", - }, - { - "arch": "s390x", - "epoch": "1", - "filename": "buildah-tests-1.27.2-2.module_el8.7.0+3348+f3135399.s390x.rpm", - "name": "buildah-tests", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.7.0+3348+f3135399", - "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", - "sum": 
"650ce2876b213da05d1cf74b32be005a06d174a9f4d44a8caa440627bd335fe9", - "sum_type": "sha256", - "version": "1.27.2", - }, - { - "arch": "s390x", - "epoch": "1", - "filename": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.s390x.rpm", - "name": "buildah", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "2.module_el8.7.0+3348+f3135399", - "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", - "sum": "45295a2b3348ec0a39e2e4b1cbca53fa566e51110284e0ef25c9642d441909b2", - "sum_type": "sha256", - "version": "1.27.2", - }, - { - "arch": "s390x", - "epoch": "2", - "filename": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.s390x.rpm", - "name": "aardvark-dns", - "reboot_suggested": False, - "relogin_suggested": False, - "restart_suggested": False, - "release": "5.module_el8.7.0+3344+484dae7b", - "src": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.src.rpm", - "sum": "a93b253e451d78ef381e620ee61043e0e6ae0cfde241e94fbf6facb57a27037f", - "sum_type": "sha256", - "version": "1.1.0", - }, - ], - } - ], + "epoch": "0", + "filename": "toolbox-tests-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.s390x.rpm", + "name": "toolbox-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "0.6.module_el8.6.0+3128+1510fbd1", + "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.src.rpm", + "sum": "915e61d58e88be3f80dae6065e51f522ac53c230c96ddfd9396c7b0a30d67e7e", + "sum_type": "sha256", + "version": "0.0.99.3", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.s390x.rpm", + "name": "toolbox", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "0.6.module_el8.6.0+3128+1510fbd1", + "src": "toolbox-0.0.99.3-0.6.module_el8.6.0+3128+1510fbd1.src.rpm", + "sum": "77f2a4f87aaf23c10877b58fdf4f10c94e59b11548f1085abf85419049fc43b7", + "sum_type": "sha256", + "version": "0.0.99.3", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "slirp4netns-1.2.0-2.module_el8.6.0+3128+1510fbd1.s390x.rpm", + "name": "slirp4netns", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.6.0+3128+1510fbd1", + "src": "slirp4netns-1.2.0-2.module_el8.6.0+3128+1510fbd1.src.rpm", + "sum": "badb290a72081a08ee84125fad9e55ef35ae9f5afb6bb0b06e9aab628ab70bcd", + "sum_type": "sha256", + "version": "1.2.0", + }, + { + "arch": "s390x", + "epoch": "2", + "filename": "skopeo-tests-1.9.3-1.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "skopeo-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "fed44373bf544a56e7bbae05d14b5409351929323e650fae1de6d67069277604", + "sum_type": "sha256", + "version": "1.9.3", + }, + { + "arch": "s390x", + "epoch": "2", + "filename": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "skopeo", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "skopeo-1.9.3-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "0c2e471825f9a1e8efdacdde36de9a55f8f1bcf2479b73f77128996f062eddaf", + "sum_type": "sha256", + "version": "1.9.3", + }, + { + "arch": "s390x", + "epoch": "1", + "filename": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "runc", + 
"reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "runc-1.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "533384a92eaf1feb65434ec5dbad4c9ba501383482642277aed4825598a0fa1e", + "sum_type": "sha256", + "version": "1.1.4", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": "python3-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "python3-podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "python-podman-4.2.1-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "c0d14ac37714db0b672f6aae7f687f1a9f478e993bf56968011897a4b4493911", + "sum_type": "sha256", + "version": "4.2.1", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "python3-criu-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "python3-criu", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3407+95aa0ca9", + "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "caf0c9cad2a7ce540719cef8131f61df93e037a1abaef0248183a670e9bf50fa", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-tests-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "8dda932cdace7da45a13da6d1447bd388be02a050801f6b7f0412f684dc8f043", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-remote-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman-remote", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "9040d2767200e72276813ce701d492989246ebb9fe07dfe0f2b6816892c0cc47", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-plugins-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman-plugins", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "c3eebcc8da7b41bedc9b5d63b55b765cac166bb787a48d714be04a812285ac70", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-gvproxy-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman-gvproxy", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "b294e7104260f648a7dfd53fbab4ff6f3f56a9b7ae0b0188aee172b8b366d12c", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "noarch", + "epoch": "3", + "filename": "podman-docker-4.2.0-4.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "podman-docker", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": 
"91c0f8ae74773578b71f55a980354fa51283c2276fcab65254744c010ee790bd", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-catatonit-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman-catatonit", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "2a59e44dbdb99537610fc6de683f5423df21639b7c99b58fb436758f040cf36d", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "4.module_el8.7.0+3344+484dae7b", + "src": "podman-4.2.0-4.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "93cf35a8f1b0c31bafbc52cb6e1dfca144946a16ff2662289d404267e6a1010e", + "sum_type": "sha256", + "version": "4.2.0", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.s390x.rpm", + "name": "oci-seccomp-bpf-hook", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.6.0+3336+00d107d5", + "src": "oci-seccomp-bpf-hook-1.2.6-1.module_el8.6.0+3336+00d107d5.src.rpm", + "sum": "ff517da5901cb54cae20b21c3197080bd049549d534105b70318edcb3cc8ae8b", + "sum_type": "sha256", + "version": "1.2.6", + }, + { + "arch": "s390x", + "epoch": "2", + "filename": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "netavark", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "7.module_el8.7.0+3344+484dae7b", + "src": "netavark-1.1.0-7.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "310b3167cf4ec0675072162edd89e70d16737e8e2570b532f3b08ac17b488092", + "sum_type": "sha256", + "version": "1.1.0", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "libslirp-devel-4.4.0-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "libslirp-devel", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "libslirp-4.4.0-1.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "fc314bbef4e4807252caff16892c2d3fe260b828899c933719c618b9dfb07443", + "sum_type": "sha256", + "version": "4.4.0", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "libslirp-4.4.0-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "libslirp", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "libslirp-4.4.0-1.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "e3cf6bbda53346ef0244a58972b3a341818eb28b2d8ebf604953aed23ff228a0", + "sum_type": "sha256", + "version": "4.4.0", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "fuse-overlayfs-1.9-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "fuse-overlayfs", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "fuse-overlayfs-1.9-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "da0d165707a50722031a02e4fc918f80f5c2bf71dbcf6b3654f774ed41bbe933", + "sum_type": "sha256", + "version": "1.9", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "crun-1.5-1.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": 
"crun", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3407+95aa0ca9", + "src": "crun-1.5-1.module_el8.7.0+3344+5bcd850f.src.rpm", + "sum": "5e8afc3b2aaaf1d20fc3f403370440335684c8a68ac9ca32c1647d7e4d63b5dc", + "sum_type": "sha256", + "version": "1.5", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "criu-libs-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "criu-libs", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3407+95aa0ca9", + "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "46a7f577f55a6e9a2da9d31ef02030ce687a7664516772ee9aca3d805ba91a90", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "criu-devel-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "criu-devel", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3407+95aa0ca9", + "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "ab919af780bfdb79571a0f29bfe39aa9e76b34f49d63b6529f15def1ce4e9d93", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "criu-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "criu", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3407+95aa0ca9", + "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "f4d794e8544cab3266b44e64daacabcbb9999da62623e4b15e2b328f63c7ffb5", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "s390x", + "epoch": "0", + "filename": "crit-3.15-3.module_el8.7.0+3407+95aa0ca9.s390x.rpm", + "name": "crit", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.7.0+3407+95aa0ca9", + "src": "criu-3.15-3.module_el8.6.0+3137+d33c3efb.src.rpm", + "sum": "8d693fabb52e50d93dc0d9a39aa7fb39a096557a7c3c7d2b81ed9da97a3aa687", + "sum_type": "sha256", + "version": "3.15", + }, + { + "arch": "s390x", + "epoch": "2", + "filename": "containers-common-1-43.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "containers-common", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "43.module_el8.7.0+3344+484dae7b", + "src": "containers-common-1-43.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "2046fd821dd3f637a9cc925cce010df4c8419deb4ef0ad7cce5c02887dd5e7f9", + "sum_type": "sha256", + "version": "1", + }, + { + "arch": "s390x", + "epoch": "1", + "filename": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3128+1510fbd1.s390x.rpm", + "name": "containernetworking-plugins", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "3.module_el8.6.0+3128+1510fbd1", + "src": "containernetworking-plugins-1.1.1-3.module_el8.6.0+3128+1510fbd1.src.rpm", + "sum": "f6b836a5fad0abe8a79ee5c244c7c9a5a82f84039fb5a5f919516de72620db4f", + "sum_type": "sha256", + "version": "1.1.1", + }, + { + "arch": "s390x", + "epoch": "3", + "filename": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "conmon", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "conmon-2.1.4-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": 
"f836a926d7d85dd57d45fa60655aa528ecd057f2b4c949c607612fb07f34e539", + "sum_type": "sha256", + "version": "2.1.4", + }, + { + "arch": "noarch", + "epoch": "0", + "filename": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.noarch.rpm", + "name": "cockpit-podman", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "1.module_el8.7.0+3344+484dae7b", + "src": "cockpit-podman-53-1.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "2118d185c4a6cdffcc184c92a7b2b93c5ff0bec02f37be22a685a97bcf783631", + "sum_type": "sha256", + "version": "53", + }, + { + "arch": "s390x", + "epoch": "1", + "filename": "buildah-tests-1.27.2-2.module_el8.7.0+3348+f3135399.s390x.rpm", + "name": "buildah-tests", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.7.0+3348+f3135399", + "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", + "sum": "650ce2876b213da05d1cf74b32be005a06d174a9f4d44a8caa440627bd335fe9", + "sum_type": "sha256", + "version": "1.27.2", + }, + { + "arch": "s390x", + "epoch": "1", + "filename": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.s390x.rpm", + "name": "buildah", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "2.module_el8.7.0+3348+f3135399", + "src": "buildah-1.27.2-2.module_el8.7.0+3348+f3135399.src.rpm", + "sum": "45295a2b3348ec0a39e2e4b1cbca53fa566e51110284e0ef25c9642d441909b2", + "sum_type": "sha256", + "version": "1.27.2", + }, + { + "arch": "s390x", + "epoch": "2", + "filename": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.s390x.rpm", + "name": "aardvark-dns", + "reboot_suggested": False, + "relogin_suggested": False, + "restart_suggested": False, + "release": "5.module_el8.7.0+3344+484dae7b", + "src": "aardvark-dns-1.1.0-5.module_el8.7.0+3344+484dae7b.src.rpm", + "sum": "a93b253e451d78ef381e620ee61043e0e6ae0cfde241e94fbf6facb57a27037f", + "sum_type": "sha256", + "version": "1.1.0", + }, + ], + }], "references": [ { "href": "https://errata.almalinux.org/8/ALSA-2022-7822.html", diff --git a/tests/fixtures/platforms.py b/tests/fixtures/platforms.py index d13d91d7d..57f01bea2 100644 --- a/tests/fixtures/platforms.py +++ b/tests/fixtures/platforms.py @@ -13,7 +13,7 @@ @pytest.mark.anyio @pytest.fixture async def base_platform( - session: AsyncSession, + async_session: AsyncSession, ) -> AsyncIterable[models.Platform]: with open("reference_data/platforms.yaml", "rt") as file: loader = yaml.Loader(file) @@ -22,7 +22,7 @@ async def base_platform( schema["repos"] = [] platform = ( ( - await session.execute( + await async_session.execute( select(models.Platform).where( models.Platform.name == schema["name"], ) @@ -40,6 +40,6 @@ async def base_platform( **repository_schema.RepositoryCreate(**repo).model_dump() ) platform.repos.append(repository) - session.add(platform) - await session.commit() + async_session.add(platform) + await async_session.commit() yield platform diff --git a/tests/fixtures/products.py b/tests/fixtures/products.py index 33267bb71..4a61277a2 100644 --- a/tests/fixtures/products.py +++ b/tests/fixtures/products.py @@ -75,11 +75,11 @@ def user_product_create_payload(request) -> dict: @pytest.mark.anyio @pytest.fixture async def base_product( - session: AsyncSession, product_create_payload: dict, create_repo + async_session: AsyncSession, product_create_payload: dict, create_repo ) -> AsyncIterable[Product]: product = ( ( - await session.execute( + await async_session.execute( 
select(Product).where( Product.name == product_create_payload["name"], ), @@ -90,23 +90,24 @@ async def base_product( ) if not product: product = await create_product( - session, + async_session, ProductCreate(**product_create_payload), ) + await async_session.commit() yield product @pytest.mark.anyio @pytest.fixture async def user_product( - session: AsyncSession, + async_session: AsyncSession, user_product_create_payload: dict, create_repo, create_file_repository, ) -> AsyncIterable[Product]: product = ( ( - await session.execute( + await async_session.execute( select(Product).where( Product.name == user_product_create_payload["name"], ), @@ -117,9 +118,8 @@ async def user_product( ) if not product: product = await create_product( - session, + async_session, ProductCreate(**user_product_create_payload), ) - session.add(product) - await session.commit() + await async_session.commit() yield product diff --git a/tests/fixtures/sign_keys.py b/tests/fixtures/sign_keys.py index 0df831f98..d85d41ca6 100644 --- a/tests/fixtures/sign_keys.py +++ b/tests/fixtures/sign_keys.py @@ -1,6 +1,7 @@ import typing import pytest +from fastapi_sqla import open_async_session from sqlalchemy import delete, select from sqlalchemy.ext.asyncio import AsyncSession @@ -20,9 +21,11 @@ def basic_sign_key_payload() -> dict: } -async def __create_sign_key(session: AsyncSession, payload: dict) -> SignKey: - await create_sign_key(session, SignKeyCreate(**payload)) - sign_key_cursor = await session.execute( +async def __create_sign_key( + async_session: AsyncSession, payload: dict +) -> SignKey: + await create_sign_key(async_session, SignKeyCreate(**payload)) + sign_key_cursor = await async_session.execute( select(SignKey).where(SignKey.keyid == payload['keyid']) ) sign_key = sign_key_cursor.scalars().first() @@ -32,10 +35,11 @@ async def __create_sign_key(session: AsyncSession, payload: dict) -> SignKey: @pytest.mark.anyio @pytest.fixture async def sign_key( - session: AsyncSession, + async_session: AsyncSession, basic_sign_key_payload, ) -> typing.AsyncIterable[SignKey]: - sign_key = await __create_sign_key(session, basic_sign_key_payload) + sign_key = await __create_sign_key(async_session, basic_sign_key_payload) + await async_session.commit() yield sign_key - await session.execute(delete(SignKey)) - await session.commit() + await async_session.execute(delete(SignKey)) + await async_session.commit() diff --git a/tests/mock_classes.py b/tests/mock_classes.py index 19e9dc40f..c02993cae 100644 --- a/tests/mock_classes.py +++ b/tests/mock_classes.py @@ -7,10 +7,8 @@ from alws.app import app from alws.config import settings -from alws.dependencies import get_db from alws.utils import jwt_utils from tests.constants import ADMIN_USER_ID -from tests.fixtures.database import get_session @pytest.mark.anyio @@ -59,13 +57,10 @@ def generate_jwt_token( @classmethod def setup_class(cls): - app.dependency_overrides[get_db] = get_session cls.token = cls.generate_jwt_token(str(cls.user_id)) - cls.headers.update( - { - "Authorization": f"Bearer {cls.token}", - } - ) + cls.headers.update({ + "Authorization": f"Bearer {cls.token}", + }) def get_assertion_message( self, diff --git a/tests/test-vars.env b/tests/test-vars.env index ab3a0dc53..00fb538ba 100644 --- a/tests/test-vars.env +++ b/tests/test-vars.env @@ -7,6 +7,8 @@ FASTAPI_SQLA__ASYNC__SQLALCHEMY_URL="postgresql+asyncpg://postgres:password@test FASTAPI_SQLA__PULP__SQLALCHEMY_POOL_PRE_PING=True FASTAPI_SQLA__PULP__SQLALCHEMY_POOL_RECYCLE=3600 
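The two `FASTAPI_SQLA__PULP_ASYNC__*` variables added above configure a second, asynchronous engine for the Pulp test database next to the existing synchronous `pulp` one; the middle segment of the variable name (`PULP_ASYNC`) names the engine key. A minimal sketch of how a named session could be opened against that engine — the `"pulp_async"` key is an assumption inferred from the env-var prefix, and `open_async_session` (the same helper the fixtures import) only works after the fastapi-sqla startup logic has registered the engines:

```python
import asyncio

from fastapi_sqla import open_async_session
from sqlalchemy import text


async def ping_pulp() -> None:
    # The key selects the engine configured by the FASTAPI_SQLA__PULP_ASYNC__*
    # variables; this assumes the fastapi-sqla startup hook has already run.
    async with open_async_session(key="pulp_async") as session:
        await session.execute(text("SELECT 1"))


asyncio.run(ping_pulp())
```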
diff --git a/tests/test_api/test_builds.py b/tests/test_api/test_builds.py
index c73101b88..6232c3733 100644
--- a/tests/test_api/test_builds.py
+++ b/tests/test_api/test_builds.py
@@ -83,6 +83,7 @@ async def test_build_create_without_permissions(
         assert response.status_code == self.status_codes.HTTP_403_FORBIDDEN
         self.headers["Authorization"] = old_token
 
+    # @pytest.mark.skip(reason="Checking the reason for freezing tests")
     async def test_build_delete(
         self,
         create_errata,
@@ -140,9 +141,7 @@ async def test_multilib_virt(
                 module_file.read_text()
             )
             for build_module in build_index.iter_modules():
-                artifacts = modules_artifacts[
-                    f"{build_module.name}:{arch}"
-                ]
+                artifacts = modules_artifacts[f"{build_module.name}:{arch}"]
                 assert build_module.get_rpm_artifacts() == artifacts
 
     async def test_multilib_ruby(
@@ -182,9 +181,7 @@
                 module_file.read_text()
             )
             for build_module in build_index.iter_modules():
-                artifacts = modules_artifacts[
-                    f"{build_module.name}:{arch}"
-                ]
+                artifacts = modules_artifacts[f"{build_module.name}:{arch}"]
                 assert build_module.get_rpm_artifacts() == artifacts
 
     async def test_multilib_subversion(
@@ -223,9 +220,7 @@
                 module_file.read_text()
             )
             for build_module in build_index.iter_modules():
-                artifacts = modules_artifacts[
-                    f"{build_module.name}:{arch}"
-                ]
+                artifacts = modules_artifacts[f"{build_module.name}:{arch}"]
                 assert build_module.get_rpm_artifacts() == artifacts
 
     async def test_multilib_llvm(
diff --git a/tests/test_api/test_products.py b/tests/test_api/test_products.py
index 369d8921d..f592aa0f3 100644
--- a/tests/test_api/test_products.py
+++ b/tests/test_api/test_products.py
@@ -33,7 +33,7 @@ async def test_add_to_product(
         self,
         regular_build: Build,
         user_product: Product,
-        session: AsyncSession,
+        async_session: AsyncSession,
     ):
         product_id = user_product.id
         product_name = user_product.name
@@ -52,9 +52,10 @@ async def test_add_to_product(
         # In case there's an error in add_to_product, it will be raised and
         # the test will be reported as failed.
         await _perform_product_modification(build_id, product_id, "add")
+        await async_session.commit()
         db_product = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(Product)
                     .where(Product.id == product_id)
                     .options(selectinload(Product.builds))
@@ -69,7 +70,7 @@ async def test_remove_from_product(
         self,
         user_product: Product,
-        session: AsyncSession,
+        async_session: AsyncSession,
     ):
         product_id = user_product.id
         product_name = user_product.name
@@ -86,7 +87,7 @@
         await _perform_product_modification(build_id, product_id, "remove")
         db_product = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(Product)
                     .where(Product.id == product_id)
                     .options(selectinload(Product.builds))
@@ -101,7 +102,7 @@
     async def test_user_product_remove_when_build_is_running(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         user_product: Product,
         regular_build_with_user_product: Build,
     ):
@@ -112,9 +113,9 @@
         ), response.text
         # we need to delete active build for further product deletion
         for task in regular_build_with_user_product.tasks:
-            await session.delete(task)
-        await session.delete(regular_build_with_user_product)
-        await session.commit()
+            await async_session.delete(task)
+        await async_session.delete(regular_build_with_user_product)
+        await async_session.commit()
 
     async def test_user_product_remove(
         self,
diff --git a/tests/test_api/test_releases.py b/tests/test_api/test_releases.py
index 97a1e4052..185351a03 100644
--- a/tests/test_api/test_releases.py
+++ b/tests/test_api/test_releases.py
@@ -71,7 +71,7 @@ async def test_create_community_release(
 
     async def test_commit_release(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         base_product: models.Product,
         disable_packages_check_in_prod_repos,
         disable_sign_verify,
@@ -95,7 +95,8 @@
         )
         message = f"Cannot commit release:\n{response.text}"
         assert response.status_code == self.status_codes.HTTP_200_OK, message
-        await commit_release(session, release_id, self.user_id)
+        await commit_release(async_session, release_id, self.user_id)
+        await async_session.commit()
         response = await self.make_request(
             "get",
             f"/api/v1/releases/{release_id}/",
@@ -106,7 +107,7 @@
     async def test_commit_community_release(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         user_product: models.Product,
         modify_repository,
         create_rpm_publication,
@@ -131,7 +132,8 @@
         )
         message = f"Cannot commit release:\n{response.text}"
         assert response.status_code == self.status_codes.HTTP_200_OK, message
-        await commit_release(session, release_id, self.user_id)
+        await commit_release(async_session, release_id, self.user_id)
+        await async_session.commit()
         response = await self.make_request(
             "get",
             f"/api/v1/releases/{release_id}/",
@@ -158,7 +160,7 @@ async def test_get_release(
 
     async def test_revert_release(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         base_product: models.Product,
         modify_repository,
         create_rpm_publication,
@@ -174,7 +176,8 @@
             for row in response.json()
             if row["product"]["id"] == base_product.id
         )["id"]
-        await revert_release(session, release_id, self.user_id)
+        await revert_release(async_session, release_id, self.user_id)
+        await async_session.commit()
         response = await self.make_request(
             "get",
             f"/api/v1/releases/{release_id}/",
@@ -184,7 +187,7 @@
         assert release["status"] == ReleaseStatus.REVERTED, last_log
         builds = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(models.Build).where(
                         models.Build.release_id == release_id,
                     ),
@@ -198,7 +201,7 @@
             pkg_dict.get("package", {}).get("artifact_href", "")
             for pkg_dict in release["plan"].get("packages", [])
         ]
-        errata_pkgs = await session.execute(
+        errata_pkgs = await async_session.execute(
             select(models.NewErrataToALBSPackage).where(
                 models.NewErrataToALBSPackage.status
                 == ErrataPackageStatus.released,
@@ -221,7 +224,7 @@
     async def test_revert_community_release(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         user_product: models.Product,
         modify_repository,
         create_rpm_publication,
@@ -237,7 +240,8 @@
         )["id"]
-        await revert_release(session, release_id, self.user_id)
+        await revert_release(async_session, release_id, self.user_id)
+        await async_session.commit()
         response = await self.make_request(
             "get",
             f"/api/v1/releases/{release_id}/",
@@ -247,7 +251,7 @@
         assert release["status"] == ReleaseStatus.REVERTED, last_log
         builds = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(models.Build).where(
                         models.Build.release_id == release_id,
                     ),
diff --git a/tests/test_api/test_uploads.py b/tests/test_api/test_uploads.py
index 6b2d25492..2b27bb9af 100644
--- a/tests/test_api/test_uploads.py
+++ b/tests/test_api/test_uploads.py
@@ -43,7 +43,7 @@ async def test_module_upload_prod_repo(
 
     async def test_module_upload_build_repo(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         modules_yaml: bytes,
         base_platform,
         base_product,
@@ -61,7 +61,7 @@
         rpm_modules = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(RpmModule).where(
                         RpmModule.id.in_(
                             select(BuildTask.rpm_module_id)
diff --git a/tests/test_unit/test_products.py b/tests/test_unit/test_products.py
index 479382242..54719edcf 100644
--- a/tests/test_unit/test_products.py
+++ b/tests/test_unit/test_products.py
@@ -143,13 +143,11 @@ def _create_build_task_mock(task: dict):
 ]
 
 build = {
-    "platforms": [
-        {
-            "name": "AlmaLinux-8",
-            "arch_list": ["x86_64", "i686"],
-            "parallel_mode_enabled": False,
-        }
-    ],
+    "platforms": [{
+        "name": "AlmaLinux-8",
+        "arch_list": ["x86_64", "i686"],
+        "parallel_mode_enabled": False,
+    }],
     "tasks": [
         {"id": 1, "url": "https://build.task.ref#1"},
         {"id": 2, "url": "https://build.task.ref#2"},
@@ -189,7 +187,7 @@ async def test_group_tasks_by_ref_id(self, build_tasks, expected):
     @pytest.fixture()
     async def create_build_and_artifacts(
         self,
-        session: AsyncSession,
+        async_session: AsyncSession,
         base_platform,
         base_product,
         create_build_rpm_repo,
@@ -197,13 +195,14 @@
         modify_repository,
     ) -> Build:
         created_build = await create_build(
-            session, BuildCreate(**build), user_id=ADMIN_USER_ID
+            async_session, BuildCreate(**build), user_id=ADMIN_USER_ID
         )
+        await async_session.commit()
         await _start_build(created_build.id, BuildCreate(**build))
 
         db_build = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(Build)
                     .where(Build.id == created_build.id)
                     .options(selectinload(Build.tasks))
@@ -215,18 +214,20 @@
         for task, artifact in zip(db_build.tasks, build_task_artifacts):
             artifact["build_task_id"] = task.id
-            await session.execute(insert(BuildTaskArtifact).values(**artifact))
-        await session.commit()
+            await async_session.execute(
+                insert(BuildTaskArtifact).values(**artifact)
+            )
+            await async_session.commit()
         return db_build
 
     @pytest.fixture
     async def tasks_and_expected_output(
-        self, session: AsyncSession, create_build_and_artifacts, request
+        self, async_session: AsyncSession, create_build_and_artifacts, request
     ) -> Tuple[List[BuildTask], List[str]]:
         db_build = (
             (
-                await session.execute(
+                await async_session.execute(
                     select(Build)
                     .where(Build.id == create_build_and_artifacts.id)
                     .options(selectinload(Build.tasks))
@@ -247,24 +248,20 @@ async def tasks_and_expected_output(
             )
         elif request.param == "first_ref_one_task_completed":
             db_build.tasks[0].status = BuildTaskStatus.COMPLETED
-            expected_output = set(
-                [
-                    artifact["href"]
-                    for artifact in build_task_artifacts
-                    if not artifact["href"].endswith("ae8b7e237275/")
-                ]
-            )
+            expected_output = set([
+                artifact["href"]
+                for artifact in build_task_artifacts
+                if not artifact["href"].endswith("ae8b7e237275/")
+            ])
         elif request.param == "first_and_second_refs_one_task_completed":
             db_build.tasks[0].status = BuildTaskStatus.COMPLETED
             db_build.tasks[2].status = BuildTaskStatus.COMPLETED
-            expected_output = set(
-                [
-                    artifact["href"]
-                    for artifact in build_task_artifacts
-                    if not artifact["href"].endswith("ae8b7e237275/")
-                    and not artifact["href"].endswith("4c193ab7f688/")
-                ]
-            )
+            expected_output = set([
+                artifact["href"]
+                for artifact in build_task_artifacts
+                if not artifact["href"].endswith("ae8b7e237275/")
+                and not artifact["href"].endswith("4c193ab7f688/")
+            ])
         return db_build.tasks, expected_output
 
     @pytest.mark.parametrize(