From 624c181dcd77b1c473d61db55280e509d0a1caf7 Mon Sep 17 00:00:00 2001
From: powersaudrey25
Date: Wed, 7 Feb 2024 20:01:14 -0600
Subject: [PATCH 1/7] ENH: adding ruff

---
 .pre-commit-config.yaml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8c682da53..fe3c4e80c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,3 +12,11 @@ repos:
   rev: 22.10.0
   hooks:
     - id: black
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  # Ruff version.
+  rev: v0.2.1
+  hooks:
+    # Run the linter.
+    - id: ruff
+      args: [ --fix ]
+

From 1656a6a339e64dd89a81a6d6afd9d256cb0d3423 Mon Sep 17 00:00:00 2001
From: Ivan Johnson
Date: Wed, 7 Feb 2024 20:09:01 -0600
Subject: [PATCH 2/7] STYLE: Ruff formatting for cdk dir

---
 job-monitoring-app/cdk-infra/cdk_infra/cdk_infra_stack.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/job-monitoring-app/cdk-infra/cdk_infra/cdk_infra_stack.py b/job-monitoring-app/cdk-infra/cdk_infra/cdk_infra_stack.py
index 53ed9ff96..b3c98a7ba 100644
--- a/job-monitoring-app/cdk-infra/cdk_infra/cdk_infra_stack.py
+++ b/job-monitoring-app/cdk-infra/cdk_infra/cdk_infra_stack.py
@@ -1,5 +1,3 @@
-import json
-
 import aws_cdk as cdk
 import aws_cdk.aws_amplify_alpha as aws_amplify
 import aws_cdk.aws_apigateway as aws_apigateway
@@ -139,7 +137,7 @@ def __init__(
         tracker_amplify_app.node.default_child.platform = "WEB_COMPUTE"


         # Amplify App Build Trigger on Create
-        build_trigger = aws_custom_resources.AwsCustomResource(
+        _ = aws_custom_resources.AwsCustomResource(
             self,
             TRACKER_PREFIX + "AmplifyBuildTrigger"

From 7b6c87e90f0905da1f59b3f0b8edebf0f2d54c46 Mon Sep 17 00:00:00 2001
From: Ivan Johnson
Date: Wed, 7 Feb 2024 20:21:49 -0600
Subject: [PATCH 3/7] STYLE: Ruff Formatting of Backend

BUG: I'm stupid and made a mistake
---
 .pre-commit-config.yaml | 3 +--
 .../backend/app/internal/__init__.py | 1 -
 .../backend/app/models/__init__.py | 8 --------
 .../backend/app/models/api_key.py | 2 +-
 .../backend/app/models/event.py | 2 +-
 job-monitoring-app/backend/app/models/job.py | 2 +-
 .../app/models/metadata_configuration.py | 2 +-
 .../backend/app/routers/__init__.py | 7 -------
 .../backend/app/routers/api_keys.py | 3 +--
 .../backend/app/routers/events.py | 2 +-
 .../backend/app/routers/job_configurations.py | 4 ++--
 .../backend/app/routers/jobs.py | 4 ++--
 .../backend/app/routers/reporting.py | 5 ++---
 .../backend/app/schemas/__init__.py | 6 ------
 .../backend/app/schemas/event.py | 2 +-
 job-monitoring-app/backend/app/schemas/job.py | 2 +-
 .../backend/app/schemas/step_configuration.py | 2 +-
 .../backend/app/services/api_keys.py | 1 -
 .../backend/app/services/events.py | 1 -
 .../backend/app/services/reporting.py | 2 +-
 job-monitoring-app/backend/config/__init__.py | 1 -
 job-monitoring-app/backend/conftest.py | 3 ---
 .../tests/routers/test_job_configurations.py | 20 +++++++++----------
 .../backend/tests/routers/test_jobs.py | 2 +-
 24 files changed, 28 insertions(+), 59 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index fe3c4e80c..117ad201c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,5 +18,4 @@ repos:
   hooks:
     # Run the linter.
     - id: ruff
-      args: [ --fix ]
-
+      args: [ --fix , --ignore , "F403" ]
diff --git a/job-monitoring-app/backend/app/internal/__init__.py b/job-monitoring-app/backend/app/internal/__init__.py
index 28614722b..e69de29bb 100644
--- a/job-monitoring-app/backend/app/internal/__init__.py
+++ b/job-monitoring-app/backend/app/internal/__init__.py
@@ -1 +0,0 @@
-from .crypto import get_password_hash, verify_password, generate_apikey
diff --git a/job-monitoring-app/backend/app/models/__init__.py b/job-monitoring-app/backend/app/models/__init__.py
index 6c9bf450f..e69de29bb 100644
--- a/job-monitoring-app/backend/app/models/__init__.py
+++ b/job-monitoring-app/backend/app/models/__init__.py
@@ -1,8 +0,0 @@
-from .api_key import Apikey
-from .base import Base
-from .event import Event
-from .job import Job
-from .job_configuration import JobConfiguration
-from .metadata_configuration import MetadataConfiguration
-from .step_configuration import StepConfiguration
-from .user import User
diff --git a/job-monitoring-app/backend/app/models/api_key.py b/job-monitoring-app/backend/app/models/api_key.py
index f0e4ccaf1..707db5da3 100644
--- a/job-monitoring-app/backend/app/models/api_key.py
+++ b/job-monitoring-app/backend/app/models/api_key.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, Integer, String, ForeignKey, DateTime, func
+from sqlalchemy import Column, Integer, String, ForeignKey, DateTime
 from sqlalchemy.orm import relationship

 from .base import Base, DateMixin
diff --git a/job-monitoring-app/backend/app/models/event.py b/job-monitoring-app/backend/app/models/event.py
index 6f89c9542..6103ec943 100644
--- a/job-monitoring-app/backend/app/models/event.py
+++ b/job-monitoring-app/backend/app/models/event.py
@@ -1,5 +1,5 @@
 from sqlalchemy import Column, ForeignKey
-from sqlalchemy.sql.sqltypes import String, Integer, Enum, JSON
+from sqlalchemy.sql.sqltypes import String, Integer, Enum
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.orm import relationship

diff --git a/job-monitoring-app/backend/app/models/job.py b/job-monitoring-app/backend/app/models/job.py
index 9f9c60115..f54a69d2c 100644
--- a/job-monitoring-app/backend/app/models/job.py
+++ b/job-monitoring-app/backend/app/models/job.py
@@ -1,6 +1,6 @@
 from sqlalchemy import Column, ForeignKey, UniqueConstraint
 from sqlalchemy.sql.sqltypes import String, Integer
-from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm import relationship

 from .base import Base, DateMixin

diff --git a/job-monitoring-app/backend/app/models/metadata_configuration.py b/job-monitoring-app/backend/app/models/metadata_configuration.py
index 8d44f8e08..208774bb8 100644
--- a/job-monitoring-app/backend/app/models/metadata_configuration.py
+++ b/job-monitoring-app/backend/app/models/metadata_configuration.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, Enum, ForeignKey, UniqueConstraint
+from sqlalchemy import Column, Enum, ForeignKey
 from sqlalchemy.orm import relationship
 from sqlalchemy.sql.sqltypes import Integer, String

diff --git a/job-monitoring-app/backend/app/routers/__init__.py b/job-monitoring-app/backend/app/routers/__init__.py
index fb5b8285c..e69de29bb 100644
--- a/job-monitoring-app/backend/app/routers/__init__.py
+++ b/job-monitoring-app/backend/app/routers/__init__.py
@@ -1,7 +0,0 @@
-from .api_keys import router as apikeys_router
-from .auth import router as auth_router
-from .jobs import router as jobs_router
-from .users import router as users_router
-from .events import router as events_router
-from .job_configurations import router as job_configurations_router
-from .reporting import router as reporting_router
diff --git a/job-monitoring-app/backend/app/routers/api_keys.py b/job-monitoring-app/backend/app/routers/api_keys.py
index 141120474..886831f82 100644
--- a/job-monitoring-app/backend/app/routers/api_keys.py
+++ b/job-monitoring-app/backend/app/routers/api_keys.py
@@ -2,11 +2,10 @@
     get_db,
     get_user_from_api_key,
     API_KEY_HEADER_NAME,
-    get_current_user_from_token,
     get_current_provider,
 )
 from app import schemas, services
-from fastapi import APIRouter, Depends, Query
+from fastapi import APIRouter, Depends
 from sqlalchemy.orm import Session

 router = APIRouter()
diff --git a/job-monitoring-app/backend/app/routers/events.py b/job-monitoring-app/backend/app/routers/events.py
index 80483ef23..064e11b9c 100644
--- a/job-monitoring-app/backend/app/routers/events.py
+++ b/job-monitoring-app/backend/app/routers/events.py
@@ -1,6 +1,6 @@
 from app import schemas, services
 from app.dependencies import get_db, get_user_from_api_key
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends
 from sqlalchemy.orm import Session

 router = APIRouter()
diff --git a/job-monitoring-app/backend/app/routers/job_configurations.py b/job-monitoring-app/backend/app/routers/job_configurations.py
index a82f7d2e3..047fd78b3 100644
--- a/job-monitoring-app/backend/app/routers/job_configurations.py
+++ b/job-monitoring-app/backend/app/routers/job_configurations.py
@@ -38,7 +38,7 @@ def get_job_configuration_by_id(
     if job_configuration is None:
         raise HTTPException(status_code=404, detail="Job not found")

-    if not (provider.id in [job_configuration.provider_id]):
+    if provider.id not in [job_configuration.provider_id]:
         raise HTTPException(status_code=403, detail="Not allowed")

     return job_configuration
@@ -53,7 +53,7 @@ def get_job_configurations_by_tag_and_version(
 ):
     # case 1: get specific configuration if both tag and version are provided
     should_get_specific_version_of_tag = tag and (
-        type(version) is str and version != "latest"
+        isinstance(version, str) and version != "latest"
     )

     if should_get_specific_version_of_tag:
diff --git a/job-monitoring-app/backend/app/routers/jobs.py b/job-monitoring-app/backend/app/routers/jobs.py
index cd322f2be..78b7f4427 100644
--- a/job-monitoring-app/backend/app/routers/jobs.py
+++ b/job-monitoring-app/backend/app/routers/jobs.py
@@ -42,7 +42,7 @@ def get_job(
     if job is None:
         raise HTTPException(status_code=404, detail="Job not found")

-    if not (user.id in [job.customer_id, job.provider_id]):
+    if user.id not in [job.customer_id, job.provider_id]:
         # TODO: add job.provider_id to the list of allowed users that can
         # access this once we have api key based access? See above comment
         raise HTTPException(status_code=403, detail="Not allowed")
@@ -62,7 +62,7 @@ def get_job_events(
     if job is None:
         raise HTTPException(status_code=404, detail="Job not found")

-    if not (user.id in [job.customer_id, job.provider_id]):
+    if user.id not in [job.customer_id, job.provider_id]:
         raise HTTPException(status_code=403, detail="Not allowed")

     return job.events
diff --git a/job-monitoring-app/backend/app/routers/reporting.py b/job-monitoring-app/backend/app/routers/reporting.py
index 46d6b002f..debde88b2 100644
--- a/job-monitoring-app/backend/app/routers/reporting.py
+++ b/job-monitoring-app/backend/app/routers/reporting.py
@@ -1,10 +1,9 @@
 from datetime import datetime, timedelta

-from app import schemas, services
+from app import services
 from app.dependencies import get_db, get_current_provider
-from fastapi import APIRouter, Depends, HTTPException
+from fastapi import APIRouter, Depends
 from sqlalchemy.orm import Session
-from fastapi import FastAPI
 from fastapi.responses import StreamingResponse
 import io
 import pandas as pd
diff --git a/job-monitoring-app/backend/app/schemas/__init__.py b/job-monitoring-app/backend/app/schemas/__init__.py
index d21badbca..8df928f32 100644
--- a/job-monitoring-app/backend/app/schemas/__init__.py
+++ b/job-monitoring-app/backend/app/schemas/__init__.py
@@ -1,7 +1 @@
 from app.schemas.api_key import *
-from app.schemas.event import Event, EventCreatePublic, EventPure
-from app.schemas.job import Job, JobCreate, JobPure
-from app.schemas.job_configuration import JobConfiguration, JobConfigurationCreate
-from app.schemas.metadata_configuration import MetadataConfigurationCreate
-from app.schemas.step_configuration import StepConfigurationCreate, StepConfiguration
-from app.schemas.user import User, UserCreate
diff --git a/job-monitoring-app/backend/app/schemas/event.py b/job-monitoring-app/backend/app/schemas/event.py
index d55bcebf4..9bcc5b420 100644
--- a/job-monitoring-app/backend/app/schemas/event.py
+++ b/job-monitoring-app/backend/app/schemas/event.py
@@ -2,7 +2,7 @@
 from enum import Enum
 from typing import Dict, Optional, Union

-from pydantic import BaseModel, Json
+from pydantic import BaseModel

 from .step_configuration import StepConfiguration

diff --git a/job-monitoring-app/backend/app/schemas/job.py b/job-monitoring-app/backend/app/schemas/job.py
index 3ff1eec8b..c4730e407 100644
--- a/job-monitoring-app/backend/app/schemas/job.py
+++ b/job-monitoring-app/backend/app/schemas/job.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 from typing import List

-from pydantic import BaseModel, StrictStr
+from pydantic import BaseModel

 from . import Event
 from .job_configuration import JobConfiguration
diff --git a/job-monitoring-app/backend/app/schemas/step_configuration.py b/job-monitoring-app/backend/app/schemas/step_configuration.py
index b595b2ba7..494de95db 100644
--- a/job-monitoring-app/backend/app/schemas/step_configuration.py
+++ b/job-monitoring-app/backend/app/schemas/step_configuration.py
@@ -1,7 +1,7 @@
 from datetime import datetime
 from typing import List, Optional

-from pydantic import StrictInt, StrictStr, conlist
+from pydantic import StrictInt, StrictStr

 from .metadata_configuration import MetadataConfiguration, MetadataConfigurationCreate
 from .unique_tag import UniqueTagModel
diff --git a/job-monitoring-app/backend/app/services/api_keys.py b/job-monitoring-app/backend/app/services/api_keys.py
index 57dc8d674..5d6f2ee32 100644
--- a/job-monitoring-app/backend/app/services/api_keys.py
+++ b/job-monitoring-app/backend/app/services/api_keys.py
@@ -10,7 +10,6 @@

 from datetime import datetime

-from .users import get_user


 pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
diff --git a/job-monitoring-app/backend/app/services/events.py b/job-monitoring-app/backend/app/services/events.py
index 9669a7aad..15b195159 100644
--- a/job-monitoring-app/backend/app/services/events.py
+++ b/job-monitoring-app/backend/app/services/events.py
@@ -1,5 +1,4 @@
 from app import models, schemas
-from sqlalchemy import cast
 from sqlalchemy.orm import Session

 from .job_configuration import get_step_configuration_by_composite_key
diff --git a/job-monitoring-app/backend/app/services/reporting.py b/job-monitoring-app/backend/app/services/reporting.py
index 92b4de813..505f140bd 100644
--- a/job-monitoring-app/backend/app/services/reporting.py
+++ b/job-monitoring-app/backend/app/services/reporting.py
@@ -1,5 +1,5 @@
 import json
-from datetime import datetime, timedelta
+from datetime import datetime

 from pydantic import BaseModel
 from sqlalchemy.orm import Session
diff --git a/job-monitoring-app/backend/config/__init__.py b/job-monitoring-app/backend/config/__init__.py
index 8dcce2096..e69de29bb 100644
--- a/job-monitoring-app/backend/config/__init__.py
+++ b/job-monitoring-app/backend/config/__init__.py
@@ -1 +0,0 @@
-from .config import config
diff --git a/job-monitoring-app/backend/conftest.py b/job-monitoring-app/backend/conftest.py
index fe4a24882..c345bc4fc 100644
--- a/job-monitoring-app/backend/conftest.py
+++ b/job-monitoring-app/backend/conftest.py
@@ -1,6 +1,3 @@
-import os
-import random
-
 import pytest
 from app import schemas, services, models
 from app.models.base import truncate_all_tables
diff --git a/job-monitoring-app/backend/tests/routers/test_job_configurations.py b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
index 3fc5d210a..df0c11228 100644
--- a/job-monitoring-app/backend/tests/routers/test_job_configurations.py
+++ b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
@@ -28,7 +28,7 @@ def test_create_job_configurations(app_client, random_provider_user_with_api_key
 def test_create_job_configurations_with_new_version(
     db, app_client, random_provider_user_with_api_key
 ):
-    result = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -90,7 +90,7 @@ def test_create_job_configurations_with_new_version(
 def test_create_job_configuration_with_conflicting_version(
     db, app_client, random_provider_user_with_api_key
 ):
-    result = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -122,7 +122,7 @@ def test_create_job_configuration_with_conflicting_version(
 def test_create_job_configuration_with_conflicting_version_on_metadata(
     db, app_client, random_provider_user_with_api_key
 ):
-    result = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -286,7 +286,7 @@ def test_get_job_configurations_with_specific_tag_and_version(
 def test_job_configuration_with_tag_and_latest_version(
     app_client, db, random_provider_user_with_api_key
 ):
-    job_configuration1 = (
+    _ = (
         services.create_job_configuration(
             db,
             provider_id=random_provider_user_with_api_key.id,
@@ -411,7 +411,7 @@ def test_get_all_configurations_for_tag_with_missing_version(
 def test_get_list_of_latest_versions_for_all_job_configurations_with_version_latest(
     app_client, db, random_provider_user_with_api_key
 ):
-    job_configuration1 = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -422,7 +422,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_version_lat
         ),
     )

-    job_configuration2 = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -471,7 +471,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_version_lat

     assert response.status_code == 200
     assert len(response.json()) == 2
-
+    # TODO Check this test, why is only job configuration 3 and 4 used?
     assert response.json()[0]["id"] == job_configuration3.id
     assert response.json()[0]["provider_id"] == job_configuration3.provider_id
     assert response.json()[0]["created_at"] is not None
@@ -490,7 +490,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_version_lat
 def test_get_list_of_latest_versions_for_all_job_configurations_with_empty_query_params(
     app_client, db, random_provider_user_with_api_key
 ):
-    job_configuration1 = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -501,7 +501,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_empty_query
         ),
     )

-    job_configuration2 = services.create_job_configuration(
+    _ = services.create_job_configuration(
         db,
         provider_id=random_provider_user_with_api_key.id,
         job_configuration=schemas.JobConfigurationCreate(
@@ -544,7 +544,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_empty_query
     access_token = response.json()["access_token"]

     response = app_client.get(
-        f"/job_configurations/",
+        "/job_configurations/",
         cookies={"access_token": access_token},
     )
diff --git a/job-monitoring-app/backend/tests/routers/test_jobs.py b/job-monitoring-app/backend/tests/routers/test_jobs.py
index 7728dbdb5..7fc6d56eb 100644
--- a/job-monitoring-app/backend/tests/routers/test_jobs.py
+++ b/job-monitoring-app/backend/tests/routers/test_jobs.py
@@ -111,7 +111,7 @@ def test_get_jobs_as_customer(
     access_token = response.json()["access_token"]

     # Use access token in the request to get a job
-    response = app_client.get(f"/jobs", cookies={"access_token": access_token})
+    response = app_client.get("/jobs", cookies={"access_token": access_token})

     assert response.status_code == 200
     assert len(response.json()) == 2

From ebf3a2de7aed7dbaff557edb18de7333c558cf92 Mon Sep 17 00:00:00 2001
From: Ivan Johnson
Date: Wed, 7 Feb 2024 20:25:38 -0600
Subject: [PATCH 4/7] STYLE: Trackerapi Ruff formatting

---
 .../trackerapi/tests/test_api.py | 2 +-
 .../trackerapi/tests/test_schemas.py | 2 +-
 .../trackerapi/trackerapi/__init__.py | 7 ----
 .../trackerapi/trackerapi/schemas.py | 37 +++++++++++++------
 4 files changed, 27 insertions(+), 21 deletions(-)

diff --git a/job-monitoring-app/trackerapi/tests/test_api.py b/job-monitoring-app/trackerapi/tests/test_api.py
index 0a41f0949..2f2aa34bd 100644
--- a/job-monitoring-app/trackerapi/tests/test_api.py
+++ b/job-monitoring-app/trackerapi/tests/test_api.py
@@ -13,7 +13,7 @@ def test_init():

 def test_init_without_api_key():
     with pytest.raises(TypeError):
-        tracker = TrackerApi()
+        _ = TrackerApi()


 def test_requests_made_with_api_key():
diff --git a/job-monitoring-app/trackerapi/tests/test_schemas.py b/job-monitoring-app/trackerapi/tests/test_schemas.py
index 11e34447c..553afba05 100644
--- a/job-monitoring-app/trackerapi/tests/test_schemas.py
+++ b/job-monitoring-app/trackerapi/tests/test_schemas.py
@@ -6,7 +6,7 @@

 def test_no_duplicate_steps():
     with pytest.raises(ValidationError) as exc:
-        config = JobConfig(
+        _ = JobConfig(
            name="Test Job",
            tag="test_job",
            step_configurations=[
diff --git a/job-monitoring-app/trackerapi/trackerapi/__init__.py b/job-monitoring-app/trackerapi/trackerapi/__init__.py
index 2aae7b2e0..e69de29bb 100644
--- a/job-monitoring-app/trackerapi/trackerapi/__init__.py
+++ b/job-monitoring-app/trackerapi/trackerapi/__init__.py
@@ -1,7 +0,0 @@
-from .api import TrackerApi
-from .helpers import (
-    JobConfigManager,
-    load_job_configurations_from_json,
-    load_job_configuration_from_json,
-)
-from .schemas import JobConfig, StepConfig, JobConfigs
diff --git a/job-monitoring-app/trackerapi/trackerapi/schemas.py b/job-monitoring-app/trackerapi/trackerapi/schemas.py
index 784fead26..6fd58295e 100644
--- a/job-monitoring-app/trackerapi/trackerapi/schemas.py
+++ b/job-monitoring-app/trackerapi/trackerapi/schemas.py
@@ -1,5 +1,4 @@
-from abc import ABC, abstractmethod
-from typing import Dict, List, Optional
+from typing import List, Optional

 from pydantic import BaseModel, StrictInt, StrictStr, conlist
 from enum import Enum
@@ -33,10 +32,24 @@ class StepConfig(UniqueTagModel):

     metadata_configurations: Optional[List[MetadataConfig]] = []

-    def __init__(self, name: str, tag: str, points: int, metadata_configurations: List[MetadataConfig] = None,
-                 **kwargs):
-        metadata_configurations = metadata_configurations if metadata_configurations else []
-        super().__init__(name=name, tag=tag, points=points, metadata_configurations=metadata_configurations, **kwargs)
+    def __init__(
+        self,
+        name: str,
+        tag: str,
+        points: int,
+        metadata_configurations: List[MetadataConfig] = None,
+        **kwargs
+    ):
+        metadata_configurations = (
+            metadata_configurations if metadata_configurations else []
+        )
+        super().__init__(
+            name=name,
+            tag=tag,
+            points=points,
+            metadata_configurations=metadata_configurations,
+            **kwargs
+        )


 class JobConfig(UniqueTagModel):
@@ -45,12 +58,12 @@ class JobConfig(UniqueTagModel):
     version: str

     def __init__(
-            self,
-            name: str,
-            tag: str,
-            step_configurations: List[StepConfig],
-            version: str,
-            **kwargs
+        self,
+        name: str,
+        tag: str,
+        step_configurations: List[StepConfig],
+        version: str,
+        **kwargs
     ):
         super().__init__(
             name=name,

From e81c98b671e16ca5fd0dd449922286028b694a28 Mon Sep 17 00:00:00 2001
From: Ivan Johnson
Date: Wed, 7 Feb 2024 20:30:25 -0600
Subject: [PATCH 5/7] STYLE: Ruff formatting for Example tool

---
 example_tool/brainmask_tool.py | 31 +++++++++++-----------
 example_tool/cnn_transforms.py | 4 +--
 example_tool/pdf_report.py | 5 +---
 example_tool/pipeline_functions.py | 41 +++++++++++++++++-------------
 4 files changed, 43 insertions(+), 38 deletions(-)

diff --git a/example_tool/brainmask_tool.py b/example_tool/brainmask_tool.py
index 8060d7e22..568e09db2 100644
--- a/example_tool/brainmask_tool.py
+++ b/example_tool/brainmask_tool.py
@@ -3,14 +3,12 @@

 import argparse

 import pydicom
-from pdf2dcm import Pdf2EncapsDCM, Pdf2RgbSC
+from pdf2dcm import Pdf2EncapsDCM
 from subprocess import run
-from pipeline_functions import *
+from pipeline_functions import dicom_inference_and_conversion, brainmask_inference
 from pdf_report import generate_report
 from pydicom import dcmread
 from pathlib import Path
-from enum import Enum, auto
-

 description = "author: Michal Brzus\nBrainmask Tool\n"
@@ -51,7 +49,7 @@

 output_path = Path(args.output_dir)

-try :
+try:
     nifti_path = dicom_inference_and_conversion(
         session_dir=session_path.as_posix(),
         output_dir=output_path.as_posix(),
@@ -101,7 +99,9 @@
 mask_path = list(Path(brainmask_output_dir).glob("*.nii.gz"))[0]
 stage_name = "report_generation"
 try:
-    pdf_fn = generate_report(im_path.as_posix(), mask_path.as_posix(), report_output_dir)
+    pdf_fn = generate_report(
+        im_path.as_posix(), mask_path.as_posix(), report_output_dir
+    )
     print(f"Report created: {pdf_fn}")
 except Exception as e:
     print(f"Error in stage: {stage_name}")
@@ -114,22 +114,23 @@


 try:
     converter = Pdf2EncapsDCM()
-    converted_dcm = converter.run(path_pdf=pdf_fn, path_template_dcm=template_dcm.as_posix(), suffix =".dcm")[0]
+    converted_dcm = converter.run(
+        path_pdf=pdf_fn, path_template_dcm=template_dcm.as_posix(), suffix=".dcm"
+    )[0]
     del report_output_dir, brainmask_output_dir, nifti_path
     print(f"Report created: {converted_dcm}")

     # Adding needed metadata to the report
     """"""
-    pdf_dcm = dcmread(converted_dcm,stop_before_pixels=True)
-
+    pdf_dcm = dcmread(converted_dcm, stop_before_pixels=True)

     extra_metadata = [
-        (
-            "SeriesDescription",
-            "0008,103e",
-            f"This is a rough brainmask",
-        ),
+        (
+            "SeriesDescription",
+            "0008,103e",
+            "This is a rough brainmask",
+        ),
     ]
     for info in extra_metadata:
         title = info[0]
@@ -148,4 +149,4 @@


 print(f"Successfully finished stage: {stage_name}")
-# [ 'tests/test_data/test_file.dcm' ]
\ No newline at end of file
+# [ 'tests/test_data/test_file.dcm' ]
diff --git a/example_tool/cnn_transforms.py b/example_tool/cnn_transforms.py
index f5c5e166a..7b2efd9e1 100644
--- a/example_tool/cnn_transforms.py
+++ b/example_tool/cnn_transforms.py
@@ -150,8 +150,8 @@ def __call__(self, data):
         return d


-unsqueze_lambda = lambda x: x.squeeze(dim=0)
-shape_lambda = lambda x: x.shape
+# unsqueze_lambda = lambda x: x.squeeze(dim=0)
+# shape_lambda = lambda x: x.shape


 class ResampleMaskToOgd(object):
diff --git a/example_tool/pdf_report.py b/example_tool/pdf_report.py
index 5a1be7720..24eb4d1ae 100644
--- a/example_tool/pdf_report.py
+++ b/example_tool/pdf_report.py
@@ -6,8 +6,6 @@
 import numpy as np
 from io import BytesIO
 import base64
-import subprocess
-import platform


 # CSS Content
@@ -102,8 +100,7 @@ def generate_image(im_path, mask_path):
     return image_base64


-def generate_pdf(brain_volume, image_base64, file_path
-):
+def generate_pdf(brain_volume, image_base64, file_path):
     # HTML Content
     html_string = f"""

diff --git a/example_tool/pipeline_functions.py b/example_tool/pipeline_functions.py
index 711033ec0..aa722834e 100644
--- a/example_tool/pipeline_functions.py
+++ b/example_tool/pipeline_functions.py
@@ -1,7 +1,14 @@
-from cnn_transforms import *
+from cnn_transforms import (
+    LoadITKImaged,
+    ResampleStartRegionBrainMaskd,
+    ITKImageToNumpyd,
+    AddChanneld,
+    ToITKImaged,
+    ResampleMaskToOgd,
+    SaveITKImaged,
+)
 import pytorch_lightning as pl
 from monai.data import CacheDataset
-
 from monai.networks.layers import Norm
 from monai.networks.nets import UNet
 from monai.transforms import (
@@ -9,15 +16,8 @@
     ScaleIntensityRangePercentilesd,
     ToTensord,
     CopyItemsd,
-    KeepLargestConnectedComponentd,
-    FillHolesd
 )
-from torchmetrics.classification import Dice
 import torch
-from monai.losses.dice import GeneralizedDiceFocalLoss
-
-itk.MultiThreaderBase.SetGlobalDefaultNumberOfThreads(1)
-
 from pathlib import Path
 from dcm_classifier.study_processing import ProcessOneDicomStudyToVolumesMappingBase
 from dcm_classifier.image_type_inference import ImageTypeClassifierBase
@@ -25,6 +25,10 @@
 import re
 from pydicom import dcmread
 from subprocess import run
+import itk
+
+
+itk.MultiThreaderBase.SetGlobalDefaultNumberOfThreads(1)


 def validate_subject_id(subject_id: str) -> str:
@@ -82,9 +86,13 @@ def dicom_inference_and_conversion(
             fname = f"{validate_subject_id(sub)}_{validate_session_id(ses)}_acq-{plane}_{modality}"
             series_vol_list = series.get_volume_list()
             if len(series_vol_list) > 1:
-                print(f"Series {series_number} not supported. More than one volume in series.")
+                print(
+                    f"Series {series_number} not supported. More than one volume in series."
+                )
             else:
-                itk_im = itk_read_from_dicomfn_list(series_vol_list[0].get_one_volume_dcm_filenames())
+                itk_im = itk_read_from_dicomfn_list(
+                    series_vol_list[0].get_one_volume_dcm_filenames()
+                )
                 itk.imwrite(itk_im, f"{sub_ses_dir}/{fname}.nii.gz")

     return sub_ses_dir
@@ -108,7 +116,9 @@ def forward(self, x):
         return self.model(x)


-def brainmask_inference(data: list, model_file: str, out_dir: str, postfix='brainmask') -> None:
+def brainmask_inference(
+    data: list, model_file: str, out_dir: str, postfix="brainmask"
+) -> None:
     print("\nDATA: ", data)
     model = BrainmaskModel.load_from_checkpoint(
         checkpoint_path=model_file,
@@ -155,12 +165,10 @@ def brainmask_inference(data: list, model_file: str, out_dir: str, postfix='brai
         with torch.no_grad():  # perform the inference
             test_output = model.model(item["image"].unsqueeze(dim=0).to(device))
             # convert from one hot encoding
-            out_im = (
-                torch.argmax(test_output, dim=1).detach().cpu()
-            )
+            out_im = torch.argmax(test_output, dim=1).detach().cpu()
             print(out_im.shape)
-            item["inferred_label"] = out_im #.squeeze(dim=0)
+            item["inferred_label"] = out_im  # .squeeze(dim=0)
             item["inferred_label_meta_dict"] = item["image_meta_dict"]
             item["inferred_label_meta_dict"]["filename"] = item["image_meta_dict"][
                 "filename"
             ]
@@ -176,4 +184,3 @@ def brainmask_inference(data: list, model_file: str, out_dir: str, postfix='brai
             ]
         )
         out_transforms(item)
-

From 77f075044fc8404a82bfb5153ad18ead71a0a9ab Mon Sep 17 00:00:00 2001
From: Ivan Johnson
Date: Wed, 7 Feb 2024 20:43:05 -0600
Subject: [PATCH 6/7] BUG: Invalid imports

BUG: adding import
BUG: updating imports for alembic
BUG: updating config import for alembic
Revert "BUG: adding import"
This reverts commit 5a158bc9087b4c5976c4085fd8ebfb8f612bae7d.
FIX: merge conflict
Revert "BUG: updating config import for alembic"
This reverts commit 41a7f7af
BUG: reverting imports and ignore __init__ for ruff
WIP: trying a different way of formatting
WIP: add two more files to exclude
WIP: going back to just excluding init
---
 .pre-commit-config.yaml | 1 +
 job-monitoring-app/backend/app/internal/__init__.py | 1 +
 job-monitoring-app/backend/app/models/__init__.py | 8 ++++++++
 job-monitoring-app/backend/app/routers/__init__.py | 7 +++++++
 job-monitoring-app/backend/app/schemas/__init__.py | 6 ++++++
 job-monitoring-app/backend/config/__init__.py | 1 +
 .../backend/tests/routers/test_job_configurations.py | 2 +-
 job-monitoring-app/trackerapi/tests/test_helpers.py | 8 ++++++--
 job-monitoring-app/trackerapi/trackerapi/__init__.py | 7 +++++++
 9 files changed, 38 insertions(+), 3 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 117ad201c..9b5e99850 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,3 +19,4 @@ repos:
   hooks:
     # Run the linter.
     - id: ruff
       args: [ --fix , --ignore , "F403" ]
+      exclude: "__init__"
diff --git a/job-monitoring-app/backend/app/internal/__init__.py b/job-monitoring-app/backend/app/internal/__init__.py
index e69de29bb..28614722b 100644
--- a/job-monitoring-app/backend/app/internal/__init__.py
+++ b/job-monitoring-app/backend/app/internal/__init__.py
@@ -0,0 +1 @@
+from .crypto import get_password_hash, verify_password, generate_apikey
diff --git a/job-monitoring-app/backend/app/models/__init__.py b/job-monitoring-app/backend/app/models/__init__.py
index e69de29bb..6c9bf450f 100644
--- a/job-monitoring-app/backend/app/models/__init__.py
+++ b/job-monitoring-app/backend/app/models/__init__.py
@@ -0,0 +1,8 @@
+from .api_key import Apikey
+from .base import Base
+from .event import Event
+from .job import Job
+from .job_configuration import JobConfiguration
+from .metadata_configuration import MetadataConfiguration
+from .step_configuration import StepConfiguration
+from .user import User
diff --git a/job-monitoring-app/backend/app/routers/__init__.py b/job-monitoring-app/backend/app/routers/__init__.py
index e69de29bb..fb5b8285c 100644
--- a/job-monitoring-app/backend/app/routers/__init__.py
+++ b/job-monitoring-app/backend/app/routers/__init__.py
@@ -0,0 +1,7 @@
+from .api_keys import router as apikeys_router
+from .auth import router as auth_router
+from .jobs import router as jobs_router
+from .users import router as users_router
+from .events import router as events_router
+from .job_configurations import router as job_configurations_router
+from .reporting import router as reporting_router
diff --git a/job-monitoring-app/backend/app/schemas/__init__.py b/job-monitoring-app/backend/app/schemas/__init__.py
index 8df928f32..d21badbca 100644
--- a/job-monitoring-app/backend/app/schemas/__init__.py
+++ b/job-monitoring-app/backend/app/schemas/__init__.py
@@ -1 +1,7 @@
 from app.schemas.api_key import *
+from app.schemas.event import Event, EventCreatePublic, EventPure
+from app.schemas.job import Job, JobCreate, JobPure
+from app.schemas.job_configuration import JobConfiguration, JobConfigurationCreate
+from app.schemas.metadata_configuration import MetadataConfigurationCreate
+from app.schemas.step_configuration import StepConfigurationCreate, StepConfiguration
+from app.schemas.user import User, UserCreate
diff --git a/job-monitoring-app/backend/config/__init__.py b/job-monitoring-app/backend/config/__init__.py
index e69de29bb..8dcce2096 100644
--- a/job-monitoring-app/backend/config/__init__.py
+++ b/job-monitoring-app/backend/config/__init__.py
@@ -0,0 +1 @@
+from .config import config
diff --git a/job-monitoring-app/backend/tests/routers/test_job_configurations.py b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
index df0c11228..d38b80abc 100644
--- a/job-monitoring-app/backend/tests/routers/test_job_configurations.py
+++ b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
@@ -471,7 +471,7 @@ def test_get_list_of_latest_versions_for_all_job_configurations_with_version_lat

     assert response.status_code == 200
     assert len(response.json()) == 2
-    # TODO Check this test, why is only job configuration 3 and 4 used?
+
     assert response.json()[0]["id"] == job_configuration3.id
     assert response.json()[0]["provider_id"] == job_configuration3.provider_id
     assert response.json()[0]["created_at"] is not None
diff --git a/job-monitoring-app/trackerapi/tests/test_helpers.py b/job-monitoring-app/trackerapi/tests/test_helpers.py
index cfea88abd..881a07f98 100644
--- a/job-monitoring-app/trackerapi/tests/test_helpers.py
+++ b/job-monitoring-app/trackerapi/tests/test_helpers.py
@@ -3,8 +3,12 @@

 import pytest

-from trackerapi import JobConfig, JobConfigManager, StepConfig
-from trackerapi.helpers import DuplicateJobConfigException, MissingJobConfigException
+from trackerapi.schemas import JobConfig, StepConfig
+from trackerapi.helpers import (
+    DuplicateJobConfigException,
+    MissingJobConfigException,
+    JobConfigManager,
+)

 test_job_config = JobConfig(
     name="Test Job",
diff --git a/job-monitoring-app/trackerapi/trackerapi/__init__.py b/job-monitoring-app/trackerapi/trackerapi/__init__.py
index e69de29bb..2aae7b2e0 100644
--- a/job-monitoring-app/trackerapi/trackerapi/__init__.py
+++ b/job-monitoring-app/trackerapi/trackerapi/__init__.py
@@ -0,0 +1,7 @@
+from .api import TrackerApi
+from .helpers import (
+    JobConfigManager,
+    load_job_configurations_from_json,
+    load_job_configuration_from_json,
+)
+from .schemas import JobConfig, StepConfig, JobConfigs

From 9dc5a6afe5041e5a926e389bd0688d9ab967b259 Mon Sep 17 00:00:00 2001
From: Michal Brzus
Date: Thu, 8 Feb 2024 19:19:46 -0600
Subject: [PATCH 7/7] ENH: rewrite test data creation to decrease duplicates

---
 .../tests/routers/test_job_configurations.py | 83 ++++++++++---------
 1 file changed, 45 insertions(+), 38 deletions(-)

diff --git a/job-monitoring-app/backend/tests/routers/test_job_configurations.py b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
index d38b80abc..237aae1ad 100644
--- a/job-monitoring-app/backend/tests/routers/test_job_configurations.py
+++ b/job-monitoring-app/backend/tests/routers/test_job_configurations.py
@@ -3,6 +3,28 @@
 from starlette import status


+def create_data_dict(tag, name, version, step_name, points, step_tag, meta_name, units, kind):
+    return {
+        "tag": tag,
+        "name": name,
+        "version": version,
+        "step_configurations": [
+            {
+                "name": step_name,
+                "points": points,
+                "tag": step_tag,
+                "metadata_configurations": [
+                    {
+                        "name": meta_name,
+                        "units": units,
+                        "kind": kind,
+                    }
+                ],
+            }
+        ],
+    }
+
+
 def test_create_job_configurations(app_client, random_provider_user_with_api_key):
     data = {
         "tag": "lung_cancer",
@@ -50,25 +72,18 @@ def test_create_job_configurations_with_new_version(
         ),
     )

-    data = {
-        "tag": "lung_cancer",
-        "name": "Lung Cancer Again",
-        "version": "1.0.1",
-        "step_configurations": [
-            {
-                "name": "Lung Search",
-                "points": 10,
-                "tag": "lung_search",
-                "metadata_configurations": [
-                    {
-                        "name": "Protein Density",
-                        "units": "gm/cc",
-                        "kind": "number",
-                    }
-                ],
-            }
-        ],
-    }
+    data = create_data_dict(
+        tag="lung_cancer",
+        name="Lung Cancer Again",
+        version="1.0.1",
+        step_name="Lung Search",
+        points=10,
+        step_tag="lung_search",
+        meta_name="Protein Density",  # Specify the unique name here
+        units="gm/cc",
+        kind="number",
+    )
+
     response = app_client.post(
         "/job_configurations",
         json=data,
@@ -144,25 +159,17 @@ def test_create_job_configuration_with_conflicting_version_on_metadata(
         ),
     )

-    data = {
-        "tag": "lung_cancer",
-        "name": "Lung Cancer",
-        "version": "1.0.0",
-        "step_configurations": [
-            {
-                "name": "Lung Search",
-                "points": 10,
-                "tag": "lung_search",
-                "metadata_configurations": [
-                    {
-                        "name": "Protein Density 2",  # New field but same version + tag
-                        "units": "gm/cc",
-                        "kind": "number",
-                    }
-                ],
-            }
-        ],
-    }
+    data = create_data_dict(
+        tag="lung_cancer",
+        name="Lung Cancer",
+        version="1.0.0",
+        step_name="Lung Search",
+        points=10,
+        step_tag="lung_search",
+        meta_name="Protein Density 2",  # Specify the unique name here
+        units="gm/cc",
+        kind="number",
+    )
     response = app_client.post(
         "/job_configurations",