diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..e610b3f9
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,17 @@
+# Code Owners
+# Who are the points of contact in your project who are responsible/accountable for the project? This can often be an engineering or design manager or leader, who may or may not be the primary maintainers of the project. List them by GitHub Username
+
+@spopelka-dsac
+@wbprice
+
+# Repo Domains
+# The Repo Domains section of your CODEOWNERS file helps manage code review responsibilities efficiently. Each domain represents a different aspect of the repository, such as documentation, frontend, backend, DevOps, testing, etc. In this section, list each domain and assign the appropriate GitHub usernames or teams responsible for that domain. This ensures that pull requests (PRs) are reviewed by the right experts, maintaining high code quality and relevance.
+# Not every name on the list is required to review every change, but at least one of the people named should review every PR.
+
+/frontend/ @sachin-panayil @sman-dsac @spopelka-dsac
+
+/backend/ @spopelka-dsac @IsaacMilarky @sachin-panayil
+
+/infrastructure/ @wbprice
+
+/flyway/ @spopelka-dsac
diff --git a/.github/CODEOWNERS.md b/.github/CODEOWNERS.md
deleted file mode 100644
index 50322c94..00000000
--- a/.github/CODEOWNERS.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# Code Owners
-
-
-
-
-- ftrotter-gov
-- spopelka-dsac
-
-
-## Repo Domains
-
-
-
-/docs/ {Git usernames of documentation owners}
-/frontend/ {Git usernames of frontend owners}
diff --git a/.github/workflows/backend-test.yml b/.github/workflows/backend-test.yml
index 647188e0..56b136e9 100644
--- a/.github/workflows/backend-test.yml
+++ b/.github/workflows/backend-test.yml
@@ -6,16 +6,20 @@ permissions:
on:
pull_request:
- push:
- branches:
- - main
jobs:
+ lint:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repo
+ uses: actions/checkout@v4
+ - uses: astral-sh/ruff-action@v3
+ with:
+ src: "./backend"
+
test:
runs-on: ubuntu-latest
- defaults:
- run:
- working-directory: backend/
steps:
- name: Checkout repo
@@ -26,25 +30,20 @@ jobs:
- name: Ensure test directories have appropriate permissions for writing
run: |
- mkdir -p ./artifacts/test-reports
- chmod 777 ./artifacts/test-reports
- chmod 777 ./provider_directory/static
+ mkdir -p ./backend/artifacts/test-reports
+ chmod 777 ./backend/artifacts/test-reports
+ chmod 777 ./backend/provider_directory/static
+
+ - name: Setup test environment
+ run: |
+ make test-setup
- - name: Build and run tests
+ - name: Run tests
run: |
- make test
+ make test-backend
- name: Upload test results as artifact
uses: actions/upload-artifact@v4
with:
name: test-results
path: backend/artifacts/test-reports/*.xml
-
- - name: Publish Test Results
- uses: EnricoMi/publish-unit-test-result-action@v2
- if: (!cancelled())
- with:
- check_name: "Backend Django Test Results"
- comment_mode: changes in failures
- files: |
- backend/artifacts/test-reports/*.xml
diff --git a/.github/workflows/deploy-to-sandbox.yml b/.github/workflows/deploy-to-sandbox.yml
deleted file mode 100644
index fc47ff28..00000000
--- a/.github/workflows/deploy-to-sandbox.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-name: Deploy to Sandbox
-on:
- push:
- branches:
- - main
-
-permissions:
- id-token: write
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- - name: Configure AWS Credentials
- uses: aws-actions/configure-aws-credentials@main
- with:
- role-to-assume: arn:aws-us-gov:iam::250902968334:role/GithubActionsDeployRole
- aws-region: us-gov-west-1
- - name: Login to Amazon ECR
- id: login-ecr
- uses: aws-actions/amazon-ecr-login@v2
- - name: Build, tag, push migration image to Amazon ECR
- working-directory: flyway
- env:
- REGISTRY: ${{ steps.login-ecr.outputs.registry }}
- REPOSITORY: ndh-migrations
- IMAGE_TAG: ${{ github.sha }}
- run: |
- docker build -t $REGISTRY/$REPOSITORY:$IMAGE_TAG .
- docker push $REGISTRY/$REPOSITORY:$IMAGE_TAG
- docker tag $REGISTRY/$REPOSITORY:$IMAGE_TAG $REGISTRY/$REPOSITORY
- docker push $REGISTRY/$REPOSITORY:latest
- echo "TF_VAR_migration_image=$REGISTRY/$REPOSITORY:$IMAGE_TAG" >> "$GITHUB_ENV"
- - name: Setup NodeJS
- uses: actions/setup-node@v4
- - name: Build Frontend Assets
- working-directory: frontend
- env:
- VITE_API_BASE_URL: ""
- run: |
- npm ci
- npm run build
- - name: Build, tag, and push docker image to Amazon ECR
- working-directory: backend
- env:
- REGISTRY: ${{ steps.login-ecr.outputs.registry }}
- REPOSITORY: ndh
- IMAGE_TAG: ${{ github.sha }}
- run: |
- docker build -t $REGISTRY/$REPOSITORY:$IMAGE_TAG .
- docker push $REGISTRY/$REPOSITORY:$IMAGE_TAG
- docker tag $REGISTRY/$REPOSITORY:$IMAGE_TAG $REGISTRY/$REPOSITORY
- docker push $REGISTRY/$REPOSITORY:latest
- echo "TF_VAR_container_image=$REGISTRY/$REPOSITORY:$IMAGE_TAG" >> "$GITHUB_ENV"
- - name: Configure Terraform
- uses: hashicorp/setup-terraform@v3
- - name: Update Infrastructure, Deploy API
- working-directory: infrastructure
- env:
- REGISTRY: ${{ steps.login-ecr.outputs.registry }}
- REPOSITORY: ndh
- IMAGE_TAG: ${{ github.sha }}
- run: |
- terraform -chdir=envs/sandbox init
- terraform -chdir=envs/sandbox apply -auto-approve
diff --git a/.github/workflows/end-to-end-test.yml b/.github/workflows/end-to-end-test.yml
new file mode 100644
index 00000000..b677d6cd
--- /dev/null
+++ b/.github/workflows/end-to-end-test.yml
@@ -0,0 +1,39 @@
+name: End to End Tests
+
+permissions: {}
+
+on:
+ push:
+ branches: [main, release]
+ pull_request:
+ branches: [main, release]
+
+jobs:
+ test:
+ timeout-minutes: 10
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v5
+ - uses: docker/setup-compose-action@v1
+ - uses: actions/setup-node@v5
+ with:
+ node-version: lts/*
+      # build service containers required for `make test-server`
+ - name: Build containers
+ run: docker compose build db db-migrations django-web web
+ - name: Install Playwright dependencies
+ working-directory: playwright/
+ run: npm ci
+ - name: Install Playwright browsers
+ working-directory: playwright/
+ run: npx playwright install chromium --with-deps
+ # this step starts the test web server via `make test-server`
+ - name: Run Playwright tests
+ working-directory: playwright/
+ run: npx playwright test
+ - uses: actions/upload-artifact@v4
+ if: ${{ !cancelled() }}
+ with:
+ name: playwright-report
+ path: playwright/playwright-report/
+ retention-days: 7
diff --git a/.github/workflows/frontend-test.yml b/.github/workflows/frontend-test.yml
index 01acd150..089533b5 100644
--- a/.github/workflows/frontend-test.yml
+++ b/.github/workflows/frontend-test.yml
@@ -24,13 +24,19 @@ jobs:
run: |
npm ci
- - name: Lint frontend
+ - name: Typecheck
working-directory: frontend
run: |
npm run typecheck
+
+ - name: Lint
+ working-directory: frontend
+ run: |
npm run lint
- - name: Run tests
+ - name: Test
working-directory: frontend
+ env:
+ VITE_API_BASE_URL: http://localhost:8000
run: |
npm run coverage
diff --git a/.github/workflows/hotfix-sync.yml b/.github/workflows/hotfix-sync.yml
new file mode 100644
index 00000000..273f51df
--- /dev/null
+++ b/.github/workflows/hotfix-sync.yml
@@ -0,0 +1,46 @@
+name: Hotfix Sync
+
+on:
+ pull_request:
+ branches: [release]
+ types: [opened, closed]
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ remind-label:
+ if: github.event.action == 'opened'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/github-script@v7
+ with:
+ script: |
+ const labels = context.payload.pull_request.labels.map(l => l.name);
+
+ if (!labels.includes('hotfix')) {
+ await github.rest.issues.createComment({
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ issue_number: context.payload.pull_request.number,
+ body: `đź‘‹ If this is a hotfix, please add the \`hotfix\` label to automatically cherry pick and sync your changes with main via PR!`
+ });
+ }
+
+ cherry-pick:
+ if: >
+ github.event.action == 'closed' &&
+ github.event.pull_request.merged == true &&
+ contains(github.event.pull_request.labels.*.name, 'hotfix')
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - uses: carloscastrojumo/github-cherry-pick-action@v1.0.1
+ with:
+ branch: main
+ labels: synced-cherry-pick
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml
new file mode 100644
index 00000000..55844cb9
--- /dev/null
+++ b/.github/workflows/snyk.yml
@@ -0,0 +1,26 @@
+name: Scan Project with Snyk Python Workflow
+on: push
+permissions:
+ contents: read
+jobs:
+ security:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@master
+ - uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Install Dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install -r backend/requirements.txt
+ pip install -r etls/loadFIPS/requirements.txt
+ - name: Install Snyk CLI
+ run: |
+ npm install -g snyk@latest
+
+ - name: Run Snyk scan
+ env:
+ SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+ run: |
+ snyk test --all-projects
diff --git a/.gitignore b/.gitignore
index 38eb255f..6e827ab8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -135,6 +135,7 @@ celerybeat.pid
# Environments
.env
+.env.test
.venv
env/
venv/
@@ -185,4 +186,4 @@ scratch/
# tool version management
mise.toml
-.pre-commit-config.yaml
+
diff --git a/.gitleaksignore b/.gitleaksignore
index 294e2057..dfbbe2c6 100644
--- a/.gitleaksignore
+++ b/.gitleaksignore
@@ -206,3 +206,32 @@
#False positive
e1161fe24ddffc2afaac005126d2e21666d36dcc:db/sql/inserts/sample_data.sql:generic-api-key:27140
20530ff1fcdaaccb9554bc511d21e09f09360750:frontend/index.html:tfstate-provider-ids:13
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/estree/package.json:tfstate-provider-ids:10
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/json-schema/package.json:tfstate-provider-ids:10
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/json-schema/package.json:tfstate-provider-ids:15
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/json-schema/package.json:tfstate-provider-ids:20
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/json-schema/package.json:tfstate-provider-ids:25
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:10
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:15
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:20
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:25
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:30
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:40
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:45
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:50
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:55
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:60
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:65
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:70
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:80
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:85
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:90
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:95
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:100
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:105
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:115
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:110
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:120
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:125
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:35
+22bb4d12d16dcdda27efc8389df470af6e576c96:playwright/node_modules/@types/node/package.json:tfstate-provider-ids:75
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..c107a30a
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,33 @@
+# https://pre-commit.com/#installation
+#
+# in the root of the project:
+#
+# pip install pre-commit
+# pre-commit install
+#
+
+repos:
+ - repo: https://github.com/gitleaks/gitleaks
+ rev: v8.24.2
+ hooks:
+ - id: gitleaks
+
+ # pre-commit is opinionated about javascript in general and prettier in
+ # particular so we have to do this manually
+ - repo: local
+ hooks:
+ - id: app-prettier
+ name: run prettier
+ language: system
+ files: ^frontend/.*$
+ types_or: [ts, tsx, jsx, javascript, json]
+ entry: |
+ bash -c 'cd frontend; npx prettier --write "${@##frontend/}"' --
+
+ # https://docs.astral.sh/ruff/integrations/#pre-commit
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.14.2
+ hooks:
+ - id: ruff-check
+ args: [--fix]
+ - id: ruff-format
diff --git a/COMMUNITY.md b/COMMUNITY.md
index b3dc3b68..7ec8d583 100644
--- a/COMMUNITY.md
+++ b/COMMUNITY.md
@@ -14,6 +14,7 @@ NPD is supported by a dedicated team of individuals fulfilling various roles to
| Engineer | Blaine Price | DSAC |
| Engineer | Sachin Panayil | USDC @ DSAC |
| Engineer | Isaac Milarsky | USDC @ DSAC |
+| Engineer | Ross Miller | DSAC |
See [CODEOWNERS.md](.github/CODEOWNERS.md) for a list of those responsible for the code and documentation in this repository.
@@ -79,17 +80,17 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on the release process.
-
-
+
+
- Sachin Panayil
+ Isaac Milarsky
|
-
-
+
+
- Isaac Milarsky
+ Sachin Panayil
|
@@ -136,6 +137,15 @@ See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on the release process.
Fred Trotter
|
+
+
+
+
+ rmillergv
+
+ |
+
+
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3a0027fb..734e265c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,17 +1,19 @@
-| Status | Date | Author | Context |
-| --- | --- | --- | --- |
-| Drafted | 2029-07-01 | @spopelka-dsac | project scaffolding |
-| Updated | 2029-08-19 | @spopelka-dsac | adding data and docker notes |
+| Status | Date | Author | Context |
+| ------- | ---------- | -------------- | --------------------------------------------- |
+| Drafted | 2029-07-01 | @spopelka-dsac | project scaffolding |
+| Updated | 2029-08-19 | @spopelka-dsac | adding data and docker notes |
| Updated | 2029-09-30 | @abachman-dsac | clarification of coding styles and PR details |
-| Updated | 2029-10-15 | @abachman-dsac | addressing feedback from #108 |
+| Updated | 2029-10-15 | @abachman-dsac | addressing feedback from #108 |
+| Updated | 2029-12-03 | @abachman-dsac | adding notes on `make` and `bin/npr` |
- [How to Contribute](#how-to-contribute)
- [Getting Started](#getting-started)
- [Team Specific Guidelines](#team-specific-guidelines)
- - [Building dependencies](#building-dependencies)
+ - [Installing](#installing)
- [Building the Project](#building-the-project)
- [Database Setup](#database-setup)
- [Running the Application](#running-the-application)
+ - [One-off commands](#one-off-commands)
- [Workflow and Branching](#workflow-and-branching)
- [Testing Conventions](#testing-conventions)
- [Backend Tests](#backend-tests)
@@ -28,7 +30,6 @@
- [Security and Responsible Disclosure Policy](#security-and-responsible-disclosure-policy)
- [Public domain](#public-domain)
-
# How to Contribute
@@ -42,6 +43,11 @@ contributions.
We encourage you to read this project's CONTRIBUTING policy (you are here), its
[LICENSE](LICENSE.md), and its [README](README.md).
+These instructions are general and do not cover every scenario. [Create an
+issue](https://github.com/DSACMS/npd/issues) on this project or double check
+current documentation if you run into a situation you are unable to solve by
+rebuilding the application from scratch.
+
## Getting Started
### Team Specific Guidelines
@@ -56,33 +62,32 @@ health-tech community at large.
The team uses an internal Jira instance for planning and tracking work but
seeks to hold any discussions relevant to specific Pull Requests in the open.
-### Building dependencies
+### Installing
Python and Javascript dependencies are handled via docker containers, so they
will be built when running `docker compose build` or when running `docker
-compose up` for the first time in the `backend/` or `frontend/` directories,
-respectively.
+compose up` for the first time.
+
+Local dependencies for project tooling and testing can be installed with `make`:
-The `backend/` directory additionally includes support for `make` commands to
-help with development. You can run `make help` from inside that folder to get
-more information.
+```sh
+# build containers, create and migrate the development database
+make setup
+
+# install ruff and playwright
+make install-tools
+```
If you prefer to run on host (aka, not inside docker containers), you will have
to follow the instructions provided by your language tooling for installing
-dependencies locally with `pip` for Python or `npm` for Javascript.
+dependencies locally. We recommend using `pip` in the `backend/` directory for Python and `npm` in the `frontend/` and `playwright/` directories for Javascript.
### Building the Project
-The project is currently limited to a Django (Python) application located in the
-`backend/` sub-directory.
+The NPD application is a Django application located in the `backend/` sub-directory. It serves a React single-page application located in the `frontend/` sub-directory, and is backed by a database whose schema is managed by [flyway](https://documentation.red-gate.com/fd/getting-started-with-flyway-184127223.html) using SQL source code from the `flyway/` directory.
-The following guidance assumes that you have navigated in your console to the
-respective folder. To run a `docker compose` command, for example:
-
-```console
-$ cd backend/
-$ make setup && make up
-```
+All commands listed below assume you are at the root of the project in your shell.
#### Database Setup
@@ -94,28 +99,29 @@ Running `make setup` will:
#### Running the Application
-These instructions are general and do not cover every scenario. [Create an
-issue](https://github.com/DSACMS/npd/issues) on this project or double check
-current documentation if you run into a situation you are unable to solve by
-rebuilding the application from scratch.
+To start the development project with system defaults, run `make setup` and then `make up`.
+
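+In full, with the URLs that `make up` prints once the services are running:
+
+```sh
+make setup   # build containers, create and migrate the development database
+make up      # start the app at http://localhost:8000/ (API docs at http://localhost:8000/fhir/docs/)
+```
+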
+You can sign in to the application using the default development user account at http://localhost:8000/accounts/login/ with username: `developer@cms.hhs.gov`, password: `password123`. You can use the same credentials to sign in to the Django admin site at http://localhost:8000/admin/login/.
+
+_Optional_: you can manage your local project environment variables by creating a "dotenv" file in the `backend/` directory. The easiest way to do that is make a copy of the example:
+
+```sh
+cp backend/.env_template backend/.env
+```
+
+#### One-off commands
+
+We recommend use of [Docker Compose](https://docs.docker.com/compose/), `make`, and the `bin/npr` tool included in this project for all development work.
-0. Navigate to the `backend/` directory.
-1. Ensure that the `db` service is running. Use `docker compose up -d db` if it
- is not.
-2. Create a `.env` file in the `backend/` directory with `cp
- backend/.env_template backend/.env`
- * _note:_ set `NPD_DB_HOST` to `host.docker.internal` if using a host
- Postgres instance from inside a container.
-3. Run `docker compose up` initially to start the web application service and
- `docker compose up --build` following any substantial updates to the backend
- application
-4. Navigate to `http://localhost:8000/fhir/` or run `curl localhost:8000/fhir`
- to visit the application. You should see an API documentation landing page.
-5. Happy coding!
+You can review the use of those tools by running `make` or `bin/npr --help` at the command line.
+
+`make` is intended to provide the most common commands in a standard shape for development work.
+
+`bin/npr` is a runner for managing complex `docker compose run` commands across the various services that make up this project.
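+
+For example, a few `bin/npr` invocations as they appear in the root `Makefile` (the flags shown follow those usages):
+
+```sh
+# apply flyway migrations to the development database
+bin/npr migrate
+
+# run a one-off Django management command against the test environment
+bin/npr -t python manage.py test npdfhir.tests
+
+# run an npm script with an extra environment variable
+bin/npr -e VITE_API_BASE_URL=http://localhost:8000 npm run build
+```
+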
### Workflow and Branching
-We follow the [GitHub Flow Workflow](https://guides.github.com/introduction/flow/)
+We follow the [GitHub Flow Workflow](https://guides.github.com/introduction/flow/).
1. Fork the project
2. Check out the `main` branch
@@ -126,7 +132,6 @@ We follow the [GitHub Flow Workflow](https://guides.github.com/introduction/flow
7. Wait for your change to be pulled into `DSACMS/npd/main`
8. Delete your feature branch
-
### Testing Conventions
It is an expectation of this project that each feature will have new automated
@@ -137,6 +142,8 @@ We do not expect 100% test coverage but we will be unlikely to accept Pull
Requests which reduce test coverage or new features which do not include
updates to the test suite.
+We recommend starting new feature work with a new Playwright end-to-end test and going from there.
+
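+One way to do that locally, using the `make` targets defined at the project root:
+
+```sh
+# terminal 1: seed the test database, build test frontend assets, start the test server
+make test-server
+
+# terminal 2: run the Playwright suite against it
+make playwright
+```
+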
#### Backend Tests
The backend test suite can be found in the `tests.py` file currently in
@@ -149,9 +156,9 @@ on testing for additional details.
### Coding Style and Linters
-> [!NOTE]
-> **Proposed**: Use `ruff` for python, `prettier` for typescript / javascript.
-> Linter + formatter wins all debates. Use defaults whenever possible.
+We require use of `ruff` to format all Python code and `prettier` to format all Typescript code.
+
+The easiest way to keep your commits clean is to install the formatting tools and pre-commit with `make install-tools`.
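+
+With the hooks installed, you can also run every configured check by hand before pushing (standard `pre-commit` usage):
+
+```sh
+make install-tools          # installs ruff and pre-commit, and sets up the git hooks
+pre-commit run --all-files  # run every configured hook against the whole repository
+```
+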
### Writing Issues
@@ -173,8 +180,9 @@ When creating an issue please try to adhere to the following format:
see our .github/ISSUE_TEMPLATE.md for more examples.
-In this project, issues should be limited to code, development tooling,
-automation, or site bugs, ___NOT___ data quality.
+In this project, [new issues](https://github.com/DSACMS/npd/issues) should be
+limited to code, development tooling, automation, or site bugs, ___NOT___ data
+quality.
### Creating Commits
@@ -520,7 +528,6 @@ In rare cases, a hotfix for a prior release may be required out-of-phase with th
We also welcome improvements to the project documentation or to the existing
docs. Please file an [issue](https://github.com/DSACMS/npd/issues).
-
## Policies
### Open Source Policy
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..764e2ecc
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,253 @@
+# Makefile
+
+# set NPD_DEVELOPMENT to "True" in your shell to skip dangerous action
+# confirmation steps
+NPD_DEVELOPMENT ?= no
+
+# default target
+.DEFAULT_GOAL := help
+
+help:
+ @echo "Available commands:"
+ @echo ""
+ @echo " build Build all Docker containers for the application"
+ @echo " setup Set up development environment (build + create-db + migrate)"
+ @echo " install-tools Install development support tools"
+ @echo " update Update development environment"
+ @echo ""
+ @echo " create-db Start postgres, create and populate a development DB"
+ @echo " migrate Apply pending migrations to the development database"
+ @echo ""
+ @echo " up Start the NPD application at http://localhost:8000"
+ @echo " down Stop all running docker compose services"
+ @echo ""
+ @echo " test Run the full frontend and backend test suites with DB setup"
+ @echo " test-setup Set up test database (drop/create test DB + run test migrations)"
+ @echo " test-backend Run the backend test suite without rerunning database setup"
+ @echo " Use ARGS=... to pass arguments"
+ @echo " test-frontend Run the frontend test suite"
+ @echo " Use ARGS=... to pass arguments"
+ @echo " test-server Start a test server for e2e testing with Playwright"
+ @echo " playwright Run the playwright e2e test suite (on host)"
+ @echo ""
+ @echo " clean Remove cache files, test artifacts, and transient frontend assets"
+ @echo ""
+ @echo " lint Check code against the appropriate linter (ruff or eslint)"
+	@echo "  format            Format code with the appropriate formatter (ruff or prettier)"
+ @echo ""
+ @echo " createsuperuser Interactively set up a Django superuser account."
+ @echo " Pass env vars to run automatically:"
+ @echo " DJANGO_SUPERUSER_EMAIL"
+ @echo " DJANGO_SUPERUSER_USERNAME"
+ @echo " DJANGO_SUPERUSER_PASSWORD"
+ @echo ""
+ @echo " drop-db \033[31m[DANGEROUS]\033[0m Drop the development database"
+ @echo " reset-db \033[31m[DANGEROUS]\033[0m Drop and fully recreate the development database"
+ @echo ""
+ @echo " help Show this help message"
+ @echo ""
+ @echo "Common workflows:"
+ @echo " First time setup: make setup && make install-tools && make up"
+ @echo " Daily development: git pull && make update && make up"
+ @echo " Before committing: make test && make lint"
+ @echo " Run isolated tests: make test"
+	@echo "  Run e2e tests:      make test-server & make playwright"
+ @echo " Run some tests: make test-backend ARGS=npdfhir.tests"
+ @echo " Run one test: make test-backend ARGS=provider_directory.tests.test_frontend_settings.TestFeatureFlags.test_returns_flags_json"
+ @echo " Clean shutdown: make down && make clean"
+
+.PHONY: build
+build:
+ @docker compose build
+
+.PHONY: install-tools
+install-tools:
+ @echo "Setting up local python virtual environment"
+ @$(MAKE) -C backend .venv/bin/activate
+ @source backend/.venv/bin/activate; \
+ echo "Setting up local development tools using $(shell which pip)"; \
+ $(MAKE) -C backend install-tools; \
+ pre-commit install
+ @echo "Setting up playwright on host"
+ @cd playwright; \
+ npm install; \
+ npx playwright install --with-deps chromium;
+
+
+.PHONY: lint
+lint:
+ @echo "\033[2m[ lint backend ]\033[0m"
+ @$(MAKE) -C backend lint
+ @echo "\033[2m[ lint frontend ]\033[0m"
+ @bin/npr npm run lint
+
+.PHONY: format
+format:
+ @echo "\033[2m[ format backend ]\033[0m"
+ @$(MAKE) -C backend format
+ @echo "\033[2m[ format frontend ]\033[0m"
+ @bin/npr npm run format
+
+###
+## Database management
+###
+
+.PHONY: drop-db
+drop-db:
+ifneq ($(NPD_DEVELOPMENT), True)
+ @printf "Are you sure you want to drop your local database? [y/N] " && read ans && ( [[ "$${ans:-N}" == y ]] || ( echo "cancelling changes" && exit 1 ) )
+endif
+ @echo "Dropping development database..."
+ @docker compose up -d db
+ @docker compose run --rm db sh -c 'echo "dropping $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -h db -U $$POSTGRES_USER -d postgres -c "DROP DATABASE IF EXISTS $$POSTGRES_DB" || echo "failed to drop $$POSTGRES_DB"'
+
+.PHONY: create-db
+create-db:
+ @echo "Creating development database..."
+ @docker compose up -d db
+# create development database only if it doesn't already exist
+ @docker compose run --rm db sh -c 'echo "creating $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -h db -U $$POSTGRES_USER -d postgres -c "CREATE DATABASE $$POSTGRES_DB" || echo "$$POSTGRES_DB already exists"'
+
+# run all flyway migrations for the development environment
+.PHONY: migrate
+migrate:
+ @echo "Migrating the development database..."
+ @docker compose up -d db
+ @bin/npr migrate
+
+# drop, create, and then run all flyway migrations for the development environment
+.PHONY: reset-db
+reset-db:
+ @echo "Resetting the development database..."
+ @docker compose down db
+ @$(MAKE) drop-db
+ @$(MAKE) create-db
+ @$(MAKE) migrate
+
+###
+# Frontend asset management
+###
+
+.PHONY: clean-frontend
+clean-frontend:
+ @echo "Removing frontend assets from backend/provider_directory/static"
+ @rm -rf backend/provider_directory/static/*
+ @rm -rf backend/provider_directory/static/.vite
+
+# only rebuild frontend assets if they don't already exist
+backend/provider_directory/static/.vite/manifest.json:
+ @echo "Building frontend assets with VITE_API_BASE_URL=$(VITE_API_BASE_URL)"
+ @bin/npr npm install
+ @bin/npr -e VITE_API_BASE_URL=$(VITE_API_BASE_URL) npm run build
+
+.PHONY: build-frontend-assets
+build-frontend-assets: clean-frontend
+ export VITE_API_BASE_URL=http://localhost:8000; \
+ $(MAKE) backend/provider_directory/static/.vite/manifest.json
+
+# build frontend assets and ensure the backend application is running
+.PHONY: up
+up:
+	@echo "Starting django-web and web services..."
+ @docker compose up -d django-web web
+ @echo "Backend is running"
+ @echo " site: http://localhost:8000/"
+ @echo " docs: http://localhost:8000/fhir/docs/"
+
+.PHONY: down
+down:
+ @echo "Shutting down all docker compose services..."
+ @docker compose down
+
+.PHONY: test-setup
+test-setup:
+ @echo "Setting up test database..."
+ @docker compose -f compose.test.yml up -d --wait db
+# drop, create, and migrate test database
+ @docker compose -f compose.test.yml exec db sh -c 'echo "DROP $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -q -h localhost -U "$$POSTGRES_USER" -d postgres -c "DROP DATABASE IF EXISTS $$POSTGRES_DB"'
+ @docker compose -f compose.test.yml exec db sh -c 'echo "CREATE $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -q -h localhost -U "$$POSTGRES_USER" -d postgres -c "CREATE DATABASE $$POSTGRES_DB"'
+ @bin/npr -t migrate
+
+.PHONY: test-backend
+test-backend:
+ @echo "Running backend tests..."
+ @bin/npr -t python manage.py test $(ARGS)
+
+.PHONY: test-frontend
+test-frontend:
+ @echo "Running frontend tests..."
+ @bin/npr npm test $(ARGS)
+
+.PHONY: test
+test: test-setup
+ @$(MAKE) test-backend
+ @$(MAKE) test-frontend
+
+.PHONY: playwright
+playwright:
+ @cd playwright; \
+ npx playwright test
+
+# clean up test artifacts
+.PHONY: clean
+clean: clean-frontend
+ @$(MAKE) -C backend clean
+
+###
+# Data seeding utilities
+###
+
+.PHONY: createsuperuser
+createsuperuser:
+ifeq ($(and $(DJANGO_SUPERUSER_EMAIL),$(DJANGO_SUPERUSER_USERNAME),$(DJANGO_SUPERUSER_PASSWORD)),)
+ @bin/npr python manage.py createsuperuser
+else
+ @bin/npr \
+ -e DJANGO_SUPERUSER_EMAIL="$(DJANGO_SUPERUSER_EMAIL)" \
+ -e DJANGO_SUPERUSER_USERNAME="$(DJANGO_SUPERUSER_USERNAME)" \
+ -e DJANGO_SUPERUSER_PASSWORD="$(DJANGO_SUPERUSER_PASSWORD)" \
+ python manage.py createsuperuser --no-input
+endif
+
+.PHONY: seed-users
+seed-users:
+ @bin/npr \
+ -e DJANGO_SUPERUSER_EMAIL="npd.admin@cms.hhs.gov" \
+ -e DJANGO_SUPERUSER_USERNAME="npdadmin" \
+ -e DJANGO_SUPERUSER_PASSWORD="password123" \
+ python manage.py createsuperuser --no-input
+
+##
+# end-to-end test support
+##
+
+.PHONY: build-frontend-test-assets
+build-frontend-test-assets: clean-frontend
+ export VITE_API_BASE_URL=http://localhost:8008; \
+ $(MAKE) backend/provider_directory/static/.vite/manifest.json
+
+.PHONY: watch-frontend-test-assets
+watch-frontend-test-assets:
+ bin/npr -e VITE_API_BASE_URL=http://localhost:8008 npm run watch
+
+.PHONY: test-system-setup
+test-system-setup: test-setup
+ bin/npr --test python manage.py seeduser
+ bin/npr --test python manage.py seedsystem
+
+.PHONY: test-server
+test-server: test-system-setup build-frontend-test-assets
+ bin/npr --test --publish 8008:8008 python manage.py runserver 0.0.0.0:8008
+
+###
+# whole project concerns
+###
+
+# prepare the local working copy for NPD development
+.PHONY: setup
+setup: build create-db migrate
+ @$(MAKE) -C backend setup
+
+# bring local working copy up to date
+.PHONY: update
+update: build migrate build-frontend-assets
\ No newline at end of file
diff --git a/README.md b/README.md
index 3e3f77f7..1aed72de 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,35 @@
# CMS National Provider Directory
+- [CMS National Provider Directory](#cms-national-provider-directory)
+ - [About the Project](#about-the-project)
+ - [Problem](#problem)
+ - [Main Challenges](#main-challenges)
+ - [Planned Solution](#planned-solution)
+ - [Project Vision](#project-vision)
+ - [Agency Mission](#agency-mission)
+ - [Team Mission](#team-mission)
+ - [Core Team](#core-team)
+ - [Repository Structure](#repository-structure)
+- [Development and Software Delivery Lifecycle](#development-and-software-delivery-lifecycle)
+ - [Community](#community)
+ - [Community Guidelines](#community-guidelines)
+ - [Governance](#governance)
+ - [Feedback](#feedback)
+ - [Policies](#policies)
+ - [Open Source Policy](#open-source-policy)
+ - [Security and Responsible Disclosure Policy](#security-and-responsible-disclosure-policy)
+ - [Software Bill of Materials (SBOM)](#software-bill-of-materials-sbom)
+ - [Public domain](#public-domain)
+
## About the Project
### Problem
-CMS maintains the country’s de facto provider directory because of the agency’s role in registering new doctors for a National Provider ID in the National Plan and Provider Enumeration System (NPPES), and because of the agency’s role in enrolling providers in Medicare, overseeing the State Medicaid programs, and running the Federally-facilitated marketplace.
+CMS maintains the country’s de facto provider directory because of the agency’s role in registering new doctors for a National Provider ID in the National Plan and Provider Enumeration System (NPPES), and because of the agency’s role in enrolling providers in Medicare, overseeing the State Medicaid programs, and running the Federally-facilitated marketplace.
-Enrollment for Medicaid happens in the states, but for Medicare, the enrollment workflow happens in the Provider Enrollment, Chain, and Ownership System (PECOS). However, NPPES and PECOS data is often inaccurate and lacks key interoperability information needed by CMS and the industry. The Medicare enrollment process provides some validation of the information, but the process is done in multiple systems, partly performed by CMS and partly performed by regional Medicare Administrative Contractors (MACs).
+Enrollment for Medicaid happens in the states, but for Medicare, the enrollment workflow happens in the Provider Enrollment, Chain, and Ownership System (PECOS). However, NPPES and PECOS data is often inaccurate and lacks key interoperability information needed by CMS and the industry. The Medicare enrollment process provides some validation of the information, but the process is done in multiple systems, partly performed by CMS and partly performed by regional Medicare Administrative Contractors (MACs).
-Additionally, there are several paper forms (received by fax) involved in the process and proprietary solutions used by the MACs to validate the data before submitting it back to CMS. These processes are duplicative, ineffective, costly, and the improved information is not shared back with the industry.
+Additionally, there are several paper forms (received by fax) involved in the process and proprietary solutions used by the MACs to validate the data before submitting it back to CMS. These processes are duplicative, ineffective, costly, and the improved information is not shared back with the industry.
The healthcare system has been begging for a single directory at CMS for decades and the cost to the larger healthcare industry of not having one is estimated at $2.76B a year.
@@ -19,21 +40,21 @@ The healthcare system has been begging for a single directory at CMS for decades
* Due to duplicative places the provider has to update and the risk of fines from health plans, plans continually badger providers to update their information. Because each provider has to update ~20 systems monthly, the exercise is futile and there is lack of motivation to keep trying.
* Billing information and patient-facing information are consistently conflated within the ecosystem causing patients to try to visit mailing addresses, rather than practice addresses.
* Interoperability efforts desperately need a central repository of provider FHIR endpoints, but it does not currently exist.
-* The health plan data that indicates which providers participate in each insurance plan is stored in different formats, is difficult to access, and is updated at different intervals. This results in patients being unable to access accurate information as they seek care, which means that patients cannot find plans with the specific providers they need, nor can they easily tell if a provider they want to see will be covered by their insurance.
+* The health plan data that indicates which providers participate in each insurance plan is stored in different formats, is difficult to access, and is updated at different intervals. This results in patients being unable to access accurate information as they seek care, which means that patients cannot find plans with the specific providers they need, nor can they easily tell if a provider they want to see will be covered by their insurance.
#### Planned Solution
-Create a modern version of a directory, which includes provider and payer data, to serve as a single source of truth that can be updated by health plans and providers for the benefit of all. This directory will create efficiencies for the entire national healthcare system, as it will reduce data collection and reporting burden on both payers and providers while improving data accuracy and better serving the beneficiaries and consumers. For example, this directory can be used to find information such as the provider practices and addresses, hospitals, specialty, state medical licenses, quality scores, interoperability addresses (including data sharing networks and individual endpoint addresses), the insurance plans the provider participates in, and other useful data for patients, other providers, and health plans.
-
We are breaking the initial MVP work into two work streams: **Core Data Model** and **National Provider Directory**.
The Core Data Model workstream encompassess all of the incoming data pipelines from various CMS open data sources, internal-to-CMS data sources, and data provided by industry partners. The goal of the Core Data Model workstream is to layer and combine data from these sources to build as accurate of a representation of our nation's healthcare providers, healthcare providing organizations, and healthcare data networks as possible.
-The National Provider Direcotry workstream focuses on exposing the key elements of the Core Data Model through a FHIR API and a user-friendly search interface. Eventually, providers and organizations will be able to use the National Provider Directory to update their information, as well.
+The National Provider Directory workstream focuses on exposing the key elements of the Core Data Model through a FHIR API and a user-friendly search interface.
+
+Eventually, providers and organizations will be able to use the National Provider Directory to update their information as well.
### Project Vision
-We envision a world where the provider experience at CMS is so seamless that it is a joy and a breeze for providers to keep their information up-to-date.
+We envision a world where the provider experience at CMS is so seamless that it is a joy and a breeze for providers to keep their information up-to-date.
The CMS Provider Directory should be an authoritative and accurate source of provider information.
@@ -51,57 +72,24 @@ A list of core team members responsible for the code and documentation in this r
## Repository Structure
-This is the main repository for the Naitonal Provider Directory (NPD) workstream, which will will contain sub-directories for each component of National Provider Directory. You will find more information about each component in a README.md file within its respective directory.
-
-There are additional repositories involved in the ecosystem surrounding the NPD effort:
-
-- [Puffin](https://github.com/DSACMS/npd_Puffin)
-- [VEINHasher](https://github.com/DSACMS/npd_VEINHasher)
-- [CSViper](https://github.com/DSACMS/npd_csviper)
-- [Cicadence](https://github.com/DSACMS/npd_cicadence)
-- [PlainerFlow](https://github.com/DSACMS/npd_plainerflow)
-- [Plan Scrape](https://github.com/DSACMS/npd_plan_scrape)
-- [NUCC Slurp](https://github.com/DSACMS/npd_nucc_slurp)
-- [Endpoint API Validator](https://github.com/DSACMS/npd-endpoint-api-validator)
-- [DURC is CRUD](https://github.com/search?q=org%3ADSACMS+npd_&type=repositories#:~:text=DSACMS/npd_durc_is_crud)
-- [VRDC Python Projects](https://github.com/DSACMS/npd_vrdc_python_projects)
-- [NPD EHR FHIR NPI Slurp](https://github.com/DSACMS/npd_ehr_fhir_npi_slurp)
-
-These repositories contain the source code for various elements of the data pipelines that make up the core data product workstream.
-
-### flyway/
-
-[`flyway/`](./flyway/) contains sql code for the National Provider Directory database in a structure suitable for consumption by the [Flyway database migration tool](https://www.red-gate.com/products/flyway/community/).
+This is the main repository for the National Provider Directory (NPD) workstream. You will find more information about each component in a README.md file within its respective directory.
-- [`flyway/sql/`](./flyway/sql/) contains the code necessary to create the database for this project and example data for use in development and testing
-- [`flyway/tinman_SQL_schema_standard`](./flyway/tinman_SQL_schema_standard/) contains the project's sql naming conventions and guidelines
+- [backend](./backend/): FHIR Provider Directory API and directory browser
+- [frontend](./frontend/): Directory browser React application
+- [flyway](./flyway/): Database migrations
+- [playwright](./playwright/): End-to-end test suite
-### etls/
-
-[`etls/`](./etls/) contains pipelines that extract, transform, and load (ETL) ancillary data into the database for the FHIR API. Each sub-directory in the `etls/` directory represents a different input data source.
-
-> [!Note]
-> These are helper ETL tools, specific to the FHIR API. The main ETLs are found in the [Puffin Repo](https://github.com/DSACMS/npd_Puffin). Eventually this folder will store code to map the data from the Core Data Product data model to the provider directory data model.
-
-### backend/
-
-[`backend/`](./backend/) contains the backend python code for the National Provider Directory API application, built on Django. The `backend/npdfhir/` subdirectory contains the code for the FHIR API.
-
-### frontend/
-
-[`frontend/`](./frontend/) contains a Typescript + React application supporting dynamic components for NPD provider search and other user-facing functionality hosted by the NPD project.
+The [DSACMS/npd_etl](https://github.com/DSACMS/npd_etl) project on GitHub provides data ingestion for this system.
# Development and Software Delivery Lifecycle
-The following guide is for members of the project team who have access to the repository as well as code contributors. The main difference between internal and external contributions is that external contributors will need to fork the project and will not be able to merge their own pull requests.
-
For more information on contributing, including notes on project setup and development, see: [CONTRIBUTING.md](./CONTRIBUTING.md).
-Please note: We are taking an iterative approach to the development of this project, starting first with an MVP and building additional functionality as we go.
+We are taking an iterative approach to the development of this project, starting first with an MVP and building additional functionality as we go.
## Community
-The npd team is taking a community-first and open source approach to the product development of this tool. We believe government software should be made in the open and be built and licensed such that anyone can download the code, run it themselves without paying money to third parties or using proprietary software, and use it as they will.
+The NPD team is taking a community-first and open source approach to the product development of this tool. We believe government software should be made in the open and be built and licensed such that anyone can download the code, run it themselves without paying money to third parties or using proprietary software, and use it as they will.
We know that we can learn from a wide variety of communities, including those who will use or will be impacted by the tool, who are experts in technology, or who have experience with similar technologies deployed in other spaces. We are dedicated to creating forums for continuous conversation and feedback to help shape the design and development of the tool.
@@ -113,7 +101,7 @@ Principles and guidelines for participating in our open source community are can
## Governance
-Information about how the npd community is governed may be found in [GOVERNANCE.md](GOVERNANCE.md).
+Information about how the NPD community is governed may be found in [GOVERNANCE.md](GOVERNANCE.md).
## Feedback
@@ -121,11 +109,6 @@ If you have ideas for how we can improve or add to our capacity building efforts
If you would like to comment on the tool itself, please let us know by [filing an issue on our GitHub repository](https://github.com/DSACMS/npd/issues).
-
-
## Policies
### Open Source Policy
diff --git a/backend/Makefile b/backend/Makefile
index d7e2e1bb..3dd569b3 100644
--- a/backend/Makefile
+++ b/backend/Makefile
@@ -1,119 +1,33 @@
-# Makefile
-
-# set NPD_DEVELOPMENT to "True" in your shell to skip dangerous action
-# confirmation steps
-NPD_DEVELOPMENT ?= no
-
-# Default target
-.PHONY: build create-db setup migrate up down test test-setup clean help
-
-# Default target
-.DEFAULT_GOAL := help
-
-help:
- @echo "Available targets:"
- @echo ""
- @echo " build Build Docker containers for the application"
- @echo " create-db Start the database server and create a development database"
- @echo " migrate Apply migrations to the development database"
- @echo " setup Set up development environment (build + create-db + migrate)"
- @echo ""
- @echo " up Start the backend web application (available at http://localhost:8000)"
- @echo " down Stop all running backend services"
- @echo ""
- @echo " test-setup Set up test database (drop/create test DB + run test migrations)"
- @echo " test Run the full test suite (includes test-setup)"
- @echo " clean Remove cache files and artifacts directory"
- @echo ""
- @echo " createsuperuser Interactively set up a Django superuser account."
- @echo " Pass env vars to run automatically:"
- @echo " DJANGO_SUPERUSER_EMAIL"
- @echo " DJANGO_SUPERUSER_USERNAME"
- @echo " DJANGO_SUPERUSER_PASSWORD"
- @echo ""
- @echo " drop-db \033[31m[DANGEROUS]\033[0m Drop the development database"
- @echo " reset-db \033[31m[DANGEROUS]\033[0m Drop and fully recreate the development database"
- @echo ""
- @echo " help Show this help message"
- @echo ""
- @echo "Common workflows:"
- @echo " First time setup: make setup && make up"
- @echo " Daily development: git pull && make migrate && make up"
- @echo " Run tests: make test"
- @echo " Clean shutdown: make down && make clean"
-
-build:
- @docker compose build
-
-drop-db:
-ifneq ($(NPD_DEVELOPMENT), True)
- @printf "Are you sure you want to drop your local database? [y/N] " && read ans && ( [[ "$${ans:-N}" == y ]] || ( echo "cancelling changes" && exit 1 ) )
-endif
- @echo "Dropping development database..."
- @docker compose up -d db
- @docker compose run --rm db sh -c 'echo "dropping $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -h db -U $$POSTGRES_USER -d postgres -c "DROP DATABASE IF EXISTS $$POSTGRES_DB" || echo "failed to drop $$POSTGRES_DB"'
-
-create-db:
- @echo "Creating development database..."
- @docker compose up -d db
-# create development database only if it doesn't already exist
- @docker compose run --rm db sh -c 'echo "creating $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -h db -U $$POSTGRES_USER -d postgres -c "CREATE DATABASE $$POSTGRES_DB" || echo "$$POSTGRES_DB already exists"'
-
-migrate:
- @echo "Migrating the development database..."
- @docker compose up -d db
-# run all flyway migrations for the development environment
- @docker compose run --rm db-migrations migrate
-
-reset-db:
- @echo "Resetting the development database..."
- @docker compose down db
- @$(MAKE) drop-db
- @$(MAKE) create-db
- @$(MAKE) migrate
-
-setup: build create-db migrate
-# pass
-
-up:
- @echo "Running backend..."
- @docker compose up -d django-web
- @echo "Backend is running at http://localhost:8000/fhir/"
-
-down:
- @echo "Shutting down backend..."
- @docker compose down
-
-test-setup:
- @echo "Setting up test database..."
- @docker compose -f compose.test.yml up -d --wait db
-# drop, create, and migrate test database
- @docker compose -f compose.test.yml exec db sh -c 'echo "DROP $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -q -h localhost -U "$$POSTGRES_USER" -d postgres -c "DROP DATABASE IF EXISTS $$POSTGRES_DB"'
- @docker compose -f compose.test.yml exec db sh -c 'echo "CREATE $$POSTGRES_DB"; PGPASSWORD=$$POSTGRES_PASSWORD psql -q -h localhost -U "$$POSTGRES_USER" -d postgres -c "CREATE DATABASE $$POSTGRES_DB"'
- @docker compose -f compose.test.yml run --rm db-migrations migrate
-
-test: test-setup
- @echo "Running backend tests..."
- @docker compose -f compose.test.yml run --rm django-web python manage.py test
-
+.venv/bin/activate:
+ @echo "Creating .venv directory"
+ @python -m venv .venv
+
+.PHONY: install-tools
+install-tools:
+ @pip install ruff
+ @pip install pre-commit
+
+.PHONY: setup
+setup:
+ $(MAKE) .venv/bin/activate
+ $(MAKE) install-tools
+
+.PHONY: lint
+lint:
+ @ruff check .
+
+.PHONY: format
+format:
+# force ruff to fix isort (import sorting) errors in all files
+ @ruff check --select I --fix .
+# do ruff style formatting
+ @ruff format
+
+.PHONY: clean
clean:
- @echo "Cleaning up cache files..."
+ @echo "Cleaning up cache files, test artifacts..."
@find . -type d -name "__pycache__" -exec rm -rf {} +
@rm -rf artifacts/
@echo "Cleanup done."
-createsuperuser:
-ifeq ($(and $(DJANGO_SUPERUSER_EMAIL),$(DJANGO_SUPERUSER_USERNAME),$(DJANGO_SUPERUSER_PASSWORD)),)
- @docker compose run --rm django-web python manage.py createsuperuser
-else
- @docker compose run -e DJANGO_SUPERUSER_EMAIL="$(DJANGO_SUPERUSER_EMAIL)" \
- -e DJANGO_SUPERUSER_USERNAME="$(DJANGO_SUPERUSER_USERNAME)" \
- -e DJANGO_SUPERUSER_PASSWORD="$(DJANGO_SUPERUSER_PASSWORD)" \
- --rm django-web python manage.py createsuperuser --no-input
-endif
-
-seed-users:
- @docker compose run -e DJANGO_SUPERUSER_EMAIL="npd.admin@cms.hhs.gov" \
- -e DJANGO_SUPERUSER_USERNAME="npdadmin" \
- -e DJANGO_SUPERUSER_PASSWORD="password123" \
- --rm django-web python manage.py createsuperuser --no-input
diff --git a/backend/README.md b/backend/README.md
index 065597d0..b17f3b90 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -1,23 +1,28 @@
# npdfhir
-Django backend that provides a FHIR API for accessing data from the npd database.
+
+Django backend that provides a FHIR API for accessing data from the npd database and supporting code for the National Provider Directory search web application.
## Project structure
-* The npdfhir directory contains code that drives the api
-* The app directory contains code that controls the overall application
-* The root directory contains code for deploying the application within a docker container
+* `backend/` contains code for deploying the application within a docker container
+* `backend/app/` contains code that controls the overall application
+* `backend/npdfhir/` contains code that drives the FHIR API
+* `backend/provider_directory/` contains code that drives the landing page and search application
## Contributing to the API
+
### Prerequesites
+
- [docker](https://www.docker.com/)
- [colima](https://github.com/abiosoft/colima) (if using macOS)
- a postgres database with the npd schema
### Local dev
+
1. Ensure that either colima (if using macOS) or the docker service is running
2. Create a `.env` file in this directory, following the template of the `.env_template` file
* n.b. ensure that NPD_DB_HOST is set to `db` if using a local postgres instance.
-3. Run `docker-compose up --build` initially and following any changes
+3. Run `docker-compose up --build` from the project root initially, and again following any changes
4. Happy coding!
### Running Tests
@@ -27,24 +32,40 @@ Without docker:
1. Ensure that you have a running local postgres instance
2. Make sure that you have a working `.env` file as described above
3. Make sure all python dependencies are installed in a venv or otherwise
-4. Navigate to the `backend/` directory and run `./manage.py test`
+4. Navigate to the `backend/` directory and run `python manage.py test`
+
+All `make` commands below are run from the project root.
+
+With docker, full setup every time:
-With docker:
+1. Run `make test` from project root
-1. Run `make test`
+With docker, setup once:
+
+1. (one-time) Run `make test-setup`
+2. Run `make test-backend`
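+
+To run a subset of the backend suite, `ARGS` is passed through to `manage.py test`; the examples below mirror the root `Makefile` help text:
+
+```sh
+# run one app's tests
+make test-backend ARGS=npdfhir.tests
+
+# run a single test case
+make test-backend ARGS=provider_directory.tests.test_frontend_settings.TestFeatureFlags.test_returns_flags_json
+```
+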
### Automated Testing
-Automated tests run in a GitHub workflow whenever a PR is created or a push is made to the main branch. These tests run in docker and build the backend image each time before running the tests. The tests generate JUnit XML files that are used as artifacts. The artifacts are then uploaded in the same GitHub workflow.
+Automated tests run in a GitHub workflow whenever a PR is created or a push is made to the main branch. All tests must pass for PRs to be accepted.
+
+### Code quality
+
+We use [ruff](https://docs.astral.sh/ruff) to lint and format all Python in this project. All PRs must pass the linter to be accepted.
-Additionally, the artifacts are taken and used to populate an automated comment and check summary. The check summary can be viewed by navigating to the "checks" section of the PR. The automated comment is made first initially when the PR is created and subsequent comments are only made if the status of the tests change as new commits are added to the PR branch.
+You can install ruff with `pip install ruff`, `brew install ruff`, or `make install-tools` and run it natively or with the `make lint` command.
-The test checks are supported by an upstream GitHub action: [EnricoMi/publish-unit-test-result-action](github.com/EnricoMi/publish-unit-test-result-action). Further options for the action can be found on the README.md for that project.
+```sh
+make lint
+# runs `ruff check .`
+```
+You can format all code in the project with `make format`, or rely on the pre-commit hooks installed by `make install-tools` to format just the files you are making changes to.
## Documentation
-API documentation can be viewed at `/fhir/docs`. If you make any changes to the Views, please update the swagger schema accordingly.
+API documentation can be viewed at `/fhir/docs`. If you make any changes to the Views, please update the swagger schema accordingly.
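+
+For example, with the application running (`make up` from the project root), the endpoints referenced above are:
+
+```sh
+# FHIR API landing page
+curl http://localhost:8000/fhir/
+
+# interactive API documentation (open in a browser)
+# http://localhost:8000/fhir/docs/
+```
+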
## Understanding the Flow of Data through the FHIR API
+

\ No newline at end of file
diff --git a/backend/app/asgi.py b/backend/app/asgi.py
index a44d5f07..7bd66a36 100644
--- a/backend/app/asgi.py
+++ b/backend/app/asgi.py
@@ -11,6 +11,6 @@
from django.core.asgi import get_asgi_application
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
application = get_asgi_application()
diff --git a/infrastructure/envs/dev/terraform.tfvars b/backend/app/logging/__init__.py
similarity index 100%
rename from infrastructure/envs/dev/terraform.tfvars
rename to backend/app/logging/__init__.py
diff --git a/backend/app/logging/sql_trace_formatter.py b/backend/app/logging/sql_trace_formatter.py
new file mode 100644
index 00000000..84774b3e
--- /dev/null
+++ b/backend/app/logging/sql_trace_formatter.py
@@ -0,0 +1,23 @@
+import logging
+from datetime import datetime
+
+from structlog.typing import EventDict
+
+
+# convert django.db.backends SQL query log records to a structured form.
+# the default django.db.backends query log format is a single string:
+# '(%{duration}.3f) %{sql}s; args=%{args}s; alias=%{alias}s'
+# unpacking it into individual event_dict keys gives structured fields
+# (sql, args, alias, duration) instead of one formatted message.
+def unpack_sql_trace(logger: logging.Logger, method_name: str, event_dict: EventDict) -> EventDict:
+ record: logging.LogRecord = event_dict["_record"]
+
+ if record.name == "django.db.backends" and record.module == "utils":
+        # django.db.backends attaches sql, args, alias, and duration to the record as extra attributes
+ event_dict["event"] = "sql_query"
+ event_dict["alias"] = record.alias
+ event_dict["sql"] = record.sql
+ event_dict["args"] = record.args
+ event_dict["duration"] = record.duration
+ event_dict["created"] = datetime.fromtimestamp(record.created).isoformat()
+
+ return event_dict
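
For reference, a minimal sketch (not part of the patch) of what this processor does to a `django.db.backends` record; the `LogRecord` built here imitates the attributes Django's query logger attaches (`sql`, `args`, `alias`, `duration`):

```python
import logging

from app.logging.sql_trace_formatter import unpack_sql_trace

# Imitate a django.db.backends query record; `module` is derived from the
# pathname ("utils.py" -> "utils"), matching the processor's guard clause.
record = logging.LogRecord(
    name="django.db.backends",
    level=logging.DEBUG,
    pathname="django/db/backends/utils.py",
    lineno=0,
    msg="(0.001) SELECT 1; args=(); alias=default",
    args=None,
    exc_info=None,
)
record.sql = "SELECT 1"
record.args = ()
record.alias = "default"
record.duration = 0.001

event_dict = unpack_sql_trace(logging.getLogger(__name__), "debug", {"_record": record})
assert event_dict["event"] == "sql_query"
assert event_dict["duration"] == 0.001
```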
diff --git a/backend/app/settings.py b/backend/app/settings.py
index 5b97dcb5..3fa0439b 100644
--- a/backend/app/settings.py
+++ b/backend/app/settings.py
@@ -18,6 +18,8 @@
import structlog
from decouple import config
+from app.logging import sql_trace_formatter
+
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
@@ -25,118 +27,135 @@
# See https://docs.djangoproject.com/en/5.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
-SECRET_KEY = config('NPD_DJANGO_SECRET')
+SECRET_KEY = config("NPD_DJANGO_SECRET")
# SECURITY WARNING: don't run with debug turned on in production!
-DEBUG = config('DEBUG', cast=bool)
+DEBUG = config("DEBUG", cast=bool)
# Detect if tests are being run
-TESTING = 'test' in sys.argv
+TESTING = "test" in sys.argv or config("TESTING", default=False, cast=bool)
REQUIRE_AUTHENTICATION = config("NPD_REQUIRE_AUTHENTICATION", default=False, cast=bool)
if DEBUG:
- ALLOWED_HOSTS = ['localhost','127.0.0.1','0.0.0.0','testserver','django-web']
+ ALLOWED_HOSTS = ["localhost", "127.0.0.1", "0.0.0.0", "testserver", "django-web"]
else:
- ALLOWED_HOSTS = config("DJANGO_ALLOWED_HOSTS").split(',')
+ ALLOWED_HOSTS = config("DJANGO_ALLOWED_HOSTS").split(",")
-INTERNAL_APIS = config("DJANGO_ALLOWED_HOSTS").split(',')
+INTERNAL_APIS = config("DJANGO_ALLOWED_HOSTS").split(",")
# Application definition
INSTALLED_APPS = [
- 'npdfhir.apps.NPDFHIRConfig',
- 'django.contrib.admin',
- 'django.contrib.auth',
- 'django.contrib.contenttypes',
- 'django.contrib.sessions',
- 'django.contrib.messages',
- 'django.contrib.staticfiles',
- 'corsheaders',
- 'rest_framework',
- 'django_filters',
- 'drf_spectacular',
- 'xmlrunner',
- 'django_structlog',
+ "django.contrib.admin",
+ "django.contrib.auth",
+ "django.contrib.contenttypes",
+ "django.contrib.sessions",
+ "django.contrib.messages",
+ "django.contrib.staticfiles",
+ "corsheaders",
+ "rest_framework",
+ "django_filters",
+ "drf_spectacular",
+ "xmlrunner",
+ "django_structlog",
+ "flags",
+ "npdfhir.apps.NPDFHIRConfig",
+ "provider_directory.apps.ProviderDirectoryConfig",
]
if not TESTING:
- INSTALLED_APPS.append('debug_toolbar')
+ INSTALLED_APPS.append("debug_toolbar")
MIDDLEWARE = [
- 'django_structlog.middlewares.RequestMiddleware',
- 'npdfhir.middleware.HealthCheckMiddleware',
- 'corsheaders.middleware.CorsMiddleware',
- 'django.middleware.security.SecurityMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
+ "django_structlog.middlewares.RequestMiddleware",
+ "npdfhir.middleware.HealthCheckMiddleware",
+ "corsheaders.middleware.CorsMiddleware",
+ "django.middleware.security.SecurityMiddleware",
+ "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.middleware.csrf.CsrfViewMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.messages.middleware.MessageMiddleware",
+ "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
if REQUIRE_AUTHENTICATION:
- MIDDLEWARE.append('django.contrib.auth.middleware.LoginRequiredMiddleware')
+ MIDDLEWARE.append("django.contrib.auth.middleware.LoginRequiredMiddleware")
if not TESTING:
- MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
+ MIDDLEWARE.append("debug_toolbar.middleware.DebugToolbarMiddleware")
# This must come at the end.
-# We want the fhir urls to be entirely open
-CORS_URLS_REGEX = r'^/fhir/.*$'
+# We want the fhir and frontend API urls to be entirely open
+CORS_URLS_REGEX = r"^/(fhir|api)/.*$"
CORS_ALLOW_ALL_ORIGINS = True
-CORS_ALLOWED_METHODS = ['GET']
+CORS_ALLOWED_METHODS = ["GET"]
+
+CSRF_COOKIE_SECURE = config(
+ "DJANGO_CSRF_COOKIE_SECURE", cast=bool, default=False
+) # Only if using HTTPS
+CSRF_COOKIE_HTTPONLY = config(
+ "DJANGO_CSRF_COOKIE_HTTPONLY", cast=bool, default=False
+) # Must be False for JavaScript access
+CSRF_COOKIE_SAMESITE = config("DJANGO_CSRF_COOKIE_SAMESITE", default="Lax") # or 'Strict' or 'None'
+CSRF_TRUSTED_ORIGINS = config("DJANGO_CSRF_TRUSTED_DOMAINS", default="").split(
+ ","
+) # Add your domains
if DEBUG:
# in development, allow the frontend app to POST forms to the backend
- CSRF_TRUSTED_ORIGINS = ['http://localhost:8000', 'http://localhost:3000']
+ CSRF_TRUSTED_ORIGINS = [
+ "http://localhost:8000",
+ "http://localhost:8008",
+ "http://localhost:3000",
+ ]
-ROOT_URLCONF = 'app.urls'
-APPEND_SLASH = True # this is default, but we're making sure it's explicit
+ROOT_URLCONF = "app.urls"
+APPEND_SLASH = True # this is default, but we're making sure it's explicit
TEMPLATES = [
{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': [
- os.path.join(BASE_DIR, 'templates'),
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": [
+ os.path.join(BASE_DIR, "templates"),
# NOTE: (@abachman-dsac) this setup allows frontend/ to build directly
# into provider_directory/static/ and provider_directory.views.landing to
# reference the resulting index.html
- os.path.join(BASE_DIR, 'provider_directory', 'static'),
+ os.path.join(BASE_DIR, "provider_directory", "static"),
],
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'context_processors': [
- 'django.template.context_processors.request',
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
+ "APP_DIRS": True,
+ "OPTIONS": {
+ "context_processors": [
+ "django.template.context_processors.request",
+ "django.contrib.auth.context_processors.auth",
+ "django.contrib.messages.context_processors.messages",
],
},
},
]
-WSGI_APPLICATION = 'app.wsgi.application'
+WSGI_APPLICATION = "app.wsgi.application"
# Database
# https://docs.djangoproject.com/en/5.2/ref/settings/#databases
DATABASES = {
- 'default': {
- 'ENGINE': config('NPD_DB_ENGINE'),
- 'USER': config('NPD_DB_USER'),
- 'PASSWORD': config('NPD_DB_PASSWORD'),
- 'HOST': config('NPD_DB_HOST'),
- 'NAME': config('NPD_DB_NAME'),
- 'PORT': config('NPD_DB_PORT'),
+ "default": {
+ "ENGINE": config("NPD_DB_ENGINE"),
+ "USER": config("NPD_DB_USER"),
+ "PASSWORD": config("NPD_DB_PASSWORD"),
+ "HOST": config("NPD_DB_HOST"),
+ "NAME": config("NPD_DB_NAME"),
+ "PORT": config("NPD_DB_PORT"),
"TEST": {
# Django will create a new test DB with this name prefix
"NAME": f"{os.getenv('NPD_DB_NAME', 'npd')}",
- "MIRROR": "default", # optional: avoids creating a test DB
+ "MIRROR": "default", # optional: avoids creating a test DB
},
- 'OPTIONS': {
- 'options': '-c search_path=npd,public',
+ "OPTIONS": {
+ "options": "-c search_path=npd,public",
"pool": {
# our default gunicorn container configuration only spins up 3 workers
"min_size": 2,
@@ -146,15 +165,15 @@
# after 2 clients are waiting for connections, subsequent requests should immediately fail
"max_waiting": 2,
},
- }
+ },
}
}
-TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
+TEST_RUNNER = "xmlrunner.extra.djangotestrunner.XMLTestRunner"
# Directory where XML reports will be written
-TEST_OUTPUT_DIR = './artifacts/test-reports'
+TEST_OUTPUT_DIR = "./artifacts/test-reports"
FIXTURE_DIRS = [
- os.path.join(BASE_DIR, 'provider_directory', 'fixtures'),
+ os.path.join(BASE_DIR, "provider_directory", "fixtures"),
]
# Password validation
@@ -162,16 +181,16 @@
AUTH_PASSWORD_VALIDATORS = [
{
- 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+ "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
- 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
+ "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
- 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+ "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
- 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+ "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
@@ -179,9 +198,9 @@
# Internationalization
# https://docs.djangoproject.com/en/5.2/topics/i18n/
-LANGUAGE_CODE = 'en-us'
+LANGUAGE_CODE = "en-us"
-TIME_ZONE = 'UTC'
+TIME_ZONE = "UTC"
USE_I18N = True
@@ -190,11 +209,11 @@
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/5.2/howto/static-files/
-STATIC_URL = 'static/'
+STATIC_URL = "static/"
STATICFILES_DIRS = [
- os.path.join(BASE_DIR, 'static'),
- os.path.join(BASE_DIR, 'provider_directory', 'static'),
+ os.path.join(BASE_DIR, "static"),
+ os.path.join(BASE_DIR, "provider_directory", "static"),
]
# STATICFILES_DIRS = [
@@ -204,7 +223,7 @@
# Default primary key field type
# https://docs.djangoproject.com/en/5.2/ref/settings/#default-auto-field
-DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
+DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
REST_FRAMEWORK = {
"DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"],
@@ -212,17 +231,18 @@
"rest_framework.authentication.BasicAuthentication",
"rest_framework.authentication.SessionAuthentication",
],
- 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
+ "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
+ "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
}
SPECTACULAR_SETTINGS = {
- 'TITLE': 'NPD FHIR API',
- 'DESCRIPTION': 'Developers can query and retrieve National Provider Directory data via a REST API. The API structure conforms to the HL7 Fast Healthcare Interoperability Resources (FHIR) standard and it returns JSON responses following the FHIR specification.',
- 'VERSION': 'beta',
- 'CONTACT': {'email': 'npd@cms.hhs.gov'},
- 'LICENSE': {'name': 'CC0-1.0 License'},
- 'SERVE_INCLUDE_SCHEMA': False,
- 'COMPONENT_SPLIT_REQUEST': True
+ "TITLE": "NPD FHIR API",
+ "DESCRIPTION": "Developers can query and retrieve National Provider Directory data via a REST API. The API structure conforms to the HL7 Fast Healthcare Interoperability Resources (FHIR) standard and it returns JSON responses following the FHIR specification.",
+ "VERSION": "beta",
+ "CONTACT": {"email": "npd@cms.hhs.gov"},
+ "LICENSE": {"name": "CC0-1.0 License"},
+ "SERVE_INCLUDE_SCHEMA": False,
+ "COMPONENT_SPLIT_REQUEST": True,
}
if REQUIRE_AUTHENTICATION:
@@ -232,9 +252,7 @@
LOGIN_URL = "/accounts/login/"
LOGOUT_REDIRECT_URL = LOGIN_URL
-DEBUG_TOOLBAR_CONFIG = {
- 'SHOW_TOOLBAR_CALLBACK': lambda request: DEBUG
-}
+DEBUG_TOOLBAR_CONFIG = {"SHOW_TOOLBAR_CALLBACK": lambda request: DEBUG and not TESTING}
CACHES = {
"default": {
@@ -243,7 +261,31 @@
}
}
-if TESTING:
+SWAGGER_SETTINGS = {"USE_SESSION_AUTH": False}
+
+# feature flags
+FLAGS = {
+ "SEARCH_APP": [], # can see the search app at all
+ "PRACTITIONER_LOOKUP": [], # can reach the provider lookup page
+ "PRACTITIONER_LOOKUP_DETAILS": [], # can reach all details in the provider lookup page
+ "ORGANIZATION_LOOKUP": [],
+ "ORGANIZATION_LOOKUP_DETAILS": [],
+ # static conditions can be defined in this file or through the Admin interface
+ # see the list of built-in conditions here: https://cfpb.github.io/django-flags/conditions/
+ # 'ANONYMOUS_USER': [
+ # {"condition": "anonymous", "value": True}
+ # ],
+ # 'FLAG_WITH_ANY_CONDITIONS': [
+ # {'condition': 'condition name', 'value': 'expected value to be enabled'},
+ # {'condition': 'user', 'value': 'npd@cms.hhs.gov'},
+ # ],
+}
+
+SQL_TRACING = DEBUG and config("SQL_TRACING", default=False, cast=bool)
+
+if TESTING and SQL_TRACING:
+ LOG_LEVEL = logging.DEBUG
+elif TESTING:
LOG_LEVEL = logging.ERROR
else:
LOG_LEVEL = os.environ.get("LOG_LEVEL", logging.INFO)
@@ -270,7 +312,9 @@
},
"key_value": {
"()": structlog.stdlib.ProcessorFormatter,
- "processor": structlog.processors.KeyValueRenderer(key_order=['timestamp', 'level', 'event', 'logger']),
+ "processor": structlog.processors.KeyValueRenderer(
+ key_order=["timestamp", "level", "event", "logger"]
+ ),
},
},
# Custom handler config that gets log messages and outputs them to console
@@ -285,9 +329,38 @@
"handlers": ["console"],
"level": LOG_LEVEL,
},
+ "django.security.csrf": {"handlers": ["console"], "level": LOG_LEVEL, "propagate": False},
},
}
+# DB query logging
+if SQL_TRACING:
+ # allow django libraries to log
+ LOGGING["disable_existing_loggers"] = False
+
+ # inject a django.db.backend message unpacker
+ LOGGING["formatters"]["json_formatter"]["foreign_pre_chain"].append(
+ sql_trace_formatter.unpack_sql_trace,
+ )
+
+ # write SQL query logs to backend/django_queries.log
+ LOGGING["handlers"]["queries_file"] = {
+ "level": LOG_LEVEL,
+ "class": "logging.FileHandler",
+ "filename": "django_queries.log",
+ "formatter": "json_formatter",
+ }
+ LOGGING["loggers"]["django.db.backends"] = {
+ "handlers": ["queries_file"],
+ "level": "DEBUG",
+ "propagate": False,
+ }
+
+ # ... keep these libraries quiet to avoid noisy test output
+ LOGGING["loggers"]["django_structlog"] = {"level": logging.ERROR, "propagate": False}
+ LOGGING["loggers"]["django.request"] = {"level": logging.ERROR, "propagate": False}
+
+
structlog.configure(
processors=[
structlog.contextvars.merge_contextvars,
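
The FLAGS defined above are evaluated with django-flags' documented helpers; a minimal sketch of gating a view (the view class is hypothetical, not part of this patch):

```python
from flags.state import flag_enabled
from rest_framework.response import Response
from rest_framework.views import APIView


class PractitionerLookup(APIView):  # hypothetical example view
    def get(self, request):
        # flags configured with the in_group/Developers condition (see the
        # seeduser command later in this diff) are evaluated per request
        # against the authenticated user's groups
        if not flag_enabled("PRACTITIONER_LOOKUP", request=request):
            return Response(status=404)
        return Response({"enabled": True})
```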
diff --git a/backend/app/tests/test_routing.py b/backend/app/tests/test_routing.py
index d3c239c3..3ac28004 100644
--- a/backend/app/tests/test_routing.py
+++ b/backend/app/tests/test_routing.py
@@ -1,5 +1,5 @@
-from django.urls import resolve, reverse
from django.test import TestCase
+from django.urls import resolve, reverse
class ProviderDirectorySpaRouting(TestCase):
@@ -77,5 +77,5 @@ def test_fhir_rest_routes_reverse_without_slash(self):
endpoint_list_path = reverse("fhir-endpoint-list")
self.assertEqual(endpoint_list_path, "/fhir/Endpoint")
- endpoint_detail_path = reverse("fhir-endpoint-detail", kwargs={"pk": 12345})
+ endpoint_detail_path = reverse("fhir-endpoint-detail", kwargs={"id": 12345})
self.assertEqual(endpoint_detail_path, "/fhir/Endpoint/12345")
diff --git a/backend/app/urls.py b/backend/app/urls.py
index 9cc83e20..b176207e 100644
--- a/backend/app/urls.py
+++ b/backend/app/urls.py
@@ -14,9 +14,10 @@
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
+
+from debug_toolbar.toolbar import debug_toolbar_urls
from django.contrib import admin
from django.urls import include, path
-from debug_toolbar.toolbar import debug_toolbar_urls
from npdfhir.router import router as npdfhir_router
@@ -46,9 +47,9 @@
#
# See app/tests/test_routing.py for validation tests to ensure that changes
# inside npdfhir.urls don't break our routing configuration.
- path('fhir/', include("npdfhir.urls")),
- path('fhir', npdfhir_router.get_api_root_view, name='api-root'),
- path('admin/', admin.site.urls),
+ path("fhir/", include("npdfhir.urls")),
+ path("fhir", npdfhir_router.get_api_root_view(), name="api-root"),
+ path("admin/", admin.site.urls),
# everything else goes to provider_directory
- path('', include('provider_directory.urls')),
+ path("", include("provider_directory.urls")),
] + debug_toolbar_urls()
diff --git a/backend/app/wsgi.py b/backend/app/wsgi.py
index af656549..25baa8e8 100644
--- a/backend/app/wsgi.py
+++ b/backend/app/wsgi.py
@@ -11,6 +11,6 @@
from django.core.wsgi import get_wsgi_application
-os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
application = get_wsgi_application()
diff --git a/backend/compose.test.yml b/backend/compose.test.yml
deleted file mode 100644
index 1ab8ce8f..00000000
--- a/backend/compose.test.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: npd
-
-volumes:
- postgres_data:
-
-services:
- db:
- extends:
- file: docker-compose.yml
- service: db
- environment:
- POSTGRES_DB: npd_test
-
- db-migrations:
- extends:
- file: docker-compose.yml
- service: db-migrations
- environment:
- FLYWAY_URL: jdbc:postgresql://${NPD_DB_HOST:-db}:${NPD_DB_PORT:-5432}/npd_test
-
- django-web:
- extends:
- file: docker-compose.yml
- service: django-web
- environment:
- NPD_DB_NAME: npd_test
\ No newline at end of file
diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml
deleted file mode 100644
index d08a8c85..00000000
--- a/backend/docker-compose.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-# service:
-# image: or build:
-# env_file:
-# environment:
-# ports:
-# volumes:
-# networks:
-# depends_on:
-# command:
-name: npd
-
-services:
- db:
- image: 'postgres:17'
- env_file:
- - path: .env
- required: false
- environment:
- POSTGRES_DB: ${NPD_DB_NAME:-npd_development}
- POSTGRES_USER: ${NPD_DB_USER:-postgres}
- POSTGRES_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
- PGUSER: ${NPD_DB_USER:-postgres}
- ports:
- - '${NPD_DB_PORT:-5432}:5432'
- volumes:
- - 'postgres_data:/var/lib/postgresql/data'
- healthcheck:
- test: ["CMD-SHELL", "pg_isready", "-d", "${NPD_DB_NAME:-npd_development}"]
- interval: 10s
- timeout: 5s
- retries: 5
-
- db-migrations:
- image: 'flyway/flyway:10'
- env_file:
- - path: .env
- required: false
- environment:
- FLYWAY_URL: jdbc:postgresql://${NPD_DB_HOST:-db}:5432/${NPD_DB_NAME:-npd_development}
- FLYWAY_USER: ${NPD_DB_USER:-postgres}
- FLYWAY_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
- FLYWAY_PLACEHOLDERS_apiSchema: ${NPD_DB_SCHEMA:-npd}
- FLYWAY_PLACEHOLDERS_superuserDefaultPassword: ""
- volumes:
- - '../flyway/sql:/flyway/sql'
- depends_on:
- - db
- command: migrate -environment=development -outputType=json
-
- django-web:
- build: .
- container_name: npd
- env_file:
- - path: .env
- required: false
- environment:
- NPD_DJANGO_SECRET: ${NPD_DJANGO_SECRET:-_pth2#=k8-wf-_^t%2))it+3..8la^@@97^#ock7.v=@792w7}
- DEBUG: ${DEBUG:-True}
- DJANGO_LOGLEVEL: ${DJANGO_LOGLEVEL:-INFO}
- DJANGO_ALLOWED_HOSTS: "${DJANGO_ALLOWED_HOSTS:-['localhost','127.0.0.1','0.0.0.0']}"
- NPD_DB_ENGINE: ${NPD_DB_ENGINE:-django.db.backends.postgresql}
- NPD_DB_NAME: ${NPD_DB_NAME:-npd_development}
- NPD_DB_USER: ${NPD_DB_USER:-postgres}
- NPD_DB_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
- NPD_DB_HOST: ${NPD_DB_HOST:-db}
- NPD_DB_PORT: ${NPD_DB_PORT:-5432}
- NPD_REQUIRE_AUTHENTICATION: False
- ports:
- - '8000:8000'
- volumes:
- - '.:/app'
- - ./artifacts:/app/artifacts:rw
- - ./provider_directory/static:/app/provider_directory/static:rw
- depends_on:
- - db
-
-volumes:
- postgres_data:
diff --git a/backend/manage.py b/backend/manage.py
index 49313893..923e331a 100755
--- a/backend/manage.py
+++ b/backend/manage.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
+
import os
import sys
def main():
"""Run administrative tasks."""
- os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings')
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
@@ -18,5 +19,5 @@ def main():
execute_from_command_line(sys.argv)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/backend/npdfhir/admin.py b/backend/npdfhir/admin.py
index 8c38f3f3..846f6b40 100644
--- a/backend/npdfhir/admin.py
+++ b/backend/npdfhir/admin.py
@@ -1,3 +1 @@
-from django.contrib import admin
-
# Register your models here.
diff --git a/backend/npdfhir/apps.py b/backend/npdfhir/apps.py
index 019e1eea..61a4af1d 100644
--- a/backend/npdfhir/apps.py
+++ b/backend/npdfhir/apps.py
@@ -1,9 +1,10 @@
from django.apps import AppConfig
+
class NPDFHIRConfig(AppConfig):
- default_auto_field = 'django.db.models.BigAutoField'
- name = 'npdfhir'
+ default_auto_field = "django.db.models.BigAutoField"
+ name = "npdfhir"
def ready(self):
# Import signals module to connect the handlers
- import npdfhir.signals
\ No newline at end of file
+ pass
diff --git a/backend/npdfhir/cache.py b/backend/npdfhir/cache.py
index d9f471fe..8bcd5a8f 100644
--- a/backend/npdfhir/cache.py
+++ b/backend/npdfhir/cache.py
@@ -12,7 +12,7 @@ def cacheData(model):
if not data:
data = {}
for obj in model.objects.all():
- if hasattr(obj, 'display_name'):
+ if hasattr(obj, "display_name"):
data[str(obj.code)] = obj.display_name
else:
data[str(obj.id)] = obj.value
@@ -23,7 +23,7 @@ def cacheData(model):
return data
-if 'runserver' or 'test' in sys.argv:
+if "runserver" or "test" in sys.argv:
other_identifier_type = cacheData(OtherIdType)
fhir_name_use = cacheData(FhirNameUse)
nucc_taxonomy_codes = cacheData(Nucc)
diff --git a/backend/npdfhir/filters/__init__.py b/backend/npdfhir/filters/__init__.py
index bcf172e6..e69de29b 100644
--- a/backend/npdfhir/filters/__init__.py
+++ b/backend/npdfhir/filters/__init__.py
@@ -1,13 +0,0 @@
-from .endpoint_filter_set import EndpointFilterSet
-from .location_filter_set import LocationFilterSet
-from .organization_filter_set import OrganizationFilterSet
-from .practitioner_filter_set import PractitionerFilterSet
-from .practitioner_role_filter_set import PractitionerRoleFilterSet
-
-__all__ = [
- "EndpointFilterSet",
- "LocationFilterSet",
- "OrganizationFilterSet",
- "PractitionerFilterSet",
- "PractitionerRoleFilterSet",
-]
\ No newline at end of file
diff --git a/backend/npdfhir/filters/endpoint_filter_set.py b/backend/npdfhir/filters/endpoint_filter_set.py
index 8dd49a9d..fbfdf0b3 100644
--- a/backend/npdfhir/filters/endpoint_filter_set.py
+++ b/backend/npdfhir/filters/endpoint_filter_set.py
@@ -1,43 +1,38 @@
from django_filters import rest_framework as filters
+
from ..models import EndpointInstance
class EndpointFilterSet(filters.FilterSet):
name = filters.CharFilter(
- field_name='name',
- lookup_expr='icontains',
- help_text='Filter by name'
- )
-
- connection_type=filters.CharFilter(
- field_name='endpoint_connection_type__id',
- lookup_expr='icontains',
- help_text='Filter by connection type'
+ field_name="name", lookup_expr="icontains", help_text="Filter by name"
)
- payload_type=filters.CharFilter(
- field_name='endpointinstancetopayload__payload_type__id',
- lookup_expr='icontains',
- help_text='Filter by payload type'
+ connection_type = filters.CharFilter(
+ field_name="endpoint_connection_type__id",
+ lookup_expr="icontains",
+ help_text="Filter by connection type",
)
- status=filters.CharFilter(
- method='filter_status',
- help_text='Filter by status'
+ payload_type = filters.CharFilter(
+ field_name="endpointinstancetopayload__payload_type__id",
+ lookup_expr="icontains",
+ help_text="Filter by payload type",
)
- organization=filters.CharFilter(
- method='filter_organization',
- help_text='Filter by organization'
+ status = filters.CharFilter(method="filter_status", help_text="Filter by status")
+
+ organization = filters.CharFilter(
+ method="filter_organization", help_text="Filter by organization"
)
class Meta:
model = EndpointInstance
- fields = ['name', 'connection_type', 'payload_type', 'status', 'organization']
+ fields = ["name", "connection_type", "payload_type", "status", "organization"]
def filter_status(self, queryset, name, value):
# needs to be implemented
- return queryset
+ return queryset
def filter_organization(self, queryset, name, value):
# needs to be implemented
diff --git a/backend/npdfhir/filters/location_filter_set.py b/backend/npdfhir/filters/location_filter_set.py
index 20366fc5..32326073 100644
--- a/backend/npdfhir/filters/location_filter_set.py
+++ b/backend/npdfhir/filters/location_filter_set.py
@@ -1,87 +1,78 @@
-from django_filters import rest_framework as filters
from django.contrib.postgres.search import SearchVector
-from django.db.models import Q
+from django_filters import rest_framework as filters
-from ..models import Location
from ..mappings import addressUseMapping
+from ..models import Location
-class LocationFilterSet(filters.FilterSet):
+class LocationFilterSet(filters.FilterSet):
name = filters.CharFilter(
- field_name='name',
- lookup_expr='exact',
- help_text='Filter by location name'
+ field_name="name", lookup_expr="exact", help_text="Filter by location name"
)
organization_type = filters.CharFilter(
- method="filter_organization_type",
- help_text="Filter by organization type"
- )
-
- address = filters.CharFilter(
- method='filter_address',
- help_text='Filter by any part of address'
+ method="filter_organization_type", help_text="Filter by organization type"
)
-
- address_city = filters.CharFilter(
- method='filter_address_city',
- help_text='Filter by city name'
- )
-
+
+ address = filters.CharFilter(method="filter_address", help_text="Filter by any part of address")
+
+ address_city = filters.CharFilter(method="filter_address_city", help_text="Filter by city name")
+
address_state = filters.CharFilter(
- method='filter_address_state',
- help_text='Filter by state (2-letter abbreviation)'
+ method="filter_address_state", help_text="Filter by state (2-letter abbreviation)"
)
-
+
address_postalcode = filters.CharFilter(
- method='filter_address_postalcode',
- help_text='Filter by postal code/zip code'
+ method="filter_address_postalcode", help_text="Filter by postal code/zip code"
)
-
+
address_use = filters.ChoiceFilter(
- method='filter_address_use',
+ method="filter_address_use",
choices=addressUseMapping.to_choices(),
- help_text='Filter by address use type',
+ help_text="Filter by address use type",
)
class Meta:
model = Location
- fields = ['name', 'address', 'address_city', 'address_state',
- 'address_postalcode', 'address_use']
-
+ fields = [
+ "name",
+ "address",
+ "address_city",
+ "address_state",
+ "address_postalcode",
+ "address_use",
+ ]
+
def filter_organization_type(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'organizationtotaxonomy__nucc_code__display_name')
+ search=SearchVector("organizationtotaxonomy__nucc_code__display_name")
).filter(search=value)
def filter_address(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'address__address_us__delivery_line_1',
- 'address__address_us__delivery_line_2',
- 'address__address_us__city_name',
- 'address__address_us__state_code__abbreviation',
- 'address__address_us__zipcode'
+ "address__address_us__delivery_line_1",
+ "address__address_us__delivery_line_2",
+ "address__address_us__city_name",
+ "address__address_us__state_code__abbreviation",
+ "address__address_us__zipcode",
)
).filter(search=value)
def filter_address_city(self, queryset, name, value):
- return queryset.annotate(
- search=SearchVector('address__address_us__city_name')
- ).filter(search=value)
+ return queryset.annotate(search=SearchVector("address__address_us__city_name")).filter(
+ search=value
+ )
def filter_address_state(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'address__address_us__state_code__abbreviation'
- )
+ search=SearchVector("address__address_us__state_code__abbreviation")
).filter(search=value)
def filter_address_postalcode(self, queryset, name, value):
- return queryset.annotate(
- search=SearchVector('address__address_us__zipcode')
- ).filter(search=value)
+ return queryset.annotate(search=SearchVector("address__address_us__zipcode")).filter(
+ search=value
+ )
def filter_address_use(self, queryset, name, value):
if value in addressUseMapping.keys():
diff --git a/backend/npdfhir/filters/organization_filter_set.py b/backend/npdfhir/filters/organization_filter_set.py
index 5fe06e7e..4bfe9d78 100644
--- a/backend/npdfhir/filters/organization_filter_set.py
+++ b/backend/npdfhir/filters/organization_filter_set.py
@@ -1,77 +1,75 @@
-from django_filters import rest_framework as filters
from django.contrib.postgres.search import SearchVector
from django.db.models import Q
+from django_filters import rest_framework as filters
-from ..models import Organization
from ..mappings import addressUseMapping
+from ..models import Organization
from ..utils import parse_identifier_query
class OrganizationFilterSet(filters.FilterSet):
- name = filters.CharFilter(
- method='filter_name',
- help_text='Filter by organization name'
- )
-
+ name = filters.CharFilter(method="filter_name", help_text="Filter by organization name")
+
identifier = filters.CharFilter(
- method='filter_identifier',
- help_text='Filter by identifier (NPI, EIN, or other). Format: value or system|value'
+ method="filter_identifier",
+ help_text="Filter by identifier (NPI, EIN, or other). Format: value or system|value",
)
-
+
organization_type = filters.CharFilter(
- method='filter_organization_type',
- help_text='Filter by organization type/taxonomy'
- )
-
- address = filters.CharFilter(
- method='filter_address',
- help_text='Filter by any part of address'
+ method="filter_organization_type", help_text="Filter by organization type/taxonomy"
)
-
- address_city = filters.CharFilter(
- method='filter_address_city',
- help_text='Filter by city name'
- )
-
+
+ address = filters.CharFilter(method="filter_address", help_text="Filter by any part of address")
+
+ address_city = filters.CharFilter(method="filter_address_city", help_text="Filter by city name")
+
address_state = filters.CharFilter(
- method='filter_address_state',
- help_text='Filter by state (2-letter abbreviation)'
+ method="filter_address_state", help_text="Filter by state (2-letter abbreviation)"
)
-
+
address_postalcode = filters.CharFilter(
- method='filter_address_postalcode',
- help_text='Filter by postal code/zip code'
+ method="filter_address_postalcode", help_text="Filter by postal code/zip code"
)
-
+
address_use = filters.ChoiceFilter(
- method='filter_address_use',
+ method="filter_address_use",
choices=addressUseMapping.to_choices(),
- help_text='Filter by address use type'
+ help_text="Filter by address use type",
)
class Meta:
model = Organization
- fields = ['name', 'identifier', 'organization_type', 'address',
- 'address_city', 'address_state', 'address_postalcode', 'address_use']
+ fields = [
+ "name",
+ "identifier",
+ "organization_type",
+ "address",
+ "address_city",
+ "address_state",
+ "address_postalcode",
+ "address_use",
+ ]
def filter_name(self, queryset, name, value):
- return queryset.annotate(
- search=SearchVector('organizationtoname__name')
- ).filter(search=value).distinct()
+ return (
+ queryset.annotate(search=SearchVector("organizationtoname__name"))
+ .filter(search=value)
+ .distinct()
+ )
def filter_identifier(self, queryset, name, value):
from uuid import UUID
-
+
system, identifier_id = parse_identifier_query(value)
queries = Q(pk__isnull=True)
- if system: # specific identifier search requested
- if system.upper() == 'NPI':
+ if system: # specific identifier search requested
+ if system.upper() == "NPI":
try:
queries = Q(clinicalorganization__npi__npi=int(identifier_id))
except (ValueError, TypeError):
- pass # TODO: implement validationerror to show users that NPI must be an int
- else: # general identifier search requested
+                pass  # TODO: raise a ValidationError so users know the NPI must be an integer
+ else: # general identifier search requested
try:
queries |= Q(clinicalorganization__npi__npi=int(identifier_id))
except (ValueError, TypeError):
@@ -90,40 +88,36 @@ def filter_identifier(self, queryset, name, value):
def filter_organization_type(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'clinicalorganization__organizationtotaxonomy__nucc_code__display_name'
+ "clinicalorganization__organizationtotaxonomy__nucc_code__display_name"
)
).filter(search=value)
def filter_address(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'organizationtoaddress__address__address_us__delivery_line_1',
- 'organizationtoaddress__address__address_us__delivery_line_2',
- 'organizationtoaddress__address__address_us__city_name',
- 'organizationtoaddress__address__address_us__state_code__abbreviation',
- 'organizationtoaddress__address__address_us__zipcode'
+ "organizationtoaddress__address__address_us__delivery_line_1",
+ "organizationtoaddress__address__address_us__delivery_line_2",
+ "organizationtoaddress__address__address_us__city_name",
+ "organizationtoaddress__address__address_us__state_code__abbreviation",
+ "organizationtoaddress__address__address_us__zipcode",
)
).filter(search=value)
def filter_address_city(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'organizationtoaddress__address__address_us__city_name'
- )
+ search=SearchVector("organizationtoaddress__address__address_us__city_name")
).filter(search=value)
def filter_address_state(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'organizationtoaddress__address__address_us__state_code__abbreviation'
+ "organizationtoaddress__address__address_us__state_code__abbreviation"
)
).filter(search=value)
def filter_address_postalcode(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'organizationtoaddress__address__address_us__zipcode'
- )
+ search=SearchVector("organizationtoaddress__address__address_us__zipcode")
).filter(search=value)
def filter_address_use(self, queryset, name, value):
@@ -131,6 +125,4 @@ def filter_address_use(self, queryset, name, value):
value = addressUseMapping.toNPD(value)
else:
value = -1
- return queryset.filter(
- organizationtoaddress__address_use_id=value
- )
\ No newline at end of file
+ return queryset.filter(organizationtoaddress__address_use_id=value)
diff --git a/backend/npdfhir/filters/practitioner_filter_set.py b/backend/npdfhir/filters/practitioner_filter_set.py
index bd44a5ce..8fe9ef79 100644
--- a/backend/npdfhir/filters/practitioner_filter_set.py
+++ b/backend/npdfhir/filters/practitioner_filter_set.py
@@ -1,78 +1,74 @@
-from django_filters import rest_framework as filters
from django.contrib.postgres.search import SearchVector
from django.db.models import Q
+from django_filters import rest_framework as filters
-from ..models import Provider
from ..mappings import addressUseMapping, genderMapping
+from ..models import Provider
from ..utils import parse_identifier_query
class PractitionerFilterSet(filters.FilterSet):
identifier = filters.CharFilter(
- method='filter_identifier',
- help_text='Filter by identifier (NPI or other). Format: value or system|value'
+ method="filter_identifier",
+ help_text="Filter by identifier (NPI or other). Format: value or system|value",
)
-
+
name = filters.CharFilter(
- method='filter_name',
- help_text='Filter by practitioner name (first, last, or full name)'
+ method="filter_name", help_text="Filter by practitioner name (first, last, or full name)"
)
-
+
gender = filters.ChoiceFilter(
- method='filter_gender',
- choices=genderMapping.to_choices(),
- help_text='Filter by gender'
+ method="filter_gender", choices=genderMapping.to_choices(), help_text="Filter by gender"
)
-
+
practitioner_type = filters.CharFilter(
- method='filter_practitioner_type',
- help_text='Filter by practitioner type/taxonomy'
- )
-
- address = filters.CharFilter(
- method='filter_address',
- help_text='Filter by any part of address'
+ method="filter_practitioner_type", help_text="Filter by practitioner type/taxonomy"
)
-
- address_city = filters.CharFilter(
- method='filter_address_city',
- help_text='Filter by city name'
- )
-
+
+ address = filters.CharFilter(method="filter_address", help_text="Filter by any part of address")
+
+ address_city = filters.CharFilter(method="filter_address_city", help_text="Filter by city name")
+
address_state = filters.CharFilter(
- method='filter_address_state',
- help_text='Filter by state (2-letter abbreviation)'
+ method="filter_address_state", help_text="Filter by state (2-letter abbreviation)"
)
-
+
address_postalcode = filters.CharFilter(
- method='filter_address_postalcode',
- help_text='Filter by postal code/zip code'
+ method="filter_address_postalcode", help_text="Filter by postal code/zip code"
)
-
+
address_use = filters.ChoiceFilter(
- method='filter_address_use',
+ method="filter_address_use",
choices=addressUseMapping.to_choices(),
- help_text='Filter by address use type'
+ help_text="Filter by address use type",
)
class Meta:
model = Provider
- fields = ['identifier', 'name', 'gender', 'practitioner_type',
- 'address', 'address_city', 'address_state',
- 'address_postalcode', 'address_use']
+ fields = [
+ "identifier",
+ "name",
+ "gender",
+ "practitioner_type",
+ "address",
+ "address_city",
+ "address_state",
+ "address_postalcode",
+ "address_use",
+ ]
def filter_gender(self, queryset, name, value):
if value in genderMapping.keys():
value = genderMapping.toNPD(value)
-
+
return queryset.filter(individual__gender=value)
-
+
def filter_identifier(self, queryset, name, value):
system, identifier_id = parse_identifier_query(value)
queries = Q(pk__isnull=True)
if system: # specific identifier search requested
- if system.upper() == 'NPI':
+ if system.upper() == "NPI":
try:
queries = Q(npi__npi=int(identifier_id))
except (ValueError, TypeError):
@@ -90,47 +86,43 @@ def filter_identifier(self, queryset, name, value):
def filter_name(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'individual__individualtoname__first_name',
- 'individual__individualtoname__last_name',
- 'individual__individualtoname__middle_name'
+ "individual__individualtoname__first_name",
+ "individual__individualtoname__last_name",
+ "individual__individualtoname__middle_name",
)
).filter(search=value)
def filter_practitioner_type(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector('providertotaxonomy__nucc_code__display_name')
+ search=SearchVector("providertotaxonomy__nucc_code__display_name")
).filter(search=value)
def filter_address(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'individual__individualtoaddress__address__address_us__delivery_line_1',
- 'individual__individualtoaddress__address__address_us__delivery_line_2',
- 'individual__individualtoaddress__address__address_us__city_name',
- 'individual__individualtoaddress__address__address_us__state_code__abbreviation',
- 'individual__individualtoaddress__address__address_us__zipcode'
+ "individual__individualtoaddress__address__address_us__delivery_line_1",
+ "individual__individualtoaddress__address__address_us__delivery_line_2",
+ "individual__individualtoaddress__address__address_us__city_name",
+ "individual__individualtoaddress__address__address_us__state_code__abbreviation",
+ "individual__individualtoaddress__address__address_us__zipcode",
)
).filter(search=value)
def filter_address_city(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'individual__individualtoaddress__address__address_us__city_name'
- )
+ search=SearchVector("individual__individualtoaddress__address__address_us__city_name")
).filter(search=value)
def filter_address_state(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'individual__individualtoaddress__address__address_us__state_code__abbreviation'
+ "individual__individualtoaddress__address__address_us__state_code__abbreviation"
)
).filter(search=value)
def filter_address_postalcode(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'individual__individualtoaddress__address__address_us__zipcode'
- )
+ search=SearchVector("individual__individualtoaddress__address__address_us__zipcode")
).filter(search=value)
def filter_address_use(self, queryset, name, value):
@@ -138,6 +130,4 @@ def filter_address_use(self, queryset, name, value):
value = addressUseMapping.toNPD(value)
else:
value = -1
- return queryset.filter(
- individual__individualtoaddress__address_use_id=value
- )
\ No newline at end of file
+ return queryset.filter(individual__individualtoaddress__address_use_id=value)
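
Since these are plain django-filter FilterSets, the query parameters documented in the help_text bind directly to them; a quick sketch outside the request cycle (the filter values are illustrative):

```python
from npdfhir.filters.practitioner_filter_set import PractitionerFilterSet
from npdfhir.models import Provider

# Bind querystring-style parameters and read the filtered queryset from .qs
filterset = PractitionerFilterSet(
    data={"name": "smith", "address_state": "MD"},
    queryset=Provider.objects.all(),
)
assert filterset.is_valid()
practitioners = filterset.qs  # annotated with the SearchVector lookups above
```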
diff --git a/backend/npdfhir/filters/practitioner_role_filter_set.py b/backend/npdfhir/filters/practitioner_role_filter_set.py
index 925ad1ab..53dab78d 100644
--- a/backend/npdfhir/filters/practitioner_role_filter_set.py
+++ b/backend/npdfhir/filters/practitioner_role_filter_set.py
@@ -1,62 +1,62 @@
-from django_filters import rest_framework as filters
from django.contrib.postgres.search import SearchVector
+from django_filters import rest_framework as filters
-from ..models import ProviderToLocation
from ..mappings import genderMapping
+from ..models import ProviderToLocation
class PractitionerRoleFilterSet(filters.FilterSet):
practitioner_name = filters.CharFilter(
- method='filter_practitioner_name',
- help_text='Filter by practitioner name (first, last, or full name)'
+ method="filter_practitioner_name",
+ help_text="Filter by practitioner name (first, last, or full name)",
)
-
+
practitioner_gender = filters.ChoiceFilter(
- method='filter_practitioner_gender',
+ method="filter_practitioner_gender",
choices=genderMapping.to_choices(),
- help_text='Filter by practitioner gender'
+ help_text="Filter by practitioner gender",
)
-
+
practitioner_type = filters.CharFilter(
- method='filter_practitioner_type',
- help_text='Filter by practitioner type/taxonomy'
+ method="filter_practitioner_type", help_text="Filter by practitioner type/taxonomy"
)
-
+
organization_name = filters.CharFilter(
- method='filter_organization_name',
- help_text='Filter by organization name'
+ method="filter_organization_name", help_text="Filter by organization name"
)
class Meta:
model = ProviderToLocation
- fields = ['practitioner_name', 'practitioner_gender',
- 'practitioner_type', 'organization_name']
+ fields = [
+ "practitioner_name",
+ "practitioner_gender",
+ "practitioner_type",
+ "organization_name",
+ ]
def filter_practitioner_name(self, queryset, name, value):
return queryset.annotate(
search=SearchVector(
- 'provider_to_organization__individual__individual__individualtoname__first_name',
- 'provider_to_organization__individual__individual__individualtoname__last_name',
- 'provider_to_organization__individual__individual__individualtoname__middle_name'
+ "provider_to_organization__individual__individual__individualtoname__first_name",
+ "provider_to_organization__individual__individual__individualtoname__last_name",
+ "provider_to_organization__individual__individual__individualtoname__middle_name",
)
).filter(search=value)
def filter_practitioner_gender(self, queryset, name, value):
if value in genderMapping.keys():
gender = genderMapping.toNPD(value)
- return queryset.filter(
- provider_to_organization__individual__individual__gender=gender
- )
+ return queryset.filter(provider_to_organization__individual__individual__gender=gender)
return queryset
def filter_practitioner_type(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector('provider_to_organization__providertotaxonomy__nucc_code__display_name')
+ search=SearchVector(
+ "provider_to_organization__providertotaxonomy__nucc_code__display_name"
+ )
).filter(search=value)
def filter_organization_name(self, queryset, name, value):
return queryset.annotate(
- search=SearchVector(
- 'provider_to_organization__organization__organizationtoname__name'
- )
- ).filter(search=value)
\ No newline at end of file
+ search=SearchVector("provider_to_organization__organization__organizationtoname__name")
+ ).filter(search=value)
diff --git a/backend/npdfhir/management/commands/exportschema.py b/backend/npdfhir/management/commands/exportschema.py
new file mode 100644
index 00000000..7a8baf99
--- /dev/null
+++ b/backend/npdfhir/management/commands/exportschema.py
@@ -0,0 +1,127 @@
+import base64
+import importlib
+import json
+import random
+from typing import Callable
+
+from django.core.management.base import BaseCommand, CommandError
+from django.db import models
+from django.test import Client
+from django.urls import reverse
+
+# from fhir.resources.R4B import organization, practitioner
+from pydantic import BaseModel
+
+from npdfhir.models import Organization, Provider
+
+
+class Command(BaseCommand):
+ help = "Export a JSON schema, example JSON record, or both for a given FHIR entity"
+
+ def add_arguments(self, parser):
+ parser.add_argument(
+ "--model",
+ help="name of FHIR entity to export (Practitioner, Organization, etc)",
+ default="Practitioner",
+ )
+ parser.add_argument(
+ "--schema",
+ help="only export the model_json_schema",
+ action="store_true",
+ default=False,
+ )
+ parser.add_argument(
+ "--record",
+ help="only export the an example record JSON dump",
+ action="store_true",
+ default=False,
+ )
+ parser.add_argument(
+ "--username",
+ help="username of a local NPD user (required if authentication is required)",
+ default="developer@cms.hhs.gov",
+ )
+ parser.add_argument(
+ "--password",
+ help="password of a local NPD user (required if authentication is required)",
+ default="password123",
+ )
+
+ def prepare_authentication_header(self, options: dict):
+ username = options["username"]
+ password = options["password"]
+ payload_bytes = f"{username}:{password}".encode("utf-8")
+ return f"Basic {base64.b64encode(payload_bytes).decode('utf-8')}"
+
+ def load_fhir_model(self, model_name: str):
+ module = importlib.import_module(f".R4B.{model_name.lower()}", "fhir.resources")
+ return getattr(module, model_name)
+
+ def get_record_and_schema(
+ self,
+ model_class: models.Model | None,
+ api_class: BaseModel | str,
+ route: str | None,
+ options: dict,
+ id_selector: Callable[[models.Model], str] | None,
+ ) -> tuple[str, str]:
+ schema = {}
+ json_record = {}
+
+ if isinstance(api_class, str):
+ api_class = self.load_fhir_model(api_class)
+
+ if not options["record"]:
+ schema = api_class.model_json_schema()
+
+ if not options["schema"]:
+ client = Client()
+ records = model_class.objects.all()[:10]
+ example = random.choice(records)
+
+ record_id = id_selector(example) if id_selector else example.id
+
+ url = reverse(route, args=[record_id])
+ resp = client.get(url, HTTP_AUTHORIZATION=self.prepare_authentication_header(options))
+ json_record = json.loads(resp.content)
+
+ return json_record, schema
+
+ def handle(self, *args, **options):
+ if str(options["model"]).lower().startswith("prac"):
+ model_type = "Practitioner"
+ json_record, schema = self.get_record_and_schema(
+ Provider,
+ model_type,
+ "fhir-practitioner-detail",
+ options,
+ lambda record: record.individual.id,
+ )
+ elif str(options["model"]).lower().startswith("org"):
+ model_type = "Organization"
+ json_record, schema = self.get_record_and_schema(
+ Organization,
+ model_type,
+ "fhir-organization-detail",
+ options,
+ lambda record: record.id,
+ )
+ else:
+ model_type = options["model"]
+ try:
+ json_record, schema = self.get_record_and_schema(
+ None, model_type, None, {**options, "schema": True}, None
+ )
+ except Exception as ex:
+ raise CommandError(f"unable to generate schema for {model_type}") from ex
+
+ if options["schema"]:
+ self.stdout.write(json.dumps(schema, indent=2))
+ elif options["record"]:
+ self.stdout.write(json.dumps(json_record, indent=2))
+ else:
+ self.stdout.write(
+ f'Given this OpenAPI JSON schema document and an example JSON record, please generate a Typescript interface for "{model_type}" records.\n\n'
+ f"\n{json.dumps(schema, indent=2)}\n\n\n"
+ f"\n{json.dumps(json_record, indent=2)}\n\n\n"
+ )
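
Typical invocations, sketched with `call_command` (the same options work on the command line as `python manage.py exportschema ...`; the record path needs seeded data and a reachable API):

```python
from django.core.management import call_command

# Print only the pydantic JSON schema for the FHIR Practitioner resource
call_command("exportschema", model="Practitioner", schema=True)

# Print only an example Organization record fetched through the API,
# authenticating with the default developer credentials unless overridden
call_command("exportschema", model="Organization", record=True)
```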
diff --git a/backend/npdfhir/management/commands/seedsystem.py b/backend/npdfhir/management/commands/seedsystem.py
new file mode 100644
index 00000000..6257db2f
--- /dev/null
+++ b/backend/npdfhir/management/commands/seedsystem.py
@@ -0,0 +1,70 @@
+import json
+import random
+
+from django.core.management.base import BaseCommand
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db import IntegrityError
+from faker import Faker
+
+from npdfhir.tests.fixtures.endpoint import create_endpoint
+from npdfhir.tests.fixtures.organization import create_organization
+from npdfhir.tests.fixtures.practitioner import create_practitioner
+
+
+class Command(BaseCommand):
+ help = "Create test data for end-to-end specs"
+
+ def generate_npi(self) -> int:
+ return random.randint(1123456789, 2987654321)
+
+ def to_json(self, **record) -> str:
+ return json.dumps(record, cls=DjangoJSONEncoder, indent=2)
+
+ def generate_sample_organizations(self, qty: int = 25):
+ fake = Faker()
+ for i in range(qty):
+ name = fake.company()
+ org = create_organization(
+ name=name,
+ # not bothering with checksum here
+ npi_value=self.generate_npi(),
+ authorized_official_first_name=fake.first_name(),
+ authorized_official_last_name=fake.last_name(),
+ other_state_code=fake.state_abbr(),
+ other_issuer=fake.company(),
+ )
+ self.stdout.write(f"created Organization: {org.id} {name}")
+
+ def handle(self, *args, **options):
+ if options.get("seed", None):
+ Faker.seed(int(options["seed"]))
+
+ provider = create_practitioner()
+ individualtoname = provider.individual.individualtoname_set.first()
+
+ provider_report = self.to_json(
+ individual__id=provider.individual.id,
+ individual__individualtoname__first_name=individualtoname.first_name,
+ individual__individualtoname__last_name=individualtoname.last_name,
+ npi__npi=provider.npi.npi,
+ )
+ self.stdout.write(f"created Provider: {provider_report}")
+
+ try:
+ # one known NPI
+ organization = create_organization(
+ name="AAA Test Org", npi_value=1234567893, organization_type="261QP2000X"
+ )
+ organizationtoname = organization.organizationtoname_set.first()
+ self.stdout.write(
+ f"created Organization: {self.to_json(id=organization.id, organizationtoname__name=organizationtoname.name)}"
+ )
+ except IntegrityError:
+ organization = None
+ self.stdout.write("(organization with NPI 1234567893 already exists)")
+
+ if organization:
+ endpoint = create_endpoint(organization=organization)
+ self.stdout.write(f"created Endpoint: {self.to_json(id=endpoint.id)}")
+
+ self.generate_sample_organizations(25)
diff --git a/backend/npdfhir/management/commands/seeduser.py b/backend/npdfhir/management/commands/seeduser.py
new file mode 100644
index 00000000..039da14c
--- /dev/null
+++ b/backend/npdfhir/management/commands/seeduser.py
@@ -0,0 +1,36 @@
+from django.contrib.auth.models import Group, User
+from django.core.management.base import BaseCommand
+from flags.models import FlagState
+
+
+class Command(BaseCommand):
+ help = "Create a developer@cms.hhs.gov in the Developers group"
+
+ def prepare_feature_flags(self):
+ FlagState.objects.get_or_create(name="SEARCH_APP", condition="in_group", value="Developers")
+ FlagState.objects.get_or_create(
+ name="PRACTITIONER_LOOKUP", condition="in_group", value="Developers"
+ )
+ FlagState.objects.get_or_create(
+ name="PRACTITIONER_LOOKUP_DETAILS", condition="in_group", value="Developers"
+ )
+ FlagState.objects.get_or_create(
+ name="ORGANIZATION_LOOKUP", condition="in_group", value="Developers"
+ )
+ FlagState.objects.get_or_create(
+ name="ORGANIZATION_LOOKUP_DETAILS", condition="in_group", value="Developers"
+ )
+
+ def handle(self, *args, **options):
+ user, _ = User.objects.get_or_create(username="developer@cms.hhs.gov")
+ user.set_password("password123")
+
+ group, _ = Group.objects.get_or_create(name="Developers")
+ user.groups.add(group)
+
+ user.save()
+ group.save()
+
+ self.prepare_feature_flags()
+
+ self.stdout.write("Created developer@cms.hhs.gov account and added to Developer group")
diff --git a/backend/npdfhir/mappings.py b/backend/npdfhir/mappings.py
index aaa82a89..ad545987 100644
--- a/backend/npdfhir/mappings.py
+++ b/backend/npdfhir/mappings.py
@@ -1,7 +1,7 @@
from bidict import bidict
-class Mapping():
+class Mapping:
def __init__(self, mapping: dict):
self.mapping = bidict(mapping)
@@ -17,30 +17,17 @@ def toNPD(self, fhirValue):
else:
return self.mapping.inverse[fhirValue]
- def keys(self, which='fhir'):
- if which == 'npd':
+ def keys(self, which="fhir"):
+ if which == "npd":
return list(self.mapping.keys())
else:
return list(self.mapping.inverse.keys())
-
+
def to_choices(self):
- fhir_values = self.keys(which='fhir')
+ fhir_values = self.keys(which="fhir")
return [(v, v) for v in fhir_values]
-genderMapping = Mapping(
- {
- "F": "Female",
- "M": "Male",
- "O": "Other"
- }
-)
-
-addressUseMapping = Mapping(
- {
- 1: "home",
- 2: "work",
- 3: "temp",
- 4: "old",
- 5: "billing"
- }
-)
+
+genderMapping = Mapping({"F": "Female", "M": "Male", "O": "Other"})
+
+addressUseMapping = Mapping({1: "home", 2: "work", 3: "temp", 4: "old", 5: "billing"})
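
A small usage sketch, assuming `toNPD` resolves FHIR-facing values through the bidict's inverse as shown above:

```python
from npdfhir.mappings import addressUseMapping, genderMapping

assert genderMapping.toNPD("Female") == "F"                 # FHIR value -> NPD code
assert "work" in addressUseMapping.keys()                   # FHIR-facing keys
assert ("home", "home") in addressUseMapping.to_choices()   # choices for ChoiceFilter
```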
diff --git a/backend/npdfhir/middleware.py b/backend/npdfhir/middleware.py
index 7bebe379..edd7ad07 100644
--- a/backend/npdfhir/middleware.py
+++ b/backend/npdfhir/middleware.py
@@ -1,7 +1,8 @@
-from django.http import JsonResponse
-from django.db import connection
from datetime import datetime, timezone
+
import structlog
+from django.db import connection
+from django.http import JsonResponse
logger = structlog.get_logger(__name__)
@@ -11,14 +12,10 @@ def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
- if request.path == '/fhir/healthCheck':
+ if request.path == "/fhir/healthCheck":
utc_now = datetime.now(timezone.utc).isoformat()
- health_status = {
- 'status': 'healthy',
- 'database': 'connected',
- 'timestamp': utc_now
- }
+ health_status = {"status": "healthy", "database": "connected", "timestamp": utc_now}
try:
# Test database connection
@@ -28,12 +25,14 @@ def __call__(self, request):
return JsonResponse(health_status, status=200)
- except Exception as e:
+ except Exception:
logger.error("Database health check failed")
- health_status.update({
- 'status': 'unhealthy',
- 'database': 'disconnected',
- })
+ health_status.update(
+ {
+ "status": "unhealthy",
+ "database": "disconnected",
+ }
+ )
return JsonResponse(health_status, status=502)
- return self.get_response(request)
\ No newline at end of file
+ return self.get_response(request)
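
The health check can be exercised end to end with Django's test client; a minimal sketch (assumes settings and a database connection are available):

```python
from django.test import Client

# The middleware answers /fhir/healthCheck before any URL resolution happens.
response = Client().get("/fhir/healthCheck")
payload = response.json()

assert response.status_code in (200, 502)
assert payload["status"] in ("healthy", "unhealthy")
assert "timestamp" in payload
```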
diff --git a/backend/npdfhir/models.py b/backend/npdfhir/models.py
index 9ed14117..6b44ca45 100644
--- a/backend/npdfhir/models.py
+++ b/backend/npdfhir/models.py
@@ -10,22 +10,22 @@
class Address(models.Model):
id = models.UUIDField(primary_key=True)
- address_us = models.ForeignKey(
- 'AddressUs', models.DO_NOTHING, blank=True, null=True)
+ address_us = models.ForeignKey("AddressUs", models.DO_NOTHING, blank=True, null=True)
address_international = models.ForeignKey(
- 'AddressInternational', models.DO_NOTHING, blank=True, null=True)
+ "AddressInternational", models.DO_NOTHING, blank=True, null=True
+ )
address_nonstandard = models.ForeignKey(
- 'AddressNonstandard', models.DO_NOTHING, blank=True, null=True)
+ "AddressNonstandard", models.DO_NOTHING, blank=True, null=True
+ )
class Meta:
managed = False
- db_table = 'address'
+ db_table = "address"
class AddressInternational(models.Model):
id = models.CharField(primary_key=True, max_length=10)
- country_code = models.ForeignKey(
- 'IsoCountry', models.DO_NOTHING, db_column='country_code')
+ country_code = models.ForeignKey("IsoCountry", models.DO_NOTHING, db_column="country_code")
geocode = models.CharField(max_length=4, blank=True, null=True)
local_language = models.CharField(max_length=6, blank=True, null=True)
freeform = models.CharField(max_length=512, blank=True, null=True)
@@ -35,34 +35,26 @@ class AddressInternational(models.Model):
address4 = models.CharField(max_length=64, blank=True, null=True)
organization = models.CharField(max_length=64, blank=True, null=True)
locality = models.CharField(max_length=64, blank=True, null=True)
- administrative_area = models.CharField(
- max_length=32, blank=True, null=True)
+ administrative_area = models.CharField(max_length=32, blank=True, null=True)
postal_code = models.CharField(max_length=16, blank=True, null=True)
- administrative_area_iso2 = models.CharField(
- max_length=8, blank=True, null=True)
- sub_administrative_area = models.CharField(
- max_length=64, blank=True, null=True)
+ administrative_area_iso2 = models.CharField(max_length=8, blank=True, null=True)
+ sub_administrative_area = models.CharField(max_length=64, blank=True, null=True)
country_iso_3 = models.CharField(max_length=3, blank=True, null=True)
premise = models.CharField(max_length=64, blank=True, null=True)
premise_number = models.CharField(max_length=64, blank=True, null=True)
thoroughfare = models.CharField(max_length=64, blank=True, null=True)
- latitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
- longitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
+ latitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
+ longitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
geocode_precision = models.CharField(max_length=32, blank=True, null=True)
- max_geocode_precision = models.CharField(
- max_length=32, blank=True, null=True)
+ max_geocode_precision = models.CharField(max_length=32, blank=True, null=True)
address_format = models.CharField(max_length=128, blank=True, null=True)
- verification_status = models.CharField(
- max_length=32, blank=True, null=True)
+ verification_status = models.CharField(max_length=32, blank=True, null=True)
address_precision = models.CharField(max_length=32, blank=True, null=True)
- max_address_precision = models.CharField(
- max_length=32, blank=True, null=True)
+ max_address_precision = models.CharField(max_length=32, blank=True, null=True)
class Meta:
managed = False
- db_table = 'address_international'
+ db_table = "address_international"
class AddressNonstandard(models.Model):
@@ -73,72 +65,58 @@ class AddressNonstandard(models.Model):
address_type = models.CharField(max_length=32, blank=True, null=True)
address_format = models.CharField(max_length=128, blank=True, null=True)
raw_address = models.TextField(blank=True, null=True)
- latitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
- longitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
+ latitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
+ longitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
notes = models.TextField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'address_nonstandard'
+ db_table = "address_nonstandard"
class AddressUs(models.Model):
- id = models.CharField(primary_key=True, max_length=10)
+ id = models.BigIntegerField(primary_key=True)
delivery_line_1 = models.CharField(max_length=64)
delivery_line_2 = models.CharField(max_length=64, blank=True, null=True)
last_line = models.CharField(max_length=64, blank=True, null=True)
- delivery_point_barcode = models.CharField(
- max_length=12, blank=True, null=True)
+ delivery_point_barcode = models.CharField(max_length=12, blank=True, null=True)
urbanization = models.CharField(max_length=64, blank=True, null=True)
primary_number = models.CharField(max_length=30, blank=True, null=True)
street_name = models.CharField(max_length=64, blank=True, null=True)
- street_predirection = models.CharField(
- max_length=16, blank=True, null=True)
- street_postdirection = models.CharField(
- max_length=16, blank=True, null=True)
+ street_predirection = models.CharField(max_length=16, blank=True, null=True)
+ street_postdirection = models.CharField(max_length=16, blank=True, null=True)
street_suffix = models.CharField(max_length=16, blank=True, null=True)
secondary_number = models.CharField(max_length=32, blank=True, null=True)
- secondary_designator = models.CharField(
- max_length=16, blank=True, null=True)
- extra_secondary_number = models.CharField(
- max_length=32, blank=True, null=True)
- extra_secondary_designator = models.CharField(
- max_length=16, blank=True, null=True)
+ secondary_designator = models.CharField(max_length=16, blank=True, null=True)
+ extra_secondary_number = models.CharField(max_length=32, blank=True, null=True)
+ extra_secondary_designator = models.CharField(max_length=16, blank=True, null=True)
pmb_designator = models.CharField(max_length=16, blank=True, null=True)
pmb_number = models.CharField(max_length=16, blank=True, null=True)
city_name = models.CharField(max_length=64)
default_city_name = models.CharField(max_length=64, blank=True, null=True)
- state_code = models.ForeignKey(
- 'FipsState', models.DO_NOTHING, db_column='state_code')
+ state_code = models.ForeignKey("FipsState", models.DO_NOTHING, db_column="state_code")
zipcode = models.CharField(max_length=5)
plus4_code = models.CharField(max_length=4, blank=True, null=True)
delivery_point = models.CharField(max_length=2, blank=True, null=True)
- delivery_point_check_digit = models.CharField(
- max_length=1, blank=True, null=True)
+ delivery_point_check_digit = models.CharField(max_length=1, blank=True, null=True)
record_type = models.CharField(max_length=1, blank=True, null=True)
zip_type = models.CharField(max_length=32, blank=True, null=True)
county_code = models.ForeignKey(
- 'FipsCounty', models.DO_NOTHING, db_column='county_code', blank=True, null=True)
+ "FipsCounty", models.DO_NOTHING, db_column="county_code", blank=True, null=True
+ )
ews_match = models.CharField(max_length=5, blank=True, null=True)
carrier_route = models.CharField(max_length=4, blank=True, null=True)
- congressional_district = models.CharField(
- max_length=2, blank=True, null=True)
- building_default_indicator = models.CharField(
- max_length=1, blank=True, null=True)
+ congressional_district = models.CharField(max_length=2, blank=True, null=True)
+ building_default_indicator = models.CharField(max_length=1, blank=True, null=True)
rdi = models.CharField(max_length=12, blank=True, null=True)
elot_sequence = models.CharField(max_length=4, blank=True, null=True)
elot_sort = models.CharField(max_length=4, blank=True, null=True)
- latitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
- longitude = models.DecimalField(
- max_digits=9, decimal_places=6, blank=True, null=True)
+ latitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
+ longitude = models.DecimalField(max_digits=9, decimal_places=6, blank=True, null=True)
coordinate_license = models.IntegerField(blank=True, null=True)
geo_precision = models.CharField(max_length=18, blank=True, null=True)
time_zone = models.CharField(max_length=48, blank=True, null=True)
- utc_offset = models.DecimalField(
- max_digits=4, decimal_places=2, blank=True, null=True)
+ utc_offset = models.DecimalField(max_digits=4, decimal_places=2, blank=True, null=True)
dst = models.CharField(max_length=5, blank=True, null=True)
dpv_match_code = models.CharField(max_length=1, blank=True, null=True)
dpv_footnotes = models.CharField(max_length=32, blank=True, null=True)
@@ -154,18 +132,16 @@ class AddressUs(models.Model):
class Meta:
managed = False
- db_table = 'address_us'
+ db_table = "address_us"
class ClinicalOrganization(models.Model):
- organization = models.OneToOneField(
- 'Organization', models.DO_NOTHING, blank=True, null=True)
- npi = models.OneToOneField(
- 'Npi', models.DO_NOTHING, db_column='npi', primary_key=True)
+ organization = models.OneToOneField("Organization", models.DO_NOTHING, blank=True, null=True)
+ npi = models.OneToOneField("Npi", models.DO_NOTHING, db_column="npi", primary_key=True)
class Meta:
managed = False
- db_table = 'clinical_organization'
+ db_table = "clinical_organization"
class CredentialType(models.Model):
@@ -173,7 +149,7 @@ class CredentialType(models.Model):
class Meta:
managed = False
- db_table = 'credential_type'
+ db_table = "credential_type"
class DegreeType(models.Model):
@@ -181,7 +157,7 @@ class DegreeType(models.Model):
class Meta:
managed = False
- db_table = 'degree_type'
+ db_table = "degree_type"
class EhrVendor(models.Model):
@@ -191,20 +167,19 @@ class EhrVendor(models.Model):
class Meta:
managed = False
- db_table = 'ehr_vendor'
+ db_table = "ehr_vendor"
class Endpoint(models.Model):
id = models.UUIDField(primary_key=True)
address = models.CharField(max_length=200)
- endpoint_type = models.ForeignKey('EndpointType', models.DO_NOTHING)
- endpoint_instance = models.ForeignKey(
- 'EndpointInstance', models.DO_NOTHING)
+ endpoint_type = models.ForeignKey("EndpointType", models.DO_NOTHING)
+ endpoint_instance = models.ForeignKey("EndpointInstance", models.DO_NOTHING)
name = models.CharField(max_length=200, blank=True, null=True)
class Meta:
managed = False
- db_table = 'endpoint'
+ db_table = "endpoint"
class EndpointConnectionType(models.Model):
@@ -214,7 +189,7 @@ class EndpointConnectionType(models.Model):
class Meta:
managed = False
- db_table = 'endpoint_connection_type'
+ db_table = "endpoint_connection_type"
class EndpointInstance(models.Model):
@@ -222,20 +197,21 @@ class EndpointInstance(models.Model):
ehr_vendor = models.ForeignKey(EhrVendor, models.DO_NOTHING)
address = models.CharField(max_length=200)
endpoint_connection_type = models.ForeignKey(
- EndpointConnectionType, models.DO_NOTHING, blank=True, null=True)
+ EndpointConnectionType, models.DO_NOTHING, blank=True, null=True
+ )
name = models.CharField(max_length=200, blank=True, null=True)
description = models.CharField(max_length=1000, blank=True, null=True)
environment_type = models.ForeignKey(
- 'EnvironmentType', models.DO_NOTHING, blank=True, null=True)
+ "EnvironmentType", models.DO_NOTHING, blank=True, null=True
+ )
class Meta:
managed = False
- db_table = 'endpoint_instance'
+ db_table = "endpoint_instance"
class EndpointInstanceToOtherId(models.Model):
- pk = models.CompositePrimaryKey(
- 'endpoint_instance_id', 'other_id', 'issuer_id')
+ pk = models.CompositePrimaryKey("endpoint_instance_id", "other_id", "issuer_id")
endpoint_instance = models.ForeignKey(EndpointInstance, models.DO_NOTHING)
other_id = models.CharField(max_length=100)
system = models.CharField(max_length=200)
@@ -243,19 +219,18 @@ class EndpointInstanceToOtherId(models.Model):
class Meta:
managed = False
- db_table = 'endpoint_instance_to_other_id'
+ db_table = "endpoint_instance_to_other_id"
class EndpointInstanceToPayload(models.Model):
- pk = models.CompositePrimaryKey('endpoint_instance_id', 'payload_type_id')
+ pk = models.CompositePrimaryKey("endpoint_instance_id", "payload_type_id")
endpoint_instance = models.ForeignKey(EndpointInstance, models.DO_NOTHING)
- mime_type = models.ForeignKey(
- 'MimeType', models.DO_NOTHING, blank=True, null=True)
- payload_type = models.ForeignKey('PayloadType', models.DO_NOTHING)
+ mime_type = models.ForeignKey("MimeType", models.DO_NOTHING, blank=True, null=True)
+ payload_type = models.ForeignKey("PayloadType", models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'endpoint_instance_to_payload'
+ db_table = "endpoint_instance_to_payload"
class EndpointInstanceType(models.Model):
@@ -263,11 +238,11 @@ class EndpointInstanceType(models.Model):
class Meta:
managed = False
- db_table = 'endpoint_instance_type'
+ db_table = "endpoint_instance_type"
class EndpointToOtherId(models.Model):
- pk = models.CompositePrimaryKey('endpoint_id', 'other_id', 'issuer_id')
+ pk = models.CompositePrimaryKey("endpoint_id", "other_id", "issuer_id")
endpoint = models.ForeignKey(Endpoint, models.DO_NOTHING)
other_id = models.CharField(max_length=100)
system = models.CharField(max_length=200)
@@ -275,19 +250,18 @@ class EndpointToOtherId(models.Model):
class Meta:
managed = False
- db_table = 'endpoint_to_other_id'
+ db_table = "endpoint_to_other_id"
class EndpointToPayload(models.Model):
- pk = models.CompositePrimaryKey('endpoint_id', 'payload_type_id')
+ pk = models.CompositePrimaryKey("endpoint_id", "payload_type_id")
endpoint = models.ForeignKey(Endpoint, models.DO_NOTHING)
- mime_type = models.ForeignKey(
- 'MimeType', models.DO_NOTHING, blank=True, null=True)
- payload_type = models.ForeignKey('PayloadType', models.DO_NOTHING)
+ mime_type = models.ForeignKey("MimeType", models.DO_NOTHING, blank=True, null=True)
+ payload_type = models.ForeignKey("PayloadType", models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'endpoint_to_payload'
+ db_table = "endpoint_to_payload"
class EndpointType(models.Model):
@@ -295,7 +269,7 @@ class EndpointType(models.Model):
class Meta:
managed = False
- db_table = 'endpoint_type'
+ db_table = "endpoint_type"
class EnvironmentType(models.Model):
@@ -305,7 +279,7 @@ class EnvironmentType(models.Model):
class Meta:
managed = False
- db_table = 'environment_type'
+ db_table = "environment_type"
class FhirAddressUse(models.Model):
@@ -313,7 +287,7 @@ class FhirAddressUse(models.Model):
class Meta:
managed = False
- db_table = 'fhir_address_use'
+ db_table = "fhir_address_use"
class FhirEmailUse(models.Model):
@@ -321,7 +295,7 @@ class FhirEmailUse(models.Model):
class Meta:
managed = False
- db_table = 'fhir_email_use'
+ db_table = "fhir_email_use"
class FhirNameUse(models.Model):
@@ -329,7 +303,7 @@ class FhirNameUse(models.Model):
class Meta:
managed = False
- db_table = 'fhir_name_use'
+ db_table = "fhir_name_use"
class FhirPhoneSystem(models.Model):
@@ -337,7 +311,7 @@ class FhirPhoneSystem(models.Model):
class Meta:
managed = False
- db_table = 'fhir_phone_system'
+ db_table = "fhir_phone_system"
class FhirPhoneUse(models.Model):
@@ -345,18 +319,18 @@ class FhirPhoneUse(models.Model):
class Meta:
managed = False
- db_table = 'fhir_phone_use'
+ db_table = "fhir_phone_use"
class FipsCounty(models.Model):
id = models.CharField(primary_key=True, max_length=5)
name = models.CharField(max_length=200)
- fips_state = models.ForeignKey('FipsState', models.DO_NOTHING)
+ fips_state = models.ForeignKey("FipsState", models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'fips_county'
- unique_together = (('name', 'fips_state'),)
+ db_table = "fips_county"
+ unique_together = (("name", "fips_state"),)
class FipsState(models.Model):
@@ -366,7 +340,7 @@ class FipsState(models.Model):
class Meta:
managed = False
- db_table = 'fips_state'
+ db_table = "fips_state"
class Individual(models.Model):
@@ -378,46 +352,43 @@ class Individual(models.Model):
class Meta:
managed = False
- db_table = 'individual'
+ db_table = "individual"
class IndividualToAddress(models.Model):
- pk = models.CompositePrimaryKey(
- 'individual_id', 'address_id', 'address_use_id')
+ pk = models.CompositePrimaryKey("individual_id", "address_id", "address_use_id")
individual = models.ForeignKey(Individual, models.DO_NOTHING)
address = models.ForeignKey(Address, models.DO_NOTHING)
address_use = models.ForeignKey(FhirAddressUse, models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'individual_to_address'
+ db_table = "individual_to_address"
class IndividualToEmail(models.Model):
- pk = models.CompositePrimaryKey(
- 'individual_id', 'email_address', 'email_use_id')
+ pk = models.CompositePrimaryKey("individual_id", "email_address", "email_use_id")
individual = models.ForeignKey(Individual, models.DO_NOTHING)
email_address = models.CharField(max_length=1000)
email_use = models.ForeignKey(FhirEmailUse, models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'individual_to_email'
+ db_table = "individual_to_email"
class IndividualToLanguageSpoken(models.Model):
- pk = models.CompositePrimaryKey('individual_id', 'language_spoken_id')
- language_spoken = models.ForeignKey('LanguageSpoken', models.DO_NOTHING)
+ pk = models.CompositePrimaryKey("individual_id", "language_spoken_id")
+ language_spoken = models.ForeignKey("LanguageSpoken", models.DO_NOTHING)
individual_id = models.UUIDField()
class Meta:
managed = False
- db_table = 'individual_to_language_spoken'
+ db_table = "individual_to_language_spoken"
class IndividualToName(models.Model):
- pk = models.CompositePrimaryKey(
- 'individual_id', 'first_name', 'last_name', 'name_use_id')
+ pk = models.CompositePrimaryKey("individual_id", "first_name", "last_name", "name_use_id")
individual = models.ForeignKey(Individual, models.DO_NOTHING)
prefix = models.CharField(max_length=10, blank=True, null=True)
first_name = models.CharField(max_length=50)
@@ -430,7 +401,7 @@ class IndividualToName(models.Model):
class Meta:
managed = False
- db_table = 'individual_to_name'
+ db_table = "individual_to_name"
class IndividualToPhone(models.Model):
@@ -442,8 +413,8 @@ class IndividualToPhone(models.Model):
class Meta:
managed = False
- db_table = 'individual_to_phone'
- unique_together = (('individual', 'phone_number', 'phone_use'),)
+ db_table = "individual_to_phone"
+ unique_together = (("individual", "phone_number", "phone_use"),)
class IsoCountry(models.Model):
@@ -452,7 +423,7 @@ class IsoCountry(models.Model):
class Meta:
managed = False
- db_table = 'iso_country'
+ db_table = "iso_country"
class LanguageSpoken(models.Model):
@@ -461,7 +432,7 @@ class LanguageSpoken(models.Model):
class Meta:
managed = False
- db_table = 'language_spoken'
+ db_table = "language_spoken"
class LegalEntity(models.Model):
@@ -470,31 +441,30 @@ class LegalEntity(models.Model):
class Meta:
managed = False
- db_table = 'legal_entity'
+ db_table = "legal_entity"
class Location(models.Model):
id = models.UUIDField(primary_key=True)
name = models.CharField(max_length=200, blank=True, null=True)
- organization = models.ForeignKey('Organization', models.DO_NOTHING)
+ organization = models.ForeignKey("Organization", models.DO_NOTHING)
address = models.ForeignKey(Address, models.DO_NOTHING)
active = models.BooleanField(blank=True, null=True)
- phone = models.ForeignKey('OrganizationToPhone',
- models.DO_NOTHING, blank=True, null=True)
+ phone = models.ForeignKey("OrganizationToPhone", models.DO_NOTHING, blank=True, null=True)
class Meta:
managed = False
- db_table = 'location'
+ db_table = "location"
class LocationToEndpointInstance(models.Model):
- pk = models.CompositePrimaryKey('location_id', 'endpoint_instance_id')
+ pk = models.CompositePrimaryKey("location_id", "endpoint_instance_id")
location = models.ForeignKey(Location, models.DO_NOTHING)
endpoint_instance = models.ForeignKey(EndpointInstance, models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'location_to_endpoint_instance'
+ db_table = "location_to_endpoint_instance"
class MedicareProviderType(models.Model):
@@ -502,7 +472,7 @@ class MedicareProviderType(models.Model):
class Meta:
managed = False
- db_table = 'medicare_provider_type'
+ db_table = "medicare_provider_type"
class MimeType(models.Model):
@@ -510,24 +480,23 @@ class MimeType(models.Model):
class Meta:
managed = False
- db_table = 'mime_type'
+ db_table = "mime_type"
class Npi(models.Model):
npi = models.BigIntegerField(primary_key=True)
entity_type_code = models.SmallIntegerField()
- replacement_npi = models.CharField(max_length=11, blank=True, null=True)
+ replacement_npi = models.BigIntegerField(blank=True, null=True)
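+ # Replacement NPIs are 10-digit numeric identifiers, stored as a bigint like npi above.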
enumeration_date = models.DateField()
last_update_date = models.DateField()
- deactivation_reason_code = models.CharField(
- max_length=3, blank=True, null=True)
+ deactivation_reason_code = models.CharField(max_length=3, blank=True, null=True)
deactivation_date = models.DateField(blank=True, null=True)
reactivation_date = models.DateField(blank=True, null=True)
certification_date = models.DateField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'npi'
+ db_table = "npi"
class Nucc(models.Model):
@@ -540,104 +509,97 @@ class Nucc(models.Model):
class Meta:
managed = False
- db_table = 'nucc'
+ db_table = "nucc"
class NuccClassification(models.Model):
nucc_code = models.ForeignKey(
- Nucc, models.DO_NOTHING, db_column='nucc_code', blank=True, null=True)
+ Nucc, models.DO_NOTHING, db_column="nucc_code", blank=True, null=True
+ )
display_name = models.CharField(max_length=100, blank=True, null=True)
- nucc_grouping = models.ForeignKey(
- 'NuccGrouping', models.DO_NOTHING, blank=True, null=True)
+ nucc_grouping = models.ForeignKey("NuccGrouping", models.DO_NOTHING, blank=True, null=True)
class Meta:
managed = False
- db_table = 'nucc_classification'
- unique_together = (('nucc_code', 'nucc_grouping'),)
+ db_table = "nucc_classification"
+ unique_together = (("nucc_code", "nucc_grouping"),)
class NuccGrouping(models.Model):
- display_name = models.CharField(
- unique=True, max_length=100, blank=True, null=True)
+ display_name = models.CharField(unique=True, max_length=100, blank=True, null=True)
class Meta:
managed = False
- db_table = 'nucc_grouping'
+ db_table = "nucc_grouping"
class NuccSpecialization(models.Model):
nucc_code = models.ForeignKey(
- Nucc, models.DO_NOTHING, db_column='nucc_code', blank=True, null=True)
+ Nucc, models.DO_NOTHING, db_column="nucc_code", blank=True, null=True
+ )
display_name = models.CharField(max_length=100, blank=True, null=True)
nucc_classification_id = models.IntegerField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'nucc_specialization'
- unique_together = (('nucc_code', 'nucc_classification_id'),)
+ db_table = "nucc_specialization"
+ unique_together = (("nucc_code", "nucc_classification_id"),)
class NuccToMedicareProviderType(models.Model):
- pk = models.CompositePrimaryKey('medicare_provider_type_id', 'nucc_code')
- medicare_provider_type = models.ForeignKey(
- MedicareProviderType, models.DO_NOTHING)
- nucc_code = models.ForeignKey(
- Nucc, models.DO_NOTHING, db_column='nucc_code')
+ pk = models.CompositePrimaryKey("medicare_provider_type_id", "nucc_code")
+ medicare_provider_type = models.ForeignKey(MedicareProviderType, models.DO_NOTHING)
+ nucc_code = models.ForeignKey(Nucc, models.DO_NOTHING, db_column="nucc_code")
class Meta:
managed = False
- db_table = 'nucc_to_medicare_provider_type'
+ db_table = "nucc_to_medicare_provider_type"
class Organization(models.Model):
id = models.UUIDField(primary_key=True)
- authorized_official = models.ForeignKey(Individual, models.DO_NOTHING)
- ein = models.ForeignKey(
- LegalEntity, models.DO_NOTHING, blank=True, null=True)
- parent = models.ForeignKey(
- 'self', models.DO_NOTHING, blank=True, null=True)
+ authorized_official = models.ForeignKey(Individual, models.DO_NOTHING, blank=True, null=True)
+ ein = models.ForeignKey(LegalEntity, models.DO_NOTHING, blank=True, null=True)
+ parent = models.ForeignKey("self", models.DO_NOTHING, blank=True, null=True)
class Meta:
managed = False
- db_table = 'organization'
+ db_table = "organization"
class OrganizationToAddress(models.Model):
- pk = models.CompositePrimaryKey(
- 'organization_id', 'address_id', 'address_use_id')
+ pk = models.CompositePrimaryKey("organization_id", "address_id", "address_use_id")
organization = models.ForeignKey(Organization, models.DO_NOTHING)
address = models.ForeignKey(Address, models.DO_NOTHING)
address_use = models.ForeignKey(FhirAddressUse, models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'organization_to_address'
+ db_table = "organization_to_address"
class OrganizationToName(models.Model):
- pk = models.CompositePrimaryKey('organization_id', 'name')
+ pk = models.CompositePrimaryKey("organization_id", "name")
organization = models.ForeignKey(Organization, models.DO_NOTHING)
name = models.CharField(max_length=1000)
is_primary = models.BooleanField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'organization_to_name'
+ db_table = "organization_to_name"
class OrganizationToOtherId(models.Model):
- pk = models.CompositePrimaryKey(
- 'npi', 'other_id', 'other_id_type_id', 'issuer', 'state_code')
- npi = models.ForeignKey(ClinicalOrganization,
- models.DO_NOTHING, db_column='npi')
+ pk = models.CompositePrimaryKey("npi", "other_id", "other_id_type_id", "issuer", "state_code")
+ npi = models.ForeignKey(ClinicalOrganization, models.DO_NOTHING, db_column="npi")
other_id = models.CharField(max_length=100)
- other_id_type = models.ForeignKey('OtherIdType', models.DO_NOTHING)
+ other_id_type = models.ForeignKey("OtherIdType", models.DO_NOTHING)
state_code = models.CharField(max_length=2)
issuer = models.CharField(max_length=200)
class Meta:
managed = False
- db_table = 'organization_to_other_id'
+ db_table = "organization_to_other_id"
class OrganizationToPhone(models.Model):
@@ -649,22 +611,19 @@ class OrganizationToPhone(models.Model):
class Meta:
managed = False
- db_table = 'organization_to_phone'
- unique_together = (
- ('organization', 'phone_number', 'extension', 'phone_use'),)
+ db_table = "organization_to_phone"
+ unique_together = (("organization", "phone_number", "extension", "phone_use"),)
class OrganizationToTaxonomy(models.Model):
- pk = models.CompositePrimaryKey('npi', 'nucc_code')
- npi = models.ForeignKey(ClinicalOrganization,
- models.DO_NOTHING, db_column='npi')
- nucc_code = models.ForeignKey(
- Nucc, models.DO_NOTHING, db_column='nucc_code')
+ pk = models.CompositePrimaryKey("npi", "nucc_code")
+ npi = models.ForeignKey(ClinicalOrganization, models.DO_NOTHING, db_column="npi")
+ nucc_code = models.ForeignKey(Nucc, models.DO_NOTHING, db_column="nucc_code")
is_primary = models.BooleanField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'organization_to_taxonomy'
+ db_table = "organization_to_taxonomy"
class OtherIdType(models.Model):
@@ -672,7 +631,7 @@ class OtherIdType(models.Model):
class Meta:
managed = False
- db_table = 'other_id_type'
+ db_table = "other_id_type"
class PayloadType(models.Model):
@@ -682,23 +641,21 @@ class PayloadType(models.Model):
class Meta:
managed = False
- db_table = 'payload_type'
+ db_table = "payload_type"
class Provider(models.Model):
- npi = models.OneToOneField(
- Npi, models.DO_NOTHING, db_column='npi', primary_key=True)
- individual = models.OneToOneField(
- Individual, models.DO_NOTHING, blank=True, null=True)
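+ # NOTE: the primary key is now individual rather than npi; npi-keyed tables
+ # (ProviderEducation, ProviderToOtherId, ProviderToTaxonomy) reference Provider with to_field="npi".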
+ npi = models.OneToOneField(Npi, models.DO_NOTHING, db_column="npi")
+ individual = models.OneToOneField(Individual, models.DO_NOTHING, primary_key=True)
class Meta:
managed = False
- db_table = 'provider'
+ db_table = "provider"
class ProviderEducation(models.Model):
- pk = models.CompositePrimaryKey('npi', 'school_id')
- npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column='npi')
+ pk = models.CompositePrimaryKey("npi", "school_id")
+ npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column="npi", to_field="npi")
school_id = models.IntegerField()
degree_type = models.ForeignKey(DegreeType, models.DO_NOTHING)
start_date = models.DateField(blank=True, null=True)
@@ -706,7 +663,7 @@ class ProviderEducation(models.Model):
class Meta:
managed = False
- db_table = 'provider_education'
+ db_table = "provider_education"
class ProviderRole(models.Model):
@@ -717,87 +674,78 @@ class ProviderRole(models.Model):
class Meta:
managed = False
- db_table = 'provider_role'
+ db_table = "provider_role"
class ProviderToCredential(models.Model):
pk = models.CompositePrimaryKey(
- 'provider_to_taxonomy_id', 'credential_type_id', 'license_number', 'state_code')
+ "provider_to_taxonomy_id", "credential_type_id", "license_number", "state_code"
+ )
credential_type = models.ForeignKey(CredentialType, models.DO_NOTHING)
license_number = models.CharField(max_length=20)
- state_code = models.ForeignKey(
- FipsState, models.DO_NOTHING, db_column='state_code')
- provider_to_taxonomy = models.ForeignKey(
- 'ProviderToTaxonomy', models.DO_NOTHING)
+ state_code = models.ForeignKey(FipsState, models.DO_NOTHING, db_column="state_code")
+ provider_to_taxonomy = models.ForeignKey("ProviderToTaxonomy", models.DO_NOTHING)
class Meta:
managed = False
- db_table = 'provider_to_credential'
+ db_table = "provider_to_credential"
class ProviderToLocation(models.Model):
location = models.ForeignKey(Location, models.DO_NOTHING)
- other_address = models.ForeignKey(
- Address, models.DO_NOTHING, blank=True, null=True)
+ other_address = models.ForeignKey(Address, models.DO_NOTHING, blank=True, null=True)
nucc_code = models.IntegerField(blank=True, null=True)
specialty_id = models.IntegerField(blank=True, null=True)
id = models.UUIDField(primary_key=True)
provider_role_code = models.CharField(max_length=10, blank=True, null=True)
- other_phone = models.ForeignKey(
- IndividualToPhone, models.DO_NOTHING, blank=True, null=True)
- other_endpoint = models.ForeignKey(
- Endpoint, models.DO_NOTHING, blank=True, null=True)
+ other_phone = models.ForeignKey(IndividualToPhone, models.DO_NOTHING, blank=True, null=True)
+ other_endpoint = models.ForeignKey(Endpoint, models.DO_NOTHING, blank=True, null=True)
active = models.BooleanField(blank=True, null=True)
provider_to_organization = models.ForeignKey(
- 'ProviderToOrganization', models.DO_NOTHING, blank=True, null=True)
+ "ProviderToOrganization", models.DO_NOTHING, blank=True, null=True
+ )
class Meta:
managed = False
- db_table = 'provider_to_location'
+ db_table = "provider_to_location"
class ProviderToOrganization(models.Model):
- individual = models.ForeignKey(
- Provider, models.DO_NOTHING, to_field='individual_id')
+ individual = models.ForeignKey(Provider, models.DO_NOTHING)
organization = models.ForeignKey(Organization, models.DO_NOTHING)
- relationship_type = models.ForeignKey(
- 'RelationshipType', models.DO_NOTHING)
+ relationship_type = models.ForeignKey("RelationshipType", models.DO_NOTHING)
id = models.UUIDField(primary_key=True)
active = models.BooleanField(blank=True, null=True)
class Meta:
managed = False
- db_table = 'provider_to_organization'
- unique_together = (
- ('individual', 'organization', 'relationship_type'),)
+ db_table = "provider_to_organization"
+ unique_together = (("individual", "organization", "relationship_type"),)
class ProviderToOtherId(models.Model):
- pk = models.CompositePrimaryKey(
- 'npi', 'other_id', 'other_id_type_id', 'issuer', 'state_code')
- npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column='npi')
+ pk = models.CompositePrimaryKey("npi", "other_id", "other_id_type_id", "issuer", "state_code")
+ npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column="npi", to_field="npi")
other_id = models.CharField(max_length=100)
other_id_type = models.ForeignKey(OtherIdType, models.DO_NOTHING)
- state_code = models.ForeignKey(
- FipsState, models.DO_NOTHING, db_column='state_code')
+ state_code = models.ForeignKey(FipsState, models.DO_NOTHING, db_column="state_code")
issuer = models.CharField(max_length=100)
class Meta:
managed = False
- db_table = 'provider_to_other_id'
+ db_table = "provider_to_other_id"
class ProviderToTaxonomy(models.Model):
- npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column='npi')
- nucc_code = models.ForeignKey(
- Nucc, models.DO_NOTHING, db_column='nucc_code')
+ npi = models.ForeignKey(Provider, models.DO_NOTHING, db_column="npi", to_field="npi")
+ nucc_code = models.ForeignKey(Nucc, models.DO_NOTHING, db_column="nucc_code")
is_primary = models.BooleanField(blank=True, null=True)
id = models.UUIDField(primary_key=True)
class Meta:
managed = False
- db_table = 'provider_to_taxonomy'
- unique_together = (('npi', 'nucc_code'),)
+ db_table = "provider_to_taxonomy"
+ unique_together = (("npi", "nucc_code"),)
class RelationshipType(models.Model):
@@ -805,4 +753,4 @@ class RelationshipType(models.Model):
class Meta:
managed = False
- db_table = 'relationship_type'
+ db_table = "relationship_type"
diff --git a/backend/npdfhir/pagination.py b/backend/npdfhir/pagination.py
index 119dc4ee..ef8e9594 100644
--- a/backend/npdfhir/pagination.py
+++ b/backend/npdfhir/pagination.py
@@ -1,6 +1,7 @@
from rest_framework.pagination import PageNumberPagination
+
class CustomPaginator(PageNumberPagination):
page_size_query_param = "page_size"
- max_page_size = 100
- page_size = 10
\ No newline at end of file
+ max_page_size = 1000
+ page_size = 10
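+ # Default is 10 results per page; clients may request more via ?page_size=, capped at 1000.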
diff --git a/backend/npdfhir/renderers.py b/backend/npdfhir/renderers.py
index 3431e0f4..3010e5a9 100644
--- a/backend/npdfhir/renderers.py
+++ b/backend/npdfhir/renderers.py
@@ -6,5 +6,5 @@ class FHIRRenderer(JSONRenderer):
Custom renderer for FHIR resources that set the proper content type
"""
- media_type="application/fhir+json"
- format="fhir+json"
+ media_type = "application/fhir+json"
+ format = "fhir+json"
diff --git a/backend/npdfhir/router.py b/backend/npdfhir/router.py
index 24d97516..e4ea3365 100644
--- a/backend/npdfhir/router.py
+++ b/backend/npdfhir/router.py
@@ -24,13 +24,10 @@ def __init__(self, *args, **kwargs):
router = OptionalSlashRouter()
-router.register(r"Practitioner", views.FHIRPractitionerViewSet,
- basename="fhir-practitioner")
-router.register(r"Organization", views.FHIROrganizationViewSet,
- basename="fhir-organization")
-router.register(r"Endpoint", views.FHIREndpointViewSet,
- basename="fhir-endpoint")
-router.register(r"PractitionerRole", views.FHIRPractitionerRoleViewSet,
- basename="fhir-practitionerrole")
-router.register(r"Location", views.FHIRLocationViewSet,
- basename="fhir-location")
+router.register(r"Practitioner", views.FHIRPractitionerViewSet, basename="fhir-practitioner")
+router.register(r"Organization", views.FHIROrganizationViewSet, basename="fhir-organization")
+router.register(r"Endpoint", views.FHIREndpointViewSet, basename="fhir-endpoint")
+router.register(
+ r"PractitionerRole", views.FHIRPractitionerRoleViewSet, basename="fhir-practitionerrole"
+)
+router.register(r"Location", views.FHIRLocationViewSet, basename="fhir-location")
diff --git a/backend/npdfhir/serializers.py b/backend/npdfhir/serializers.py
index ed9a0ccb..0d8dff12 100644
--- a/backend/npdfhir/serializers.py
+++ b/backend/npdfhir/serializers.py
@@ -1,32 +1,31 @@
import sys
+from datetime import datetime, timezone
from django.urls import reverse
from fhir.resources.R4B.address import Address
from fhir.resources.R4B.bundle import Bundle
+from fhir.resources.R4B.capabilitystatement import (
+ CapabilityStatement,
+ CapabilityStatementImplementation,
+ CapabilityStatementRest,
+ CapabilityStatementRestResource,
+ CapabilityStatementRestResourceSearchParam,
+)
from fhir.resources.R4B.codeableconcept import CodeableConcept
from fhir.resources.R4B.coding import Coding
+from fhir.resources.R4B.contactdetail import ContactDetail
from fhir.resources.R4B.contactpoint import ContactPoint
from fhir.resources.R4B.endpoint import Endpoint
from fhir.resources.R4B.humanname import HumanName
from fhir.resources.R4B.identifier import Identifier
from fhir.resources.R4B.location import Location as FHIRLocation
-from fhir.resources.R4B.contactdetail import ContactDetail
from fhir.resources.R4B.meta import Meta
from fhir.resources.R4B.organization import Organization as FHIROrganization
from fhir.resources.R4B.period import Period
from fhir.resources.R4B.practitioner import Practitioner, PractitionerQualification
from fhir.resources.R4B.practitionerrole import PractitionerRole
from fhir.resources.R4B.reference import Reference
-from fhir.resources.R4B.capabilitystatement import (
- CapabilityStatement,
- CapabilityStatementRest,
- CapabilityStatementRestResource,
- CapabilityStatementRestResourceSearchParam,
- CapabilityStatementImplementation
-)
-from datetime import datetime, timezone
from rest_framework import serializers
-from .utils import get_schema_data, genReference
from .models import (
IndividualToPhone,
@@ -36,37 +35,38 @@
OrganizationToName,
ProviderToOrganization,
)
+from .utils import genReference, get_schema_data
-if 'runserver' or 'test' in sys.argv:
+ if "runserver" or "test" in sys.argv:  # NOTE: always True ("runserver" is truthy); the cache imports below always run
from .cache import (
fhir_name_use,
fhir_phone_use,
nucc_taxonomy_codes,
- other_identifier_type,
)
class AddressSerializer(serializers.Serializer):
- delivery_line_1 = serializers.CharField(
- source='addressus__delivery_line_1', read_only=True)
- delivery_line_2 = serializers.CharField(
- source='addressus__delivery_line_2', read_only=True)
- city_name = serializers.CharField(
- source='addressus__city_name', read_only=True)
+ delivery_line_1 = serializers.CharField(source="addressus__delivery_line_1", read_only=True)
+ delivery_line_2 = serializers.CharField(source="addressus__delivery_line_2", read_only=True)
+ city_name = serializers.CharField(source="addressus__city_name", read_only=True)
state_abbreviation = serializers.CharField(
- source='addressus__fipsstate__abbrev', read_only=True)
- zipcode = serializers.CharField(
- source='addressus__zipcode', read_only=True)
- use = serializers.CharField(
- source='address_use__value', read_only=True)
+ source="addressus__fipsstate__abbrev", read_only=True
+ )
+ zipcode = serializers.CharField(source="addressus__zipcode", read_only=True)
+ use = serializers.CharField(source="address_use__value", read_only=True)
class Meta:
- fields = ['delivery_line_1', 'delivery_line_2',
- 'city_name', 'state_abbreviation', 'zipcode', 'use']
+ fields = [
+ "delivery_line_1",
+ "delivery_line_2",
+ "city_name",
+ "state_abbreviation",
+ "zipcode",
+ "use",
+ ]
def to_representation(self, instance):
- representation = super().to_representation(instance)
- if hasattr(instance, 'address'):
+ if hasattr(instance, "address"):
address = instance.address.address_us
else:
address = instance.address_us
@@ -78,10 +78,11 @@ def to_representation(self, instance):
city=address.city_name,
state=address.state_code.abbreviation,
postalCode=address.zipcode,
- country='US'
+ country="US",
)
- if 'use' in representation.keys():
- address.use = representation['use'],
+
+ if hasattr(instance, "address_use"):
+ address.use = instance.address_use.value
return address.model_dump()
@@ -89,7 +90,7 @@ class EmailSerializer(serializers.Serializer):
email_address = serializers.CharField(read_only=True)
class Meta:
- fields = ['email_address']
+ fields = ["email_address"]
def to_representation(self, instance):
email_contact = ContactPoint(
@@ -101,45 +102,47 @@ def to_representation(self, instance):
class PhoneSerializer(serializers.Serializer):
-
class Meta:
model = IndividualToPhone
- fields = ['phone_number', 'phone_use_id', 'extension']
+ fields = ["phone_number", "phone_use_id", "extension"]
def to_representation(self, instance):
phone_contact = ContactPoint(
- system='phone',
+ system="phone",
use=fhir_phone_use[str(instance.phone_use_id)],
- value=f"{instance.phone_number}"
+ value=f"{instance.phone_number}",
)
if instance.extension is not None:
- phone_contact.value += f'ext. {instance.extension}'
+ phone_contact.value += f"ext. {instance.extension}"
return phone_contact.model_dump()
class TaxonomySerializer(serializers.Serializer):
- id = serializers.CharField(source='nucc__code', read_only=True)
- display_name = serializers.CharField(
- source='nucc__display_name', read_only=True)
+ id = serializers.CharField(source="nucc__code", read_only=True)
+ display_name = serializers.CharField(source="nucc__display_name", read_only=True)
class Meta:
- fields = ['id', 'display_name']
+ fields = ["id", "display_name"]
def to_representation(self, instance):
code = CodeableConcept(
- coding=[Coding(
- system="http://nucc.org/provider-taxonomy",
- code=instance.nucc_code_id,
- display=nucc_taxonomy_codes[str(instance.nucc_code_id)]
- )]
+ coding=[
+ Coding(
+ system="http://nucc.org/provider-taxonomy",
+ code=instance.nucc_code_id,
+ display=nucc_taxonomy_codes[str(instance.nucc_code_id)],
+ )
+ ]
)
qualification = PractitionerQualification(
- identifier=[Identifier(
- value="test",
- type=code, # TODO: Replace
- period=Period()
- )],
- code=code
+ identifier=[
+ Identifier(
+ value="test",
+ type=code, # TODO: Replace
+ period=Period(),
+ )
+ ],
+ code=code,
)
return qualification.model_dump()
@@ -150,28 +153,30 @@ class OtherIdentifierSerializer(serializers.Serializer):
expiry_date = serializers.DateField(read_only=True)
class Meta:
- fields = ['value', 'issue_date', 'expiry_date', 'other_identifier_type',
- 'other_identifier_type_id', 'other_identifier_type_value']
-
- def to_representation(self, id):
+ fields = [
+ "value",
+ "issue_date",
+ "expiry_date",
+ "other_identifier_type",
+ "other_identifier_type_id",
+ "other_identifier_type_value",
+ ]
- other_identifier_type_id = id.other_identifier_type_id
+ def to_representation(self, instance):
license_identifier = Identifier(
# system="", TODO: Figure out how to associate a system with each identifier
- value=id.value,
+ value=instance.other_id,
type=CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/v2-0203",
- code=str(other_identifier_type_id),
- display=other_identifier_type[str(
- other_identifier_type_id)]
- )]
+ coding=[
+ Coding(
+ system="http://terminology.hl7.org/CodeSystem/v2-0203",
+ code=str(instance.other_id_type.value),
+ display=instance.other_id,
+ )
+ ]
),
# use="" TODO: Add use for other identifier
- period=Period(
- start=id.issue_date,
- end=id.expiry_date
- )
+ # period=Period(start=instance.issue_date, end=instance.expiry_date),
)
return license_identifier.model_dump()
@@ -186,24 +191,36 @@ class NameSerializer(serializers.Serializer):
suffix = serializers.CharField(read_only=True)
class Meta:
- fields = ['last_name', 'first_name', 'middle_name',
- 'start_date', 'end_date', 'prefix', 'suffix']
+ fields = [
+ "last_name",
+ "first_name",
+ "middle_name",
+ "start_date",
+ "end_date",
+ "prefix",
+ "suffix",
+ ]
def to_representation(self, name):
-
- name_parts = [part for part in [name.prefix, name.first_name,
- name.middle_name, name.last_name, name.suffix] if part != '' and part is not None]
+ name_parts = [
+ part
+ for part in [
+ name.prefix,
+ name.first_name,
+ name.middle_name,
+ name.last_name,
+ name.suffix,
+ ]
+ if part != "" and part is not None
+ ]
human_name = HumanName(
use=fhir_name_use[str(name.name_use_id)],
- text=' '.join(name_parts),
+ text=" ".join(name_parts),
family=name.last_name,
given=[name.first_name, name.middle_name],
prefix=[name.prefix],
suffix=[name.suffix],
- period=Period(
- start=name.start_date,
- end=name.end_date
- )
+ period=Period(start=name.start_date, end=name.end_date),
)
return human_name.model_dump()
@@ -211,35 +228,29 @@ def to_representation(self, name):
class NPISerializer(serializers.ModelSerializer):
class Meta:
model = Npi
- fields = '__all__'
+ fields = "__all__"
class IndividualSerializer(serializers.Serializer):
- name = NameSerializer(
- source='individualtoname_set', read_only=True, many=True)
- email = EmailSerializer(
- source='individualtoemail_set', read_only=True, many=True)
- phone = PhoneSerializer(
- source='individualtophone_set', many=True, read_only=True)
- address = AddressSerializer(
- source='individualtoaddress_set', many=True, read_only=True)
+ name = NameSerializer(source="individualtoname_set", read_only=True, many=True)
+ email = EmailSerializer(source="individualtoemail_set", read_only=True, many=True)
+ phone = PhoneSerializer(source="individualtophone_set", many=True, read_only=True)
+ address = AddressSerializer(source="individualtoaddress_set", many=True, read_only=True)
class Meta:
- fields = ['name', 'email', 'phone']
+ fields = ["name", "email", "phone"]
def to_representation(self, instance):
representation = super().to_representation(instance)
- individual = {
- 'name': representation['name']
- }
+ individual = {"name": representation["name"]}
telecom = []
- if 'phone' in representation.keys():
- telecom += representation['phone']
- if 'email' in representation.keys():
- telecom += representation['email']
- individual['telecom'] = telecom
- if representation['address'] != []:
- individual['address'] = representation['address']
+ if "phone" in representation.keys():
+ telecom += representation["phone"]
+ if "email" in representation.keys():
+ telecom += representation["email"]
+ individual["telecom"] = telecom
+ if representation["address"] != []:
+ individual["address"] = representation["address"]
return individual
@@ -249,20 +260,22 @@ class OrganizationNameSerializer(serializers.Serializer):
class Meta:
model = OrganizationToName
- fields = ['name', 'is_primary']
+ fields = ["name", "is_primary"]
class EndpointPayloadSeriazlier(serializers.Serializer):
class Meta:
- fields = ['type', 'mime_type']
+ fields = ["type", "mime_type"]
def to_representation(self, instance):
payload_type = CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/endpoint-payload-type",
- code=instance.payload_type.id,
- display=instance.payload_type.value
- )]
+ coding=[
+ Coding(
+ system="http://terminology.hl7.org/CodeSystem/endpoint-payload-type",
+ code=instance.payload_type.id,
+ display=instance.payload_type.value,
+ )
+ ]
)
return payload_type
@@ -270,7 +283,7 @@ def to_representation(self, instance):
class EndpointIdentifierSerialzier(serializers.Serializer):
class Meta:
- fields = ['identifier', 'system', 'value', 'assigner']
+ fields = ["identifier", "system", "value", "assigner"]
def to_representation(self, instance):
endpoint_identifier = Identifier(
@@ -278,47 +291,44 @@ def to_representation(self, instance):
system=instance.system,
value=instance.other_id,
# TODO: Replace with Organization reference
- assigner=Reference(display=str(instance.issuer_id))
+ assigner=Reference(display=str(instance.issuer_id)),
)
return endpoint_identifier.model_dump()
class OrganizationSerializer(serializers.Serializer):
- name = OrganizationNameSerializer(
- source='organizationtoname_set', many=True, read_only=True)
+ name = OrganizationNameSerializer(source="organizationtoname_set", many=True, read_only=True)
authorized_official = IndividualSerializer(read_only=True)
- address = AddressSerializer(
- source='organizationtoaddress_set', many=True, read_only=True)
+ address = AddressSerializer(source="organizationtoaddress_set", many=True, read_only=True)
class Meta:
model = Organization
- fields = '__all__'
+ fields = "__all__"
def to_representation(self, instance):
+ request = self.context.get("request")
representation = super().to_representation(instance)
organization = FHIROrganization()
organization.id = str(instance.id)
organization.meta = Meta(
- profile=[
- "http://hl7.org/fhir/us/core/StructureDefinition/us-core-organization"]
+ profile=["http://hl7.org/fhir/us/core/StructureDefinition/us-core-organization"]
)
identifiers = []
taxonomies = []
-
- if instance.ein:
- ein_identifier = Identifier(
- system="https://terminology.hl7.org/NamingSystem-USEIN.html",
- value=str(instance.ein.ein_id),
- type=CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/v2-0203",
- code="TAX",
- display="Tax ID number"
- )]
- )
- )
- identifiers.append(ein_identifier)
+ # if instance.ein:
+ # ein_identifier = Identifier(
+ # system="https://terminology.hl7.org/NamingSystem-USEIN.html",
+ # value=str(instance.ein.ein_id),
+ # type=CodeableConcept(
+ # coding=[Coding(
+ # system="http://terminology.hl7.org/CodeSystem/v2-0203",
+ # code="TAX",
+ # display="Tax ID number"
+ # )]
+ # )
+ # )
+ # identifiers.append(ein_identifier)
if hasattr(instance, "clinicalorganization"):
clinical_org = instance.clinicalorganization
@@ -327,17 +337,19 @@ def to_representation(self, instance):
system="http://terminology.hl7.org/NamingSystem/npi",
value=str(clinical_org.npi.npi),
type=CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/v2-0203",
- code="PRN",
- display="Provider number"
- )]
+ coding=[
+ Coding(
+ system="http://terminology.hl7.org/CodeSystem/v2-0203",
+ code="PRN",
+ display="Provider number",
+ )
+ ]
),
- use='official',
+ use="official",
period=Period(
start=clinical_org.npi.enumeration_date,
- end=clinical_org.npi.deactivation_date
- )
+ end=clinical_org.npi.deactivation_date,
+ ),
)
identifiers.append(npi_identifier)
@@ -346,31 +358,36 @@ def to_representation(self, instance):
system=str(other_id.other_id_type_id),
value=other_id.other_id,
type=CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/v2-0203",
- code="test", # do we define this based on the type of id it is?
- display="test" # same as above ^
- )]
- )
+ coding=[
+ Coding(
+ system="http://terminology.hl7.org/CodeSystem/v2-0203",
+ code="test", # do we define this based on the type of id it is?
+ display="test", # same as above ^
+ )
+ ]
+ ),
)
identifiers.append(other_identifier)
for taxonomy in clinical_org.organizationtotaxonomy_set.all():
code = CodeableConcept(
- coding=[Coding(
- system="http://nucc.org/provider-taxonomy",
- code=taxonomy.nucc_code_id,
- display=nucc_taxonomy_codes[str(
- taxonomy.nucc_code_id)]
- )]
+ coding=[
+ Coding(
+ system="http://nucc.org/provider-taxonomy",
+ code=taxonomy.nucc_code_id,
+ display=nucc_taxonomy_codes[str(taxonomy.nucc_code_id)],
+ )
+ ]
)
qualification = PractitionerQualification(
- identifier=[Identifier(
- value="test",
- type=code, # TODO: Replace
- period=Period()
- )],
- code=code
+ identifier=[
+ Identifier(
+ value="test",
+ type=code, # TODO: Replace
+ period=Period(),
+ )
+ ],
+ code=code,
)
taxonomies.append(qualification.model_dump())
# TODO extend based on US core
@@ -379,28 +396,39 @@ def to_representation(self, instance):
organization.identifier = identifiers
- names = representation.get('name', [])
- primary_names = [n['name'] for n in names if n['is_primary']]
- alias_names = [n['name'] for n in names if not n['is_primary']]
+ # NPPES data ingested into the core data model distinguishes a primary organization name from
+ # other organization names, which maps to FHIR organization.name (1:1) and organization.alias (1:M).
+ # The Halloween data lack that distinction, so pick one name for organization.name (preferring
+ # is_primary when present) and assign any remaining names to organization.alias.
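+ # e.g. [{"name": "Acme Clinic", "is_primary": True}, {"name": "Acme", "is_primary": False}]
+ # yields organization.name == "Acme Clinic" and organization.alias == ["Acme"].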
+ names = representation.get("name", [])
+ primary_names = [(i, n) for i, n in enumerate(names) if n["is_primary"]]
+ aliases = []
if primary_names:
- organization.name = primary_names[0]
+ organization.name = primary_names[0][1]["name"]
+ primary_name_index = primary_names[0][0]
+ del names[primary_name_index]
+ aliases = [n["name"] for n in names]
elif names:
- organization.name = names[0]['name']
-
- if alias_names:
- organization.alias = alias_names
+ organization.name = names[0]["name"]
+ if len(names) > 1:
+ aliases = [n["name"] for n in names[1:]]
+ if aliases:
+ organization.alias = aliases
+
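+ # partOf references the parent organization's FHIR resource when a parent is set.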
+ if instance.parent_id is not None:
+ organization.partOf = genReference(
+ "fhir-organization-detail", instance.parent_id, request
+ )
- if hasattr(instance, "authorized_official"):
- authorized_official = representation['authorized_official']
+ if hasattr(instance, "authorized_official") and instance.authorized_official is not None:
+ authorized_official = representation["authorized_official"]
# r4 only allows one name for contact. TODO update to ndh
- authorized_official['name'] = authorized_official['name'][0]
+ authorized_official["name"] = authorized_official["name"][0]
- if representation['address'] != []:
- authorized_official['address'] = representation['address'][0]
+ if representation["address"] != []:
+ authorized_official["address"] = representation["address"][0]
else:
- if 'address' in authorized_official.keys():
- del authorized_official['address']
+ if "address" in authorized_official.keys():
+ del authorized_official["address"]
organization.contact = [authorized_official]
return organization.model_dump()
@@ -410,47 +438,48 @@ class PractitionerSerializer(serializers.Serializer):
npi = NPISerializer()
individual = IndividualSerializer(read_only=True)
identifier = OtherIdentifierSerializer(
- source='providertootheridentifier_set', many=True, read_only=True)
- taxonomy = TaxonomySerializer(
- source='providertotaxonomy_set', many=True, read_only=True)
+ source="providertootherid_set", many=True, read_only=True
+ )
+ taxonomy = TaxonomySerializer(source="providertotaxonomy_set", many=True, read_only=True)
class Meta:
- fields = ['npi', 'name', 'email', 'phone', 'identifier', 'taxonomy']
+ fields = ["npi", "name", "email", "phone", "identifier", "taxonomy"]
def to_representation(self, instance):
representation = super().to_representation(instance)
practitioner = Practitioner()
practitioner.id = str(instance.individual.id)
practitioner.meta = Meta(
- profile=[
- "http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner"]
+ profile=["http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner"]
)
npi_identifier = Identifier(
system="http://terminology.hl7.org/NamingSystem/npi",
value=str(instance.npi.npi),
type=CodeableConcept(
- coding=[Coding(
- system="http://terminology.hl7.org/CodeSystem/v2-0203",
- code="PRN",
- display="Provider number"
- )]
+ coding=[
+ Coding(
+ system="http://terminology.hl7.org/CodeSystem/v2-0203",
+ code="PRN",
+ display="Provider number",
+ )
+ ]
),
- use='official',
- period=Period(
- start=instance.npi.enumeration_date,
- end=instance.npi.deactivation_date
- )
+ use="official",
+ period=Period(start=instance.npi.enumeration_date, end=instance.npi.deactivation_date),
)
- if representation['individual']['telecom'] != []:
- practitioner.telecom = representation['individual']['telecom']
- if 'address' in representation['individual'].keys() and representation['individual']['address'] != []:
- practitioner.address = representation['individual']['address']
+ if representation["individual"]["telecom"] != []:
+ practitioner.telecom = representation["individual"]["telecom"]
+ if (
+ "address" in representation["individual"].keys()
+ and representation["individual"]["address"] != []
+ ):
+ practitioner.address = representation["individual"]["address"]
practitioner.identifier = [npi_identifier]
- if 'identifier' in representation.keys():
- practitioner.identifier += representation['identifier']
- practitioner.name = representation['individual']['name']
- if 'taxonomy' in representation.keys():
- practitioner.qualification = representation['taxonomy']
+ if "identifier" in representation.keys():
+ practitioner.identifier += representation["identifier"]
+ practitioner.name = representation["individual"]["name"]
+ if "taxonomy" in representation.keys():
+ practitioner.qualification = representation["taxonomy"]
return practitioner.model_dump()
@@ -462,21 +491,22 @@ class Meta:
model = Location
def to_representation(self, instance):
- request = self.context.get('request')
+ request = self.context.get("request")
representation = super().to_representation(instance)
location = FHIRLocation()
location.id = str(instance.id)
if instance.active:
- location.status = 'active'
+ location.status = "active"
else:
- location.status = 'inactive'
+ location.status = "inactive"
location.name = instance.name
# if 'phone' in representation.keys():
# location.telecom = representation['phone']
- if 'address' in representation.keys():
- location.address = representation['address']
+ if "address" in representation.keys():
+ location.address = representation["address"]
location.managingOrganization = genReference(
- 'fhir-organization-detail', instance.organization_id, request)
+ "fhir-organization-detail", instance.organization_id, request
+ )
return location.model_dump()
@@ -487,17 +517,20 @@ class Meta:
model = ProviderToOrganization
def to_representation(self, instance):
- request = self.context.get('request')
- representation = super().to_representation(instance)
+ request = self.context.get("request")
+ # representation = super().to_representation(instance)
practitioner_role = PractitionerRole()
practitioner_role.id = str(instance.id)
practitioner_role.active = instance.active
practitioner_role.practitioner = genReference(
- 'fhir-practitioner-detail', instance.provider_to_organization.individual_id, request)
+ "fhir-practitioner-detail", instance.provider_to_organization.individual_id, request
+ )
practitioner_role.organization = genReference(
- 'fhir-organization-detail', instance.provider_to_organization.organization_id, request)
- practitioner_role.location = [genReference(
- 'fhir-location-detail', instance.location.id, request)]
+ "fhir-organization-detail", instance.provider_to_organization.organization_id, request
+ )
+ practitioner_role.location = [
+ genReference("fhir-location-detail", instance.location.id, request)
+ ]
# These lines rely on the fhir.resources.R4B representation of PractitionerRole to be expanded to match the ndh FHIR definition. This is a TODO with an open ticket.
# if 'other_phone' in representation.keys():
# practitioner_role.telecom = representation['other_phone']
@@ -507,46 +540,69 @@ def to_representation(self, instance):
class EndpointSerializer(serializers.Serializer):
payload = EndpointPayloadSeriazlier(
- source='endpointinstancetopayload_set', many=True, read_only=True)
+ source="endpointinstancetopayload_set", many=True, read_only=True
+ )
identifier = EndpointIdentifierSerialzier(
- source='endpointinstancetootherid_set', many=True, read_only=True
+ source="endpointinstancetootherid_set", many=True, read_only=True
)
class Meta:
- fields = ['id', 'ehr_vendor', 'address', 'endpoint_connection_type',
- 'name', 'description', 'endpoint_instance']
+ fields = [
+ "id",
+ "ehr_vendor",
+ "address",
+ "endpoint_connection_type",
+ "name",
+ "description",
+ "endpoint_instance",
+ ]
def to_representation(self, instance):
+ # request = self.context.get("request")
representation = super().to_representation(instance)
- connection_type = Coding(
- system="http://terminology.hl7.org/CodeSystem/endpoint-connection-type",
- code=instance.endpoint_connection_type.id,
- display=instance.endpoint_connection_type.display
- )
+ if instance.endpoint_connection_type:
+ connection_type = Coding(
+ system="http://terminology.hl7.org/CodeSystem/endpoint-connection-type",
+ code=instance.endpoint_connection_type.id,
+ display=instance.endpoint_connection_type.display,
+ )
+ else:
+ # TODO: temporary default while connection type data is insufficient
+ connection_type = Coding(
+ system="http://terminology.hl7.org/CodeSystem/endpoint-connection-type",
+ code="hl7-fhir-rest",
+ display="HL7 FHIR",
+ )
- environment_type = [CodeableConcept(
- coding=[Coding(
- system="https://hl7.org/fhir/valueset-endpoint-environment.html",
- code=instance.environment_type.id,
- display=instance.environment_type.display
- )]
- )]
+ ## TODO extend base fhir spec to ndh spec
+ # if instance.environment_type:
+ # environment_type = [
+ # CodeableConcept(
+ # coding=[
+ # Coding(
+ # system="https://hl7.org/fhir/valueset-endpoint-environment.html",
+ # code=instance.environment_type.id,
+ # display=instance.environment_type.display,
+ # )
+ # ]
+ # )
+ # ]
endpoint = Endpoint(
id=str(instance.id),
- identifier=representation['identifier'],
- status="active", # hardcoded for now
+ identifier=representation["identifier"],
+ status="active", # TODO hardcoded for now
connectionType=connection_type,
name=instance.name,
# TODO extend base fhir spec to ndh spec description=instance.description,
# TODO extend base fhir spec to ndh spec environmentType=environment_type,
- # managingOrganization=Reference(managing_organization), ~ organization/npi or whatever we use as the organization identifier
- # contact=ContactPoint(contact), ~ still gotta figure this out
- # period=Period(period), ~ still gotta figure this out
- payloadType=representation['payload'],
+ # managingOrganization=genReference(
+ # 'fhir-organization-detail', instance.location.organization_id, request),
+ # contact=ContactPoint(contact),
+ # period=Period(period),
+ payloadType=representation["payload"],
address=instance.address,
- header=["application/fhir"] # hardcoded for now
)
return endpoint.model_dump()
@@ -557,39 +613,31 @@ class CapabilityStatementSerializer(serializers.Serializer):
Serializer for FHIR CapabilityStatement resource
"""
- def to_representation(self, instance):
- request = self.context.get('request')
- baseURL = request.build_absolute_uri('/fhir')
- metadataURL = request.build_absolute_uri(reverse('fhir-metadata'))
- schemaData = get_schema_data(request, 'schema')
+ def to_representation(self):
+ request = self.context.get("request")
+ baseURL = request.build_absolute_uri("/fhir")
+ metadataURL = request.build_absolute_uri(reverse("fhir-metadata"))
+ schemaData = get_schema_data(request)
capability_statement = CapabilityStatement(
url=metadataURL,
- version=schemaData['info']['version'],
+ version=schemaData["info"]["version"],
name="FHIRCapablityStatement",
title=f"{schemaData['info']['title']} - FHIR Capablity Statement",
status="active",
date=datetime.now(timezone.utc),
publisher="CMS",
contact=[
- ContactDetail(
- telecom=[
- ContactPoint(
- system="email",
- value="npd@cms.hhs.gov"
- )
- ]
- )
+ ContactDetail(telecom=[ContactPoint(system="email", value="npd@cms.hhs.gov")])
],
description="This CapabilityStatement describes the capabilities of the National Provider Directory FHIR API, including supported resources, search parameters, and operations.",
kind="instance",
implementation=CapabilityStatementImplementation(
- description=schemaData['info']['description'],
- url=baseURL
+ description=schemaData["info"]["description"], url=baseURL
),
fhirVersion="4.0.1",
format=["fhir+json"],
- rest=[self.build_rest_components(schemaData)]
+ rest=[self.build_rest_components(schemaData)],
)
return capability_statement.model_dump()
@@ -597,29 +645,28 @@ def to_representation(self, instance):
def build_rest_components(self, schemaData):
"""
Building out each REST component describing our endpoint capabilities
-
- To support a new Endpoint, just add it to the dictionary below following the same format
+
+ To support a new Endpoint, just add it to the dictionary below following the same format
"""
resources = {
"Practitioner": "/fhir/Practitioner/",
"Organization": "/fhir/Organization/",
"Endpoint": "/fhir/Endpoint/",
"Location": "/fhir/Location/",
- "PractitionerRole": "/fhir/PractitionerRole/"
+ "PractitionerRole": "/fhir/PractitionerRole/",
}
resource_capabilities = []
for resource_type, path in resources.items():
- if path in schemaData['paths']:
+ if path in schemaData["paths"]:
resource_capabilities.append(
- self.build_resource_capabilities(
- resource_type, schemaData['paths'][path])
+ self.build_resource_capabilities(resource_type, schemaData["paths"][path])
)
return CapabilityStatementRest(
mode="server",
documentation="All FHIR endpoints for the National Provider Directory",
- resource=resource_capabilities
+ resource=resource_capabilities,
)
def build_resource_capabilities(self, resource_type, schemaData):
@@ -630,17 +677,14 @@ def build_resource_capabilities(self, resource_type, schemaData):
CapabilityStatementRestResourceSearchParam(
name=param["name"],
type=param["schema"]["type"],
- documentation=param["description"]
+ documentation=param["description"],
)
)
return CapabilityStatementRestResource(
type=resource_type,
- interaction=[
- {"code": "read"},
- {"code": "search-type"}
- ],
- searchParam=searchParams
+ interaction=[{"code": "read"}, {"code": "search-type"}],
+ searchParam=searchParams,
)
@@ -648,6 +692,7 @@ class BundleSerializer(serializers.Serializer):
"""
Serializer for FHIR Bundle resource
"""
+
class Meta:
model = Bundle
@@ -655,13 +700,12 @@ def to_representation(self, instance):
entries = []
for resource in instance.data:
- request = self.context.get('request')
+ request = self.context.get("request")
# Get the resource type (Patient, Practitioner, etc.)
- resource_type = resource['resourceType']
- id = resource['id']
- url_name = f'fhir-{resource_type.lower()}-detail'
- full_url = request.build_absolute_uri(
- reverse(url_name, kwargs={'pk': id}))
+ resource_type = resource["resourceType"]
+ id = resource["id"]
+ url_name = f"fhir-{resource_type.lower()}-detail"
+ full_url = request.build_absolute_uri(reverse(url_name, kwargs={"id": id}))
# Create an entry for this resource
entry = {
"fullUrl": full_url,
@@ -671,10 +715,6 @@ def to_representation(self, instance):
entries.append(entry)
# Create the bundle
- bundle = Bundle(
- type="searchset",
- entry=entries,
- total=len(entries)
- )
+ bundle = Bundle(type="searchset", entry=entries, total=len(entries))
return bundle.model_dump()
diff --git a/backend/npdfhir/signals.py b/backend/npdfhir/signals.py
index 9e3350f7..81020abb 100644
--- a/backend/npdfhir/signals.py
+++ b/backend/npdfhir/signals.py
@@ -6,8 +6,6 @@
@receiver(signals.bind_extra_request_metadata)
def bind_trace_id(request, logger, **kwargs):
- trace_id = get_request_header(
- request, "x-amzn-trace-id", "HTTP_X_AMZN_TRACE_ID"
- )
+ trace_id = get_request_header(request, "x-amzn-trace-id", "HTTP_X_AMZN_TRACE_ID")
if trace_id:
structlog.contextvars.bind_contextvars(trace_id=trace_id)
diff --git a/backend/npdfhir/tests.py b/backend/npdfhir/tests.py
deleted file mode 100644
index bd0716d4..00000000
--- a/backend/npdfhir/tests.py
+++ /dev/null
@@ -1,821 +0,0 @@
-from django.urls import reverse
-from rest_framework import status
-from rest_framework.test import APITestCase as DrfAPITestCase, APIClient
-from django.db import connection
-from django.contrib.auth.models import User
-from fhir.resources.R4B.bundle import Bundle
-from fhir.resources.R4B.capabilitystatement import CapabilityStatement
-
-from .models import Organization
-
-# I can't explain why, but we need to import cacheData here. I think we can
-# remove this once we move to the docker db setup. By using "import thing as
-# thing", we silence "imported but unused" and "not accessed" warnings.
-from .cache import cacheData as cacheData
-
-
-def get_female_npis(npi_list):
- """
- Given a list of NPI numbers, return the subset that are female.
- """
- query = """
- SELECT p.npi, i.gender
- FROM npd.provider p
- JOIN npd.individual i ON p.individual_id = i.id
- WHERE p.npi = ANY(%s)
- AND i.gender = 'F'
- """
- with connection.cursor() as cursor:
- cursor.execute(query, [npi_list])
- results = cursor.fetchall()
-
- return results
-
-class APITestCase(DrfAPITestCase):
- @classmethod
- def setUpTestData(cls):
- cls.user = User.objects.create(username="testuser")
- cls.user.set_password('nothing')
- return super().setUpTestData()
-
- def setUp(self):
- self.client = APIClient()
- self.client.force_authenticate(user=self.user)
-
-
-class DocumentationViewSetTestCase(APITestCase):
- def test_get_swagger_docs(self):
- swagger_url = reverse("schema-swagger-ui")
- response = self.client.get(swagger_url)
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn('id="swagger-ui"', response.text)
-
- def test_get_redoc_docs(self):
- redoc_url = reverse("schema-redoc")
- response = self.client.get(redoc_url)
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn('redoc spec-url', response.text)
-
- def test_get_json_docs(self):
- json_docs_url = reverse("schema")
- response = self.client.get(json_docs_url)
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("application/vnd.oai.openapi+json", response["Content-Type"])
- self.assertIn("openapi", response.data.keys())
-
-
-class EndpointViewSetTestCase(APITestCase):
- def setUp(self):
- super().setUp()
- self.list_url = reverse("fhir-endpoint-list")
-
- def test_list_default(self):
- response = self.client.get(self.list_url)
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
- self.assertIn("results", response.data)
-
- def test_list_in_proper_order(self):
- url = self.list_url
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- # print(response.data["results"]["entry"][0]['resource']['name'])
-
- # Extract names
- # Note: have to normalize the names to have python sorting match sql
- names = [
- d['resource'].get('name', {})
- for d in response.data["results"]["entry"]
- ]
-
- sorted_names = [
- '88 MEDICINE LLC',
- 'AAIA of Tampa Bay, LLC',
- 'ABC Healthcare Service Base URL',
- 'A Better Way LLC',
- 'Abington Surgical Center',
- 'Access Mental Health Agency',
- 'ADHD & Autism Psychological Services PLLC',
- 'Adolfo C FernandezObregon Md',
- 'Advanced Anesthesia, LLC',
- 'Advanced Cardiovascular Center'
- ]
-
- self.assertEqual(
- names, sorted_names, f"Expected endpoints list sorted by name but got {names}\n Sorted: {sorted_names}")
-
- def test_list_returns_fhir_bundle(self):
- response = self.client.get(self.list_url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- data = response.json()
- bundle = Bundle.model_validate(data['results'])
-
- self.assertEqual(bundle.__resource_type__, "Bundle")
-
- def test_list_entries_are_fhir_endpoints(self):
- response = self.client.get(self.list_url)
-
- bundle = response.data["results"]
- self.assertGreater(len(bundle["entry"]), 0)
-
- first_entry = bundle["entry"][0]
- self.assertIn("resource", first_entry)
-
- endpoint_resource = first_entry["resource"]
- self.assertEqual(endpoint_resource["resourceType"], "Endpoint")
- self.assertIn("id", endpoint_resource)
- self.assertIn("status", endpoint_resource)
- self.assertIn("connectionType", endpoint_resource)
- self.assertIn("address", endpoint_resource)
-
- def test_pagination_custom_page_size(self):
- response = self.client.get(self.list_url, {"page_size": 2})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- bundle = response.data["results"]
- self.assertLessEqual(len(bundle["entry"]), 2)
-
- def test_pagination_enforces_maximum(self):
- response = self.client.get(self.list_url, {"page_size": 5000})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- bundle = response.data["results"]
- self.assertLessEqual(len(bundle["entry"]), 1000)
-
- def test_filter_by_name(self):
- response = self.client.get(
- self.list_url, {"name": "Kansas City Psychiatric Group"})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- bundle = response.data["results"]
-
- self.assertGreater(len(bundle["entry"]), 0)
-
- first_endpoint = bundle["entry"][0]["resource"]
-
- self.assertIn("name", first_endpoint)
- self.assertIn("Kansas City", first_endpoint["name"])
-
- def test_filter_by_connection_type(self):
- connection_type = "hl7-fhir-rest"
- response = self.client.get(
- self.list_url, {"endpoint_connection_type": connection_type})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- bundle = response.data["results"]
-
- entries = bundle.get("entry", [])
- self.assertGreater(len(entries), 0)
-
- first_endpoint = entries[0]["resource"]
- self.assertIn("connectionType", first_endpoint)
-
- code = first_endpoint["connectionType"]["code"]
- self.assertEqual(connection_type, code)
-
- def test_filter_by_payload_type(self):
- payload_type = "ccda-structuredBody:1.1"
- response = self.client.get(
- self.list_url, {"payload_type": payload_type})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- bundle = response.data["results"]
-
- entries = bundle.get("entry", [])
- self.assertGreater(len(entries), 0)
-
- first_endpoint = entries[0]["resource"]
- self.assertIn("payloadType", first_endpoint)
-
- code = first_endpoint["payloadType"][0]["coding"][0]["display"]
- self.assertEqual(payload_type, code)
-
- def test_filter_returns_empty_for_nonexistent_name(self):
- response = self.client.get(
- self.list_url, {"name": "NonexistentEndpointName12345"})
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- bundle = response.data["results"]
- self.assertEqual(len(bundle["entry"]), 0)
-
- def test_retrieve_specific_endpoint(self):
- list_response = self.client.get(self.list_url, {"page_size": 1})
- first_endpoint = list_response.data["results"]["entry"][0]["resource"]
-
- endpoint_id = first_endpoint["id"]
- detail_url = reverse("fhir-endpoint-detail", args=[endpoint_id])
-
- response = self.client.get(detail_url)
-
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- endpoint = response.data
- self.assertEqual(endpoint["resourceType"], "Endpoint")
- self.assertEqual(endpoint["id"], endpoint_id)
- self.assertIn("status", endpoint)
- self.assertIn("connectionType", endpoint)
- self.assertIn("address", endpoint)
-
- def test_retrieve_nonexistent_endpoint(self):
- detail_url = reverse("fhir-endpoint-detail",
- args=["12300000-0000-0000-0000-000000000123"])
- response = self.client.get(detail_url)
-
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_single_endpoint(self):
- id = "82cc98bb-afd0-4835-ada9-1437dfca8255"
- url = reverse("fhir-endpoint-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
-
-class BasicViewsTestCase(APITestCase):
- def test_health_view(self):
- url = reverse("healthCheck") # maps to "/healthCheck"
- response = self.client.get(url)
- res_obj = response.json()
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(res_obj['status'], "healthy")
-
-
-class OrganizationViewSetTestCase(APITestCase):
- def setUp(self):
- super().setUp()
- self.org_without_authorized_official = Organization.objects.create(
- id='26708690-19d6-499e-b481-cebe05b98f08', authorized_official_id=None)
-
- def test_list_default(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
- self.assertIn("results", response.data)
-
- def test_list_in_proper_order(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- # print(response.data["results"]["entry"][0]['resource']['name'])
-
- # Extract names
- names = [
- d['resource'].get('name', {})
- for d in response.data["results"]["entry"]
- ]
-
- sorted_names = [
- '1ST CHOICE HOME HEALTH CARE INC',
- '1ST CHOICE MEDICAL DISTRIBUTORS, LLC',
- '986 INFUSION PHARMACY #1 INC.',
- 'A & A MEDICAL SUPPLY COMPANY',
- 'ABACUS BUSINESS CORPORATION GROUP INC.',
- 'ABBY D CENTER, INC.',
- 'ABC DURABLE MEDICAL EQUIPMENT INC',
- 'ABC HOME MEDICAL SUPPLY, INC.',
- 'A BEAUTIFUL SMILE DENTISTRY, L.L.C.',
- 'A & B HEALTH CARE, INC.'
- ]
- self.assertEqual(
- names, sorted_names, f"Expected fhir orgs sorted by org name but got {names}\n Sorted: {sorted_names}")
-
- def test_list_with_custom_page_size(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"page_size": 2})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 2)
-
- def test_list_with_greater_than_max_page_size(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"page_size": 1001})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 1000)
-
- def test_list_filter_by_name(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"name": "Cumberland"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- def test_list_filter_by_organization_type(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"organization_type": "Hospital"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- def test_list_filter_by_npi_general(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"identifier": "1427051473"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- def test_list_filter_by_npi_specific(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"identifier": "NPI|1427051473"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- def test_list_filter_by_otherID_general(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"identifier": "testMBI"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- # def test_list_filter_by_otherID_specific(self):
- # url = reverse("fhir-organization-list")
- # response = self.client.get(url, {"identifier":" 1|001586989"})
- # self.assertEqual(response.status_code, status.HTTP_200_OK)
- # self.assertIn("results", response.data)
- # self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- def test_list_filter_by_ein_general(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(
- url, {"identifier": "22222222-2222-2222-2222-222222222222"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
- self.assertGreaterEqual(response.data["results"]["total"], 1)
-
- # def test_list_filter_by_ein_specific(self):
- # url = reverse("fhir-organization-list")
- # response = self.client.get(url, {"identifier":"USEIN|12-3456789"})
- # self.assertEqual(response.status_code, status.HTTP_200_OK)
- # self.assertIn("results", response.data)
-
- def test_retrieve_non_clinical_organization(self):
- url = reverse("fhir-organization-detail",
- args=["33333333-3333-3333-3333-333333333333"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- org = response.data
- self.assertEqual(org["resourceType"], "Organization")
- self.assertEqual(org["name"], "Joe Health Incorporated")
- self.assertEqual(org["identifier"][0]["type"]
- ["coding"][0]["code"], "TAX")
-
- def test_retrieve_nonexistent_uuid(self):
- url = reverse("fhir-organization-detail",
- args=["12300000-0000-0000-0000-000000000123"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_nonexistent_npi(self):
- url = reverse("fhir-organization-detail", args=["999999"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_single_organization(self):
- id = "501a620e-8521-4610-9717-b35a0597292e"
- url = reverse("fhir-organization-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
- def test_organization_without_authorized_official(self):
- id = self.org_without_authorized_official.pk
- url = reverse("fhir-organization-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
- def test_list_filter_by_address(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"address": "Main"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_city(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"address_city": "Boston"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_state(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"address_state": "NY"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_postalcode(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"address_postalcode": "10001"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_use(self):
- url = reverse("fhir-organization-list")
- response = self.client.get(url, {"address_use": "work"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
-
-class LocationViewSetTestCase(APITestCase):
-
- def test_list_default(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
- self.assertIn("results", response.data)
-
- def test_list_in_proper_order(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- # print(response.data["results"]["entry"][0]['resource']['name'])
-
- # Extract names
- names = [
- d['resource'].get('name', {})
- for d in response.data["results"]["entry"]
- ]
-
- sorted_names = [
- '1ST CHOICE MEDICAL DISTRIBUTORS, LLC',
- '986 INFUSION PHARMACY #1 INC.',
- 'A & A MEDICAL SUPPLY COMPANY',
- 'ABACUS BUSINESS CORPORATION GROUP INC.',
- 'ABBY D CENTER, INC.',
- 'ABC DURABLE MEDICAL EQUIPMENT INC',
- 'ABC HOME MEDICAL SUPPLY, INC.',
- 'A BEAUTIFUL SMILE DENTISTRY, L.L.C.',
- 'A & B HEALTH CARE, INC.',
- 'ABILENE HELPING HANDS INC'
- ]
-
- self.assertEqual(
- names, sorted_names, f"Expected fhir orgs sorted by org name but got {names}\n Sorted: {sorted_names}")
-
- def test_list_with_custom_page_size(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"page_size": 2})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 2)
-
- def test_list_with_greater_than_max_page_size(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"page_size": 1001})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 1000)
-
- def test_list_filter_by_name(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"name": "Cumberland"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"address": "Avenue"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_city(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"address_city": "Seattle"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_state(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"address_state": "TX"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_postalcode(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"address_postalcode": "90210"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_use(self):
- url = reverse("fhir-location-list")
- response = self.client.get(url, {"address_use": "work"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_retrieve_nonexistent(self):
- url = reverse("fhir-location-detail",
- args=['00000000-0000-0000-0000-000000000000'])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_single_location(self):
- id = "527c8a79-1294-47ab-afce-b571c89a4f2b"
- url = reverse("fhir-location-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
-
-class PractitionerViewSetTestCase(APITestCase):
-
- def test_list_default(self):
- url = reverse("fhir-practitioner-list") # /Practitioner/
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
- self.assertIn("results", response.data)
-
- def test_list_in_proper_order(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- # print(response.data["results"]["entry"][0]['resource']['name'][0])
-
- # for name in response.data["results"]["entry"]:
- # print(name['resource']['name'][-1])
-
- # Extract names
- names = [
- (d['resource']['name'][-1].get('family', {}),
- d['resource']['name'][-1]['given'][0])
- for d in response.data["results"]["entry"]
- ]
-
- sorted_names = [
- ('AADALEN', 'KIRK'),
- ('ABBAS', 'ASAD'),
- ('ABBOTT', 'BRUCE'),
- ('ABBOTT', 'PHILIP'),
- ('ABDELHALIM', 'AHMED'),
- ('ABDELHAMED', 'ABDELHAMED'),
- ('ABDEL NOUR', 'MAGDY'),
- ('ABEL', 'MICHAEL'),
- ('ABELES', 'JENNIFER'),
- ('ABELSON', 'MARK')
- ]
-
- self.assertEqual(
- names, sorted_names, f"Expected fhir orgs sorted by org name but got {names}\n Sorted: {sorted_names}")
-
- def test_list_with_custom_page_size(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"page_size": 2})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 2)
-
- def test_list_with_greater_than_max_page_size(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"page_size": 1001})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 1000)
-
- def test_list_filter_by_gender(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"gender": "Male"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- # Assert all required fields are present to get npi id
- self.assertIn("results", response.data)
- self.assertIn("entry", response.data['results'])
-
- npi_ids = []
- for practitioner_entry in response.data['results']['entry']:
- self.assertIn("resource", practitioner_entry)
- self.assertIn("id", practitioner_entry['resource'])
- npi_id = practitioner_entry['resource']['id']
- npi_ids.append(int(npi_id))
-
- # Check to make sure no female practitioners were fetched by mistake
- should_be_empty = get_female_npis(npi_ids)
- self.assertFalse(should_be_empty)
-
- def test_list_filter_by_name(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"name": "Smith"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_practitioner_type(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"practitioner_type": "Nurse"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_npi_general(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"identifier": "1234567890"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_npi_specific(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"identifier": "NPI|1234567890"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"address": "Street"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_city(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"address_city": "Springfield"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_state(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"address_state": "CA"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_postalcode(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"address_postalcode": "12345"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_address_use(self):
- url = reverse("fhir-practitioner-list")
- response = self.client.get(url, {"address_use": "home"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_retrieve_nonexistent(self):
- url = reverse("fhir-practitioner-detail", args=['999999'])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_nonexistent_uuid(self):
- url = reverse("fhir-practitioner-detail",
- args=["12300000-0000-0000-0000-000000000123"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_single_pracitioner(self):
- id = "b7a4ab09-3207-49c1-9f59-c1c07c75dfb5"
- url = reverse("fhir-practitioner-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
-
-class PractitionerRoleViewSetTestCase(APITestCase):
-
- def test_list_default(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
- self.assertIn("results", response.data)
-
- def test_list_in_proper_order(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- # Extract ids
- ids = [
- d['resource'].get('id', {})
- for d in response.data["results"]["entry"]
- ]
-
-
- #Corresponds to the following location name order
- """
- A BEAUTIFUL SMILE DENTISTRY, L.L.C.
- ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC
- ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC
- ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC
- ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC
- ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC
- ADR LLC
- ADR LLC
- ADR LLC
- ADR LLC
- """
-
- sorted_ids = [
- 'e9554c87-6e4e-4df6-93fb-88ee4bc4e5be',
- '9f50dfd8-098a-4e6d-a4ad-ded2175a5321',
- '90011f74-1c0d-4461-95b5-cb346cdbc64b',
- '874c25e0-44fd-48e9-832a-a80f1d07491a',
- '0ba12b55-05e1-450f-8a2c-454a93425a34',
- '38eac005-9373-44ab-bf5d-57b84bca7cb4',
- 'cd3fe6b7-02b0-4136-8db8-4c3867aab131',
- '093091b7-aba7-4acb-8338-65996de10813',
- '2e18cd31-4a89-475b-82be-71ad75011713',
- '59ef9dd6-60e8-4a64-a52c-6f44c540184f'
- ]
-
-
- self.assertEqual(
- ids, sorted_ids, f"Expected Practitioner roles sorted by order of location name but got {ids}\n Sorted: {sorted_ids}")
-
-
- def test_list_with_custom_page_size(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url, {"page_size": 2})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 2)
-
- def test_list_with_greater_than_max_page_size(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url, {"page_size": 1001})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertLessEqual(len(response.data["results"]["entry"]), 1000)
-
- def test_list_filter_by_name(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url, {"name": "Cumberland"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_practitioner_gender(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url, {"practitioner_gender": "Female"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_list_filter_by_organization_name(self):
- url = reverse("fhir-practitionerrole-list")
- response = self.client.get(url, {"organization_name": "Hospital"})
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertIn("results", response.data)
-
- def test_retrieve_nonexistent_uuid(self):
- url = reverse("fhir-practitionerrole-detail",
- args=["12300000-0000-0000-0000-000000000124"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_nonexistent_npi(self):
- url = reverse("fhir-practitionerrole-detail", args=["999999"])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
-
- def test_retrieve_single_pracitionerrole(self):
- id = "3ac7bd1e-a698-4905-9731-ca650de2dcb0"
- url = reverse("fhir-practitionerrole-detail",
- args=[id])
- response = self.client.get(url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
- self.assertEqual(response.data['id'], id)
-
-
-class CapabilityStatementViewSetTestCase(APITestCase):
- def setUp(self):
- super().setUp()
- self.url = reverse("fhir-metadata")
-
- def test_capability_statement_returns_200(self):
- response = self.client.get(self.url)
- self.assertEqual(response.status_code, status.HTTP_200_OK)
-
- def test_capability_statement_returns_correct_content_type(self):
- response = self.client.get(self.url)
- self.assertEqual(response["Content-Type"], "application/fhir+json")
-
- def test_capability_statement_has_resource_type(self):
- response = self.client.get(self.url)
- self.assertEqual(response.data["resourceType"], "CapabilityStatement")
-
- def test_capability_statement_has_required_fields(self):
- response = self.client.get(self.url)
- data = response.data
-
- self.assertIn("status", data)
- self.assertIn("fhirVersion", data)
- self.assertIn("format", data)
- self.assertIn("rest", data)
-
- def test_capability_statement_is_valid_fhir(self):
- response = self.client.get(self.url)
-
- capability_statement = CapabilityStatement.model_validate(
- response.data)
- self.assertEqual(capability_statement.__resource_type__,
- "CapabilityStatement")
diff --git a/infrastructure/envs/prod/terraform.tfvars b/backend/npdfhir/tests/__init__.py
similarity index 100%
rename from infrastructure/envs/prod/terraform.tfvars
rename to backend/npdfhir/tests/__init__.py
diff --git a/backend/npdfhir/tests/api_test_case.py b/backend/npdfhir/tests/api_test_case.py
new file mode 100644
index 00000000..2b5573fa
--- /dev/null
+++ b/backend/npdfhir/tests/api_test_case.py
@@ -0,0 +1,44 @@
+from django.conf import settings
+from django.contrib.auth.models import User
+from django.db import connection
+from rest_framework.test import APIClient
+from rest_framework.test import APITestCase as DrfAPITestCase
+from structlog import contextvars
+
+# I can't explain why, but we need to import cacheData here. I think we can
+# remove this once we move to the docker db setup. By using "import thing as
+# thing", we silence "imported but unused" and "not accessed" warnings.
+from ..cache import cacheData as cacheData
+
+
+class SqlTraceLogger:
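+    """
+    Test helper: when settings.SQL_TRACING is enabled, bind the running test's
+    id into the structlog context and turn on Django's debug cursor so the SQL
+    executed during that test is captured in the query log.
+    """
+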
+ def __init__(self, testcase: DrfAPITestCase):
+ if settings.SQL_TRACING:
+ contextvars.bind_contextvars(test=testcase.id())
+ connection.force_debug_cursor = True
+
+ def teardown(self):
+ if settings.SQL_TRACING:
+ connection.force_debug_cursor = False
+
+
+class APITestCase(DrfAPITestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.user = User.objects.create(username="testuser")
+ cls.user.set_password("nothing")
+ return super().setUpTestData()
+
+ def setUp(self):
+ super().setUp()
+
+ self.client = APIClient()
+ self.client.force_authenticate(user=self.user)
+
+ self.sql_tracer = SqlTraceLogger(self)
+
+ def tearDown(self):
+ super().tearDown()
+
+ self.sql_tracer.teardown()
diff --git a/backend/npdfhir/tests/fixtures.py b/backend/npdfhir/tests/fixtures.py
new file mode 100644
index 00000000..463c1bba
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures.py
@@ -0,0 +1,358 @@
+import uuid
+import datetime
+import random
+
+from ..models import (
+ Individual,
+ IndividualToName,
+ IndividualToAddress,
+ FhirNameUse,
+ FhirAddressUse,
+ FipsState,
+ Npi,
+ Provider,
+ Organization,
+ OrganizationToName,
+ Address,
+ AddressUs,
+ Location,
+ ProviderToOrganization,
+ RelationshipType,
+ ProviderToLocation,
+ ProviderRole,
+ ProviderToTaxonomy,
+ Endpoint,
+ EndpointInstance,
+ EndpointConnectionType,
+ EndpointInstanceToPayload,
+ EndpointType,
+ EnvironmentType,
+ EhrVendor,
+ PayloadType,
+ LegalEntity,
+ OtherIdType,
+ OrganizationToOtherId,
+ OrganizationToTaxonomy,
+ ClinicalOrganization,
+ Nucc,
+)
+
+
+def _ensure_name_use():
+ return FhirNameUse.objects.get_or_create(value="usual")[0]
+
+
+def create_practitioner(
+ first_name="Alice",
+ last_name="Smith",
+ gender="F",
+ birth_date=datetime.date(1990, 1, 1),
+ npi_value=None,
+ practitioner_type=None,
+ location=None,
+ address_use="work",
+):
+ """
+ Creates an Individual, Name (via IndividualToName), Npi, Provider.
+ """
+ individual = Individual.objects.create(
+ id=uuid.uuid4(),
+ gender=gender,
+ birth_date=birth_date,
+ )
+
+ IndividualToName.objects.create(
+ individual=individual,
+ first_name=first_name,
+ last_name=last_name,
+ name_use=_ensure_name_use(),
+ )
+
+ if location:
+ use = FhirAddressUse.objects.get(value=address_use)
+
+ IndividualToAddress.objects.create(
+ individual=individual, address=location.address, address_use=use
+ )
+
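+    # Fall back to a random 10-digit value when the caller does not supply an NPI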
+ npi_value = npi_value or random.randint(1000000000, 9999999999)
+
+ npi = Npi.objects.create(
+ npi=npi_value,
+ entity_type_code=1,
+ enumeration_date=datetime.date(2000, 1, 1),
+ last_update_date=datetime.date(2020, 1, 1),
+ )
+
+ provider = Provider.objects.create(
+ npi=npi,
+ individual=individual,
+ )
+
+ if practitioner_type:
+ code = Nucc.objects.get(pk=practitioner_type)
+
+ ProviderToTaxonomy.objects.create(npi=provider, nucc_code=code, id=uuid.uuid4())
+
+ # display name
+ # Nucc
+
+ return provider
+
+
+def create_legal_entity(dba_name="Sample Legal Entity"):
+ legal_entity = LegalEntity.objects.create(ein_id=uuid.uuid4(), dba_name=dba_name)
+
+ return legal_entity
+
+
+def create_other_id_type(name="Sample Other ID"):
+ other_id = OtherIdType.objects.create(value=name)
+
+ return other_id
+
+
+def create_organization(
+ id=None,
+ name="Test Org",
+ parent_id=None,
+ authorized_official_first_name="Alice",
+ authorized_official_last_name="Smith",
+ legal_entity=None,
+ other_id_type=None,
+ npi_value=None,
+ other_id_name="testMBI",
+ other_state_code="NY",
+ other_issuer="New York State Medicaid",
+ organization_type=None,
+):
+ """
+ Creates an Organization + OrganizationToName.
+ """
+ # authorized_official cannot be null → create a dummy individual
+ ind = Individual.objects.create(
+ id=uuid.uuid4(),
+ gender="U",
+ birth_date=datetime.date(1980, 1, 1),
+ )
+
+ IndividualToName.objects.create(
+ individual=ind,
+ first_name=authorized_official_first_name,
+ last_name=authorized_official_last_name,
+ name_use=_ensure_name_use(),
+ )
+
+ if id is None:
+ id = uuid.uuid4()
+
+ org = Organization.objects.create(
+ id=id, authorized_official=ind, ein=legal_entity, parent_id=parent_id
+ )
+
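+    # NPI-linked records hang off a ClinicalOrganization, so only create one when
+    # an other id, taxonomy, or explicit NPI is requested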
+ if other_id_type or organization_type or npi_value:
+ npi = Npi.objects.create(
+ npi=npi_value or int(str(uuid.uuid4().int)[:10]),
+ entity_type_code=1,
+ enumeration_date=datetime.date(2000, 1, 1),
+ last_update_date=datetime.date(2020, 1, 1),
+ )
+
+ clinical_organization = ClinicalOrganization.objects.create(organization=org, npi=npi)
+
+ if other_id_type:
+ OrganizationToOtherId.objects.create(
+ npi=clinical_organization,
+ other_id=other_id_name,
+ other_id_type=other_id_type,
+ state_code=other_state_code,
+ issuer=other_issuer,
+ )
+
+ if organization_type:
+ code = Nucc.objects.get(pk=organization_type)
+
+ OrganizationToTaxonomy.objects.create(npi=clinical_organization, nucc_code=code)
+
+ OrganizationToName.objects.create(
+ organization=org,
+ name=name,
+ is_primary=True,
+ )
+
+ return org
+
+
+def create_location(
+ organization=None,
+ name="Test Location",
+ city="Albany",
+ state="NY",
+ zipcode="12207",
+ addr_line_1="123 Main St",
+):
+ """
+ Creates AddressUs → Address → Location.
+ """
+ organization = organization or create_organization()
+
+ fips_code = FipsState.objects.get(abbreviation=state)
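+    # AddressUs uses a plain integer primary key, so assign the test row a random id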
+ addr_us = AddressUs.objects.create(
+ id=random.randint(-100000000000, 100000000000),
+ delivery_line_1=addr_line_1,
+ city_name=city,
+ state_code_id=fips_code.id,
+ zipcode=zipcode,
+ )
+
+ address = Address.objects.create(
+ id=uuid.uuid4(),
+ address_us=addr_us,
+ )
+
+ loc = Location.objects.create(
+ id=uuid.uuid4(),
+ name=name,
+ organization=organization,
+ address=address,
+ active=True,
+ )
+
+ return loc
+
+
+def _ensure_endpoint_base_types():
+ """
+    Flyway seeds some reference values; ensure the minimal ones needed here exist.
+ """
+ etype, _ = EndpointType.objects.get_or_create(value="rest")
+ ctype, _ = EndpointConnectionType.objects.get_or_create(
+ id="hl7-fhir-rest",
+ defaults={"display": "FHIR REST", "definition": "FHIR REST endpoint"},
+ )
+ payload, _ = PayloadType.objects.get_or_create(
+ id="fhir-json",
+ defaults={"value": "application/fhir+json", "description": "FHIR JSON"},
+ )
+ return etype, ctype, payload
+
+
+def create_endpoint(
+ organization=None,
+ url="https://example.org/fhir",
+ name="Test Endpoint",
+ ehr=None,
+ payload_type=None,
+):
+ """
+ Creates EndpointType, EndpointConnectionType, EndpointInstance, Endpoint.
+ """
+ organization = organization or create_organization()
+
+ etype, ctype, payload = _ensure_endpoint_base_types()
+
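+    # Create a throwaway EHR vendor unless the caller passes one in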
+ if not ehr:
+ new_vendor_id = uuid.uuid4()
+ ehr_vendor = EhrVendor.objects.create(
+ id=new_vendor_id, name=f"My Sample{new_vendor_id}", is_cms_aligned_network=True
+ )
+ else:
+ ehr_vendor = ehr
+
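+    # Environment and payload types are looked up rather than created; the rows are
+    # assumed to already exist as seeded reference data (e.g. via Flyway)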
+ et = EnvironmentType.objects.get(pk="prod")
+
+ pt = PayloadType.objects.get(pk=payload_type or "urn:hl7-org:sdwg:ccda-structuredBody:1.1")
+
+ instance = EndpointInstance.objects.create(
+ id=uuid.uuid4(),
+ ehr_vendor_id=ehr_vendor.id,
+ address=url,
+ endpoint_connection_type=ctype,
+ name=name,
+ environment_type=et,
+ )
+
+ EndpointInstanceToPayload.objects.create(endpoint_instance=instance, payload_type=pt)
+
+ ep = Endpoint.objects.create(
+ id=uuid.uuid4(),
+ address=url,
+ endpoint_type=etype,
+ endpoint_instance=instance,
+ name=name,
+ )
+
+ return ep
+
+
+def _ensure_relationship_type():
+ """
+ Retrieve an existing relationship_type inserted by Flyway.
+ Default: 'assigning' (id=2)
+ """
+ try:
+ return RelationshipType.objects.get(value="assigning")
+ except RelationshipType.DoesNotExist:
+ # If Flyway hasn’t run (edge/dev case), create one safely
+ return RelationshipType.objects.create(value="assigning")
+
+
+def _ensure_provider_role(code="PRV", display="Provider Role"):
+ return ProviderRole.objects.get_or_create(
+ code=code,
+ defaults={
+ "system": "http://hl7.org/fhir/practitionerrole",
+ "display": display,
+ },
+ )[0]
+
+
+def create_full_practitionerrole(
+ first_name="Alice",
+ last_name="Smith",
+ gender="F",
+ npi_value=None,
+ org_name="Test Org",
+ location_name="Test Location",
+ role_code="PRV",
+ role_display="Provider Role",
+):
+ """
+ Creates:
+ Practitioner (Provider)
+ Organization
+ Location
+ ProviderToOrganization
+ ProviderToLocation
+ """
+ provider = create_practitioner(
+ first_name=first_name,
+ last_name=last_name,
+ gender=gender,
+ npi_value=npi_value,
+ )
+
+ org = create_organization(name=org_name)
+ loc = create_location(organization=org, name=location_name)
+
+ # Ensure relationship + role codes exist
+ rel_type = _ensure_relationship_type()
+ _ensure_provider_role(role_code, role_display)
+
+ pto_org = ProviderToOrganization.objects.create(
+ id=uuid.uuid4(),
+ individual=provider, # special FK uses Provider.individual_id
+ organization=org,
+ relationship_type=rel_type,
+ active=True,
+ )
+
+ pr = ProviderToLocation.objects.create(
+ id=uuid.uuid4(),
+ provider_to_organization=pto_org,
+ location=loc,
+ provider_role_code=role_code,
+ active=True,
+ )
+
+ return pr
diff --git a/backend/npdfhir/tests/fixtures/__init__.py b/backend/npdfhir/tests/fixtures/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/npdfhir/tests/fixtures/endpoint.py b/backend/npdfhir/tests/fixtures/endpoint.py
new file mode 100644
index 00000000..23be4c6b
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures/endpoint.py
@@ -0,0 +1,74 @@
+import uuid
+
+from ...models import (
+ EhrVendor,
+ Endpoint,
+ EndpointConnectionType,
+ EndpointInstance,
+ EndpointInstanceToPayload,
+ EndpointType,
+ PayloadType,
+)
+from .organization import create_organization
+
+
+def _ensure_endpoint_base_types():
+ """
+    Flyway seeds some reference values; ensure the minimal ones needed here exist.
+ """
+ etype, _ = EndpointType.objects.get_or_create(value="rest")
+ ctype, _ = EndpointConnectionType.objects.get_or_create(
+ id="hl7-fhir-rest",
+ defaults={"display": "FHIR REST", "definition": "FHIR REST endpoint"},
+ )
+ payload, _ = PayloadType.objects.get_or_create(
+ id="fhir-json",
+ defaults={"value": "application/fhir+json", "description": "FHIR JSON"},
+ )
+ return etype, ctype, payload
+
+
+def create_endpoint(
+ organization=None,
+ url="https://example.org/fhir",
+ name="Test Endpoint",
+ ehr=None,
+ payload_type=None,
+):
+ """
+ Creates EndpointType, EndpointConnectionType, EndpointInstance, Endpoint.
+ """
+ organization = organization or create_organization()
+
+ etype, ctype, payload = _ensure_endpoint_base_types()
+
+ if not ehr:
+ new_vendor_id = uuid.uuid4()
+ ehr_vendor = EhrVendor.objects.create(
+ id=new_vendor_id, name=f"My Sample{new_vendor_id}", is_cms_aligned_network=True
+ )
+ else:
+ ehr_vendor = ehr
+
+ pt = PayloadType.objects.get(pk=payload_type or "urn:hl7-org:sdwg:ccda-structuredBody:1.1")
+
+ instance = EndpointInstance.objects.create(
+ id=uuid.uuid4(),
+ ehr_vendor_id=ehr_vendor.id,
+ address=url,
+ endpoint_connection_type=ctype,
+ name=name,
+ environment_type_id="prod",
+ )
+
+ EndpointInstanceToPayload.objects.create(endpoint_instance=instance, payload_type=pt)
+
+ ep = Endpoint.objects.create(
+ id=uuid.uuid4(),
+ address=url,
+ endpoint_type=etype,
+ endpoint_instance=instance,
+ name=name,
+ )
+
+ return ep
diff --git a/backend/npdfhir/tests/fixtures/location.py b/backend/npdfhir/tests/fixtures/location.py
new file mode 100644
index 00000000..0991a625
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures/location.py
@@ -0,0 +1,53 @@
+import random
+import uuid
+
+from ...models import Address, AddressUs, FipsState, Location
+from .organization import create_organization
+
+
+def create_address(
+ city="Albany",
+ state="NY",
+ zipcode="12207",
+ addr_line_1="123 Main St",
+):
+ fips_code = FipsState.objects.get(abbreviation=state)
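+    # AddressUs uses a plain integer primary key, so assign the test row a random id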
+ addr_us = AddressUs.objects.create(
+ id=random.randint(-100000000000, 100000000000),
+ delivery_line_1=addr_line_1,
+ city_name=city,
+ state_code_id=fips_code.id,
+ zipcode=zipcode,
+ )
+
+ address = Address.objects.create(
+ id=uuid.uuid4(),
+ address_us=addr_us,
+ )
+
+ return address
+
+
+def create_location(
+ organization=None,
+ name="Test Location",
+ city="Albany",
+ state="NY",
+ zipcode="12207",
+ addr_line_1="123 Main St",
+):
+ """
+ Creates AddressUs → Address → Location.
+ """
+ organization = organization or create_organization()
+ address = create_address(city=city, state=state, zipcode=zipcode, addr_line_1=addr_line_1)
+
+ loc = Location.objects.create(
+ id=uuid.uuid4(),
+ name=name,
+ organization=organization,
+ address=address,
+ active=True,
+ )
+
+ return loc
diff --git a/backend/npdfhir/tests/fixtures/organization.py b/backend/npdfhir/tests/fixtures/organization.py
new file mode 100644
index 00000000..33bf1853
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures/organization.py
@@ -0,0 +1,93 @@
+import datetime
+import uuid
+
+from ...models import (
+ ClinicalOrganization,
+ Individual,
+ IndividualToName,
+ LegalEntity,
+ Npi,
+ Nucc,
+ Organization,
+ OrganizationToName,
+ OrganizationToOtherId,
+ OrganizationToTaxonomy,
+)
+from .utils import _ensure_name_use
+
+
+def create_legal_entity(dba_name="Sample Legal Entity"):
+ legal_entity = LegalEntity.objects.create(ein_id=uuid.uuid4(), dba_name=dba_name)
+
+ return legal_entity
+
+
+def create_organization(
+ id=None,
+ name="Test Org",
+ parent_id=None,
+ authorized_official_first_name="Alice",
+ authorized_official_last_name="Smith",
+ legal_entity=None,
+ other_id_type=None,
+ npi_value=None,
+ other_id_name="testMBI",
+ other_state_code="NY",
+ other_issuer="New York State Medicaid",
+ organization_type=None,
+):
+ """
+ Creates an Organization + OrganizationToName.
+ """
+ # authorized_official cannot be null → create a dummy individual
+ ind = Individual.objects.create(
+ id=uuid.uuid4(),
+ gender="U",
+ birth_date=datetime.date(1980, 1, 1),
+ )
+
+ IndividualToName.objects.create(
+ individual=ind,
+ first_name=authorized_official_first_name,
+ last_name=authorized_official_last_name,
+ name_use=_ensure_name_use(),
+ )
+
+ if id is None:
+ id = uuid.uuid4()
+
+ org = Organization.objects.create(
+ id=id, authorized_official=ind, ein=legal_entity, parent_id=parent_id
+ )
+
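+    # NPI-linked records hang off a ClinicalOrganization, so only create one when
+    # an other id, taxonomy, or explicit NPI is requested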
+ if other_id_type or organization_type or npi_value:
+ npi = Npi.objects.create(
+ npi=npi_value or int(str(uuid.uuid4().int)[:10]),
+ entity_type_code=1,
+ enumeration_date=datetime.date(2000, 1, 1),
+ last_update_date=datetime.date(2020, 1, 1),
+ )
+
+ clinical_organization = ClinicalOrganization.objects.create(organization=org, npi=npi)
+
+ if other_id_type:
+ OrganizationToOtherId.objects.create(
+ npi=clinical_organization,
+ other_id=other_id_name,
+ other_id_type=other_id_type,
+ state_code=other_state_code,
+ issuer=other_issuer,
+ )
+
+ if organization_type:
+ code = Nucc.objects.get(pk=organization_type)
+
+ OrganizationToTaxonomy.objects.create(npi=clinical_organization, nucc_code=code)
+
+ OrganizationToName.objects.create(
+ organization=org,
+ name=name,
+ is_primary=True,
+ )
+
+ return org
diff --git a/backend/npdfhir/tests/fixtures/practitioner.py b/backend/npdfhir/tests/fixtures/practitioner.py
new file mode 100644
index 00000000..1dbef016
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures/practitioner.py
@@ -0,0 +1,170 @@
+import datetime
+import random
+import uuid
+
+from ...models import (
+ FhirAddressUse,
+ FipsState,
+ Individual,
+ IndividualToAddress,
+ IndividualToName,
+ Npi,
+ Nucc,
+ OtherIdType,
+ Provider,
+ ProviderRole,
+ ProviderToLocation,
+ ProviderToOrganization,
+ ProviderToOtherId,
+ ProviderToTaxonomy,
+ RelationshipType,
+)
+from .location import create_location
+from .organization import create_organization
+from .utils import _ensure_name_use
+
+
+def _ensure_provider_role(code="PRV", display="Provider Role"):
+ return ProviderRole.objects.get_or_create(
+ code=code,
+ defaults={
+ "system": "http://hl7.org/fhir/practitionerrole",
+ "display": display,
+ },
+ )[0]
+
+
+def _ensure_relationship_type():
+ """
+ Retrieve an existing relationship_type inserted by Flyway.
+ Default: 'assigning' (id=2)
+ """
+ try:
+ return RelationshipType.objects.get(value="assigning")
+ except RelationshipType.DoesNotExist:
+ # If Flyway hasn’t run (edge/dev case), create one safely
+ return RelationshipType.objects.create(value="assigning")
+
+
+def create_practitioner(
+ first_name="Alice",
+ last_name="Smith",
+ gender="F",
+ birth_date=datetime.date(1990, 1, 1),
+ npi_value=None,
+ other_id=None,
+ other_id_type=None,
+ state=None,
+ practitioner_types=None,
+ location=None,
+ address_use="work",
+):
+ """
+ Creates an Individual, Name (via IndividualToName), Npi, Provider.
+ """
+ individual = Individual.objects.create(
+ id=uuid.uuid4(),
+ gender=gender,
+ birth_date=birth_date,
+ )
+
+ IndividualToName.objects.create(
+ individual=individual,
+ first_name=first_name,
+ last_name=last_name,
+ name_use=_ensure_name_use(),
+ )
+
+ if location:
+ use = FhirAddressUse.objects.get(value=address_use)
+
+ IndividualToAddress.objects.create(
+ individual=individual, address=location.address, address_use=use
+ )
+
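+    # Fall back to a random 10-digit value when the caller does not supply an NPI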
+ npi_value = npi_value or random.randint(1000000000, 9999999999)
+
+ npi = Npi.objects.create(
+ npi=npi_value,
+ entity_type_code=1,
+ enumeration_date=datetime.date(2000, 1, 1),
+ last_update_date=datetime.date(2020, 1, 1),
+ )
+
+ provider = Provider.objects.create(
+ npi=npi,
+ individual=individual,
+ )
+
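+    # Optionally attach a state-scoped, non-NPI identifier (ProviderToOtherId)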
+ if other_id:
+ other_id_type = OtherIdType.objects.get(value=(other_id_type or "OTHER"))
+ fips_code = FipsState.objects.get(abbreviation=(state or "NY"))
+ ProviderToOtherId.objects.create(
+ npi=provider,
+ other_id=other_id,
+ other_id_type=other_id_type,
+ state_code=fips_code,
+ issuer="TEST",
+ )
+
+ if practitioner_types:
+        for taxonomy_code in practitioner_types:
+            code = Nucc.objects.get(pk=taxonomy_code)
+
+ ProviderToTaxonomy.objects.create(npi=provider, nucc_code=code, id=uuid.uuid4())
+
+ # display name
+ # Nucc
+
+ return provider
+
+
+def create_full_practitionerrole(
+ first_name="Alice",
+ last_name="Smith",
+ gender="F",
+ npi_value=None,
+ org_name="Test Org",
+ location_name="Test Location",
+ role_code="PRV",
+ role_display="Provider Role",
+):
+ """
+ Creates:
+ Practitioner (Provider)
+ Organization
+ Location
+ ProviderToOrganization
+ ProviderToLocation
+ """
+ provider = create_practitioner(
+ first_name=first_name,
+ last_name=last_name,
+ gender=gender,
+ npi_value=npi_value,
+ )
+
+ org = create_organization(name=org_name)
+ loc = create_location(organization=org, name=location_name)
+
+ # Ensure relationship + role codes exist
+ rel_type = _ensure_relationship_type()
+ _ensure_provider_role(role_code, role_display)
+
+ pto_org = ProviderToOrganization.objects.create(
+ id=uuid.uuid4(),
+ individual=provider, # special FK uses Provider.individual_id
+ organization=org,
+ relationship_type=rel_type,
+ active=True,
+ )
+
+ pr = ProviderToLocation.objects.create(
+ id=uuid.uuid4(),
+ provider_to_organization=pto_org,
+ location=loc,
+ provider_role_code=role_code,
+ active=True,
+ )
+
+ return pr
diff --git a/backend/npdfhir/tests/fixtures/utils.py b/backend/npdfhir/tests/fixtures/utils.py
new file mode 100644
index 00000000..dfb8f1c0
--- /dev/null
+++ b/backend/npdfhir/tests/fixtures/utils.py
@@ -0,0 +1,5 @@
+from ...models import FhirNameUse
+
+
+def _ensure_name_use():
+ return FhirNameUse.objects.get_or_create(value="usual")[0]
diff --git a/backend/npdfhir/tests/helpers.py b/backend/npdfhir/tests/helpers.py
new file mode 100644
index 00000000..b4ae736f
--- /dev/null
+++ b/backend/npdfhir/tests/helpers.py
@@ -0,0 +1,51 @@
+from django.db import connection
+
+
+# Database query helpers
+def get_female_npis(npi_list):
+ """
+    Given a list of NPI numbers, return (npi, gender) rows for those whose individual record is female.
+ """
+ query = """
+ SELECT p.npi, i.gender
+ FROM npd.provider p
+ JOIN npd.individual i ON p.individual_id = i.id
+ WHERE p.npi = ANY(%s)
+ AND i.gender = 'F'
+ """
+ with connection.cursor() as cursor:
+ cursor.execute(query, [npi_list])
+ results = cursor.fetchall()
+
+ return results
+
+
+# FHIR response assertion helpers
+def assert_fhir_response(test_case, response, expected_status=200):
+ test_case.assertEqual(response.status_code, expected_status)
+ test_case.assertEqual(response["Content-Type"], "application/fhir+json")
+
+
+def assert_has_results(test_case, response):
+ test_case.assertIn("results", response.data)
+
+
+def assert_pagination_limit(test_case, response, max_size=100):
+ bundle = response.data["results"]
+ test_case.assertLessEqual(len(bundle["entry"]), max_size)
+
+
+# Data extraction helpers
+def extract_resource_names(response):
+ return [d["resource"].get("name", {}) for d in response.data["results"]["entry"]]
+
+
+def extract_practitioner_names(response):
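+    # Use the last entry in each resource's name list and return (family, first given name) tuples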
+ return [
+ (d["resource"]["name"][-1].get("family", {}), d["resource"]["name"][-1]["given"][0])
+ for d in response.data["results"]["entry"]
+ ]
+
+
+def extract_resource_ids(response):
+ return [d["resource"].get("id", {}) for d in response.data["results"]["entry"]]
diff --git a/backend/npdfhir/tests/test_basic_views.py b/backend/npdfhir/tests/test_basic_views.py
new file mode 100644
index 00000000..015eedb5
--- /dev/null
+++ b/backend/npdfhir/tests/test_basic_views.py
@@ -0,0 +1,21 @@
+from django.urls import reverse
+from rest_framework import status
+
+from .api_test_case import APITestCase
+
+
+class BasicViewsTestCase(APITestCase):
+ def test_health_view(self):
+ url = reverse("healthCheck") # maps to "/healthCheck"
+ response = self.client.get(url)
+ res_obj = response.json()
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(res_obj["status"], "healthy")
+
+ def test_fhir_endpoint_list_without_slash(self):
+ response = self.client.get("/fhir")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_fhir_endpoint_list_with_slash(self):
+ response = self.client.get("/fhir/")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/backend/npdfhir/tests/test_capability_statement.py b/backend/npdfhir/tests/test_capability_statement.py
new file mode 100644
index 00000000..f22c43b5
--- /dev/null
+++ b/backend/npdfhir/tests/test_capability_statement.py
@@ -0,0 +1,41 @@
+from django.urls import reverse
+from fhir.resources.R4B.capabilitystatement import CapabilityStatement
+from rest_framework import status
+
+from .api_test_case import APITestCase
+
+
+class CapabilityStatementViewSetTestCase(APITestCase):
+ def setUp(self):
+ super().setUp()
+ self.url = reverse("fhir-metadata")
+
+ # Response tests
+ def test_capability_statement_returns_200(self):
+ response = self.client.get(self.url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ def test_capability_statement_returns_correct_content_type(self):
+ response = self.client.get(self.url)
+ self.assertEqual(response["Content-Type"], "application/fhir+json")
+
+ # Content tests
+ def test_capability_statement_has_resource_type(self):
+ response = self.client.get(self.url)
+ self.assertEqual(response.data["resourceType"], "CapabilityStatement")
+
+ def test_capability_statement_has_required_fields(self):
+ response = self.client.get(self.url)
+ data = response.data
+
+ self.assertIn("status", data)
+ self.assertIn("fhirVersion", data)
+ self.assertIn("format", data)
+ self.assertIn("rest", data)
+
+ # Validation tests
+ def test_capability_statement_is_valid_fhir(self):
+ response = self.client.get(self.url)
+
+ capability_statement = CapabilityStatement.model_validate(response.data)
+ self.assertEqual(capability_statement.__resource_type__, "CapabilityStatement")
diff --git a/backend/npdfhir/tests/test_documentation.py b/backend/npdfhir/tests/test_documentation.py
new file mode 100644
index 00000000..7d4a36d1
--- /dev/null
+++ b/backend/npdfhir/tests/test_documentation.py
@@ -0,0 +1,28 @@
+from django.urls import reverse
+from rest_framework import status
+
+from .api_test_case import APITestCase
+
+
+class DocumentationViewSetTestCase(APITestCase):
+ def test_get_swagger_docs(self):
+ swagger_url = reverse("schema-swagger-ui")
+ response = self.client.get(swagger_url)
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertIn('id="swagger-ui"', response.text)
+
+ def test_get_redoc_docs(self):
+ redoc_url = reverse("schema-redoc")
+ response = self.client.get(redoc_url)
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertIn("redoc spec-url", response.text)
+
+ def test_get_json_docs(self):
+ json_docs_url = reverse("schema")
+ response = self.client.get(json_docs_url)
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertIn("application/vnd.oai.openapi+json", response["Content-Type"])
+ self.assertIn("openapi", response.data.keys())
diff --git a/backend/npdfhir/tests/test_endpoint.py b/backend/npdfhir/tests/test_endpoint.py
new file mode 100644
index 00000000..d7c50c3e
--- /dev/null
+++ b/backend/npdfhir/tests/test_endpoint.py
@@ -0,0 +1,202 @@
+from django.urls import reverse
+from fhir.resources.R4B.bundle import Bundle
+from rest_framework import status
+
+from .api_test_case import APITestCase
+from .fixtures.endpoint import create_endpoint
+from .helpers import (
+ assert_fhir_response,
+ assert_has_results,
+ assert_pagination_limit,
+ extract_resource_names,
+)
+
+
+class EndpointViewSetTestCase(APITestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.endpoints = [
+ create_endpoint(name="88 MEDICINE LLC"),
+ create_endpoint(name="AAIA of Tampa Bay, LLC"),
+ create_endpoint(name="ABC Healthcare Service Base URL"),
+ create_endpoint(name="A Better Way LLC"),
+ create_endpoint(name="Abington Surgical Center"),
+ create_endpoint(name="Access Mental Health Agency"),
+ create_endpoint(name="Abington Center Surgical"),
+ create_endpoint(name="ADHD & Autism Psychological Services PLLC"),
+ create_endpoint(name="Adolfo C FernandezObregon Md"),
+ create_endpoint(name="Advanced Anesthesia, LLC"),
+ create_endpoint(name="Advanced Cardiovascular Center"),
+ create_endpoint(name="Kansas City Psychiatric Group"),
+ ]
+
+ return super().setUpTestData()
+
+ # Basic tests
+ def setUp(self):
+ super().setUp()
+ self.list_url = reverse("fhir-endpoint-list")
+
+ def test_list_default(self):
+ response = self.client.get(self.list_url)
+
+ assert_fhir_response(self, response)
+ assert_has_results(self, response)
+
+ # Sorting tests
+ def test_list_in_default_order(self):
+ url = self.list_url
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+
+        # Extract names
+        # Note: the expected order follows the database collation, which differs from Python's default sort
+ names = extract_resource_names(response)
+
+ sorted_names = [
+ "88 MEDICINE LLC",
+ "AAIA of Tampa Bay, LLC",
+ "ABC Healthcare Service Base URL",
+ "A Better Way LLC",
+ "Abington Center Surgical",
+ "Abington Surgical Center",
+ "Access Mental Health Agency",
+ "ADHD & Autism Psychological Services PLLC",
+ "Adolfo C FernandezObregon Md",
+ "Advanced Anesthesia, LLC",
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected endpoints list sorted by name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ # Bundle Validation tests
+ def test_list_returns_fhir_bundle(self):
+ response = self.client.get(self.list_url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ data = response.json()
+ bundle = Bundle.model_validate(data["results"])
+
+ self.assertEqual(bundle.__resource_type__, "Bundle")
+
+ def test_list_entries_are_fhir_endpoints(self):
+ response = self.client.get(self.list_url)
+
+ bundle = response.data["results"]
+ self.assertGreater(len(bundle["entry"]), 0)
+
+ first_entry = bundle["entry"][0]
+ self.assertIn("resource", first_entry)
+
+ endpoint_resource = first_entry["resource"]
+ self.assertEqual(endpoint_resource["resourceType"], "Endpoint")
+ self.assertIn("id", endpoint_resource)
+ self.assertIn("status", endpoint_resource)
+ self.assertIn("connectionType", endpoint_resource)
+ self.assertIn("address", endpoint_resource)
+
+ # Pagination tests
+ def test_pagination_custom_page_size(self):
+ response = self.client.get(self.list_url, {"page_size": 2})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ bundle = response.data["results"]
+ self.assertLessEqual(len(bundle["entry"]), 2)
+
+ def test_pagination_enforces_maximum(self):
+ response = self.client.get(self.list_url, {"page_size": 5000})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_pagination_limit(self, response)
+
+ # Filter tests
+ def test_filter_by_name(self):
+ response = self.client.get(self.list_url, {"name": "Kansas City Psychiatric Group"})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ bundle = response.data["results"]
+
+ self.assertGreater(len(bundle["entry"]), 0)
+
+ first_endpoint = bundle["entry"][0]["resource"]
+
+ self.assertIn("name", first_endpoint)
+ self.assertIn("Kansas City", first_endpoint["name"])
+
+ def test_filter_by_connection_type(self):
+ connection_type = "hl7-fhir-rest"
+ response = self.client.get(self.list_url, {"endpoint_connection_type": connection_type})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ bundle = response.data["results"]
+
+ entries = bundle.get("entry", [])
+ self.assertGreater(len(entries), 0)
+
+ first_endpoint = entries[0]["resource"]
+ self.assertIn("connectionType", first_endpoint)
+
+ code = first_endpoint["connectionType"]["code"]
+ self.assertEqual(connection_type, code)
+
+ def test_filter_by_payload_type(self):
+ payload_type = "ccda-structuredBody:1.1"
+ response = self.client.get(self.list_url, {"payload_type": payload_type})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ bundle = response.data["results"]
+
+ entries = bundle.get("entry", [])
+ self.assertGreater(len(entries), 0)
+
+ first_endpoint = entries[0]["resource"]
+ self.assertIn("payloadType", first_endpoint)
+
+ code = first_endpoint["payloadType"][0]["coding"][0]["display"]
+ self.assertEqual(payload_type, code)
+
+ def test_filter_returns_empty_for_nonexistent_name(self):
+ response = self.client.get(self.list_url, {"name": "NonexistentEndpointName12345"})
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ bundle = response.data["results"]
+ self.assertEqual(len(bundle["entry"]), 0)
+
+ # Retrieve tests
+ def test_retrieve_specific_endpoint(self):
+ list_response = self.client.get(self.list_url, {"page_size": 1})
+ first_endpoint = list_response.data["results"]["entry"][0]["resource"]
+
+ endpoint_id = first_endpoint["id"]
+ detail_url = reverse("fhir-endpoint-detail", args=[endpoint_id])
+
+ response = self.client.get(detail_url)
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ endpoint = response.data
+ self.assertEqual(endpoint["resourceType"], "Endpoint")
+ self.assertEqual(endpoint["id"], endpoint_id)
+ self.assertIn("status", endpoint)
+ self.assertIn("connectionType", endpoint)
+ self.assertIn("address", endpoint)
+
+ def test_retrieve_nonexistent_endpoint(self):
+ detail_url = reverse("fhir-endpoint-detail", args=["12300000-0000-0000-0000-000000000123"])
+ response = self.client.get(detail_url)
+
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_single_endpoint(self):
+ id = self.endpoints[0].endpoint_instance.id
+ url = reverse("fhir-endpoint-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], str(id))
diff --git a/backend/npdfhir/tests/test_location.py b/backend/npdfhir/tests/test_location.py
new file mode 100644
index 00000000..87052be5
--- /dev/null
+++ b/backend/npdfhir/tests/test_location.py
@@ -0,0 +1,225 @@
+from django.urls import reverse
+from rest_framework import status
+
+from .api_test_case import APITestCase
+from .fixtures.location import create_location
+from .fixtures.organization import create_organization
+from .helpers import (
+ assert_fhir_response,
+ assert_has_results,
+ assert_pagination_limit,
+ extract_resource_names,
+)
+
+
+class LocationViewSetTestCase(APITestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.orgs = [
+ create_organization(name="Alpha Org"),
+ create_organization(name="Beta Org"),
+ ]
+
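+        # Duplicate names, shared orgs, and org-less locations exercise the sorting and filter tests below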
+ cls.locs = [
+ create_location(name="Main Clinic", organization=cls.orgs[0]),
+ create_location(name="1ST CHOICE MEDICAL DISTRIBUTORS, LLC", organization=cls.orgs[0]),
+ create_location(name="986 INFUSION PHARMACY #1 INC.", organization=cls.orgs[1]),
+ create_location(name="A & A MEDICAL SUPPLY COMPANY", organization=cls.orgs[1]),
+ create_location(
+ name="ABACUS BUSINESS CORPORATION GROUP INC.", organization=cls.orgs[0]
+ ),
+ create_location(name="ABBY D CENTER, INC.", organization=cls.orgs[1]),
+ create_location(name="ABC DURABLE MEDICAL EQUIPMENT INC", organization=cls.orgs[0]),
+ create_location(name="ABC HOME MEDICAL SUPPLY, INC.", organization=cls.orgs[0]),
+ create_location(name="A BEAUTIFUL SMILE DENTISTRY, L.L.C.", organization=cls.orgs[0]),
+ create_location(name="A & B HEALTH CARE, INC.", organization=cls.orgs[0]),
+ create_location(name="ABILENE HELPING HANDS INC", organization=cls.orgs[0]),
+ create_location(name="ZEELAND COMMUNITY HOSPITAL", organization=cls.orgs[0]),
+ create_location(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD", organization=cls.orgs[0]),
+ create_location(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD", organization=cls.orgs[1]),
+ create_location(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD", organization=cls.orgs[1]),
+ create_location(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD", organization=cls.orgs[1]),
+ create_location(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD", organization=cls.orgs[1]),
+ create_location(name="YOUNG C. BAE, M.D."),
+ create_location(name="YORKTOWN EMERGENCY MEDICAL SERVICE"),
+ create_location(name="YODORINCMISSIONPLAZAPHARMACY", organization=cls.orgs[0]),
+ create_location(name="YOAKUM COMMUNITY HOSPITAL", organization=cls.orgs[0]),
+ create_location(
+ name="FROEDTERT MEMORIAL LUTHERAN HOSPITAL, INC.", organization=cls.orgs[1]
+ ),
+ create_location(name="AMBER ENTERPRISES INC.", organization=cls.orgs[0]),
+ create_location(name="COUNTY OF KOOCHICHING", organization=cls.orgs[0]),
+ create_location(name="OCEAN HOME HEALTH SUPPLY, LLC", organization=cls.orgs[0]),
+ create_location(name="PULMONARY MANAGEMENT, INC.", organization=cls.orgs[0]),
+ create_location(name="MEDICATION MANAGEMENT CENTER, LLC.", organization=cls.orgs[1]),
+ create_location(name="HENDRICKS COUNTY HOSPITAL", organization=cls.orgs[1]),
+ create_location(name="BAY AREA REHABILITATION MEDICAL GROUP", organization=cls.orgs[1]),
+ create_location(name="PROHAB REHABILITATION SERVICES, INC.", organization=cls.orgs[1]),
+ ]
+ return super().setUpTestData()
+
+ # Basic tests
+ def test_list_default(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+ assert_has_results(self, response)
+
+ # Sorting tests
+ def test_list_in_default_order(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+
+ # Extract names
+ names = extract_resource_names(response)
+
+ sorted_names = [
+ "1ST CHOICE MEDICAL DISTRIBUTORS, LLC",
+ "986 INFUSION PHARMACY #1 INC.",
+ "A & A MEDICAL SUPPLY COMPANY",
+ "ABACUS BUSINESS CORPORATION GROUP INC.",
+ "ABBY D CENTER, INC.",
+ "ABC DURABLE MEDICAL EQUIPMENT INC",
+ "ABC HOME MEDICAL SUPPLY, INC.",
+ "A BEAUTIFUL SMILE DENTISTRY, L.L.C.",
+ "A & B HEALTH CARE, INC.",
+ "ABILENE HELPING HANDS INC",
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir locations sorted by name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ def test_list_in_descending_order(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"_sort": "-name"})
+ assert_fhir_response(self, response)
+
+ # Extract names
+        # Note: the expected order follows the database collation, which differs from Python's default sort
+ names = extract_resource_names(response)
+
+ sorted_names = [
+ "ZEELAND COMMUNITY HOSPITAL",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNG C. BAE, M.D.",
+ "YORKTOWN EMERGENCY MEDICAL SERVICE",
+ "YODORINCMISSIONPLAZAPHARMACY",
+ "YOAKUM COMMUNITY HOSPITAL",
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected locations list sorted by name in descending but got {names}\n Sorted: {sorted_names}",
+ )
+
+ def test_list_in_order_by_address(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"_sort": "address_full,name"})
+ assert_fhir_response(self, response)
+
+ # Extract names
+        # Note: the expected order follows the database collation, which differs from Python's default sort
+ names = extract_resource_names(response)
+
+ # Names correspond to following addresses
+ # 10000 W Bluemound Rd, Wauwatosa, WI 53226
+ # 10004 S 152nd St, Omaha, NE 68138
+ # 1000 5th St, International Falls, MN 56649
+ # 1000 Airport Rd, Lakewood, NJ 8701
+ # 1000 E Center St, Kingsport, TN 37660
+ # 1000 E Main St, Danville, IN 46122
+ # 1000 Greenley Rd, Sonora, CA 95370
+ # 1000 Regency Ct, Toledo, OH 43623
+
+ sorted_names = [
+ "1ST CHOICE MEDICAL DISTRIBUTORS, LLC",
+ "986 INFUSION PHARMACY #1 INC.",
+ "A & A MEDICAL SUPPLY COMPANY",
+ "ABACUS BUSINESS CORPORATION GROUP INC.",
+ "ABBY D CENTER, INC.",
+ "ABC DURABLE MEDICAL EQUIPMENT INC",
+ "ABC HOME MEDICAL SUPPLY, INC.",
+ "A BEAUTIFUL SMILE DENTISTRY, L.L.C.",
+ "A & B HEALTH CARE, INC.",
+ "ABILENE HELPING HANDS INC",
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected locations list sorted by address ascending but got {names}\n Sorted: {sorted_names}",
+ )
+
+ # Pagination tests
+ def test_list_with_custom_page_size(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"page_size": 2})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertLessEqual(len(response.data["results"]["entry"]), 2)
+
+ def test_list_with_greater_than_max_page_size(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"page_size": 1001})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_pagination_limit(self, response)
+
+ # Filter tests
+ def test_list_filter_by_name(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"name": "Cumberland"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"address": "Avenue"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_city(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"address_city": "Seattle"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_state(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"address_state": "TX"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_postalcode(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"address_postalcode": "90210"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_use(self):
+ url = reverse("fhir-location-list")
+ response = self.client.get(url, {"address_use": "work"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ # Retrieve tests
+ def test_retrieve_nonexistent(self):
+ url = reverse("fhir-location-detail", args=["00000000-0000-0000-0000-000000000000"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_single_location(self):
+ id = self.locs[0].id
+ url = reverse("fhir-location-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], str(id))
diff --git a/backend/npdfhir/tests/test_organization.py b/backend/npdfhir/tests/test_organization.py
new file mode 100644
index 00000000..2523f3fb
--- /dev/null
+++ b/backend/npdfhir/tests/test_organization.py
@@ -0,0 +1,293 @@
+from django.urls import reverse
+from rest_framework import status
+
+from ..models import Organization, OtherIdType
+from .api_test_case import APITestCase
+from .fixtures.organization import create_legal_entity, create_organization
+from .helpers import (
+ assert_fhir_response,
+ assert_has_results,
+ assert_pagination_limit,
+ extract_resource_names,
+)
+
+
+class OrganizationViewSetTestCase(APITestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.orgs = [
+ create_organization(
+ name="1ST CHOICE HOME HEALTH CARE INC", id="c591bfc5-b4ed-49af-926f-569056b5b1aa"
+ ),
+ create_organization(
+ name="1ST CHOICE MEDICAL DISTRIBUTORS, LLC",
+ id="5f56f3f0-3bd6-42ce-b275-f12f92a4ba40",
+ parent_id="c591bfc5-b4ed-49af-926f-569056b5b1aa",
+ ),
+ create_organization(name="986 INFUSION PHARMACY #1 INC."),
+ create_organization(name="A & A MEDICAL SUPPLY COMPANY"),
+ create_organization(name="ABACUS BUSINESS CORPORATION GROUP INC."),
+ create_organization(name="ABBY D CENTER, INC."),
+ create_organization(name="ABC DURABLE MEDICAL EQUIPMENT INC"),
+ create_organization(name="ABC HOME MEDICAL SUPPLY, INC."),
+ create_organization(name="A BEAUTIFUL SMILE DENTISTRY, L.L.C."),
+ create_organization(name="A & B HEALTH CARE, INC."),
+ create_organization(name="ZUNI HOME HEALTH CARE AGENCY"),
+ create_organization(name="ZEELAND COMMUNITY HOSPITAL"),
+ create_organization(name="YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD"),
+ create_organization(name="YOUNG C. BAE, M.D."),
+ create_organization(name="YORKTOWN EMERGENCY MEDICAL SERVICE"),
+ create_organization(name="YODORINCMISSIONPLAZAPHARMACY"),
+ create_organization(name="YOAKUM COMMUNITY HOSPITAL"),
+ create_organization(name="YARMOUTH AUDIOLOGY"),
+ ]
+
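+        # Organization backed by a legal entity; used by the non-clinical retrieve and EIN filter tests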
+ cls.joe_legal_entity = create_legal_entity(dba_name="Joe Administrative Services LLC")
+ cls.joe_name = "Joe Health Incorporated"
+ cls.joe_health_org = create_organization(
+ name=cls.joe_name, legal_entity=cls.joe_legal_entity
+ )
+ cls.orgs.append(cls.joe_health_org)
+
+ cls.other_id = OtherIdType.objects.first()
+ cls.other_id_org = create_organization(name="Beaver Clinicals", other_id_type=cls.other_id)
+ cls.orgs.append(cls.other_id_org)
+
+ cls.hospital_nucc_org = create_organization(
+ name="TestNuccOrg", organization_type="283Q00000X"
+ )
+ cls.orgs.append(cls.hospital_nucc_org)
+
+ cls.org_with_npi = create_organization(name="Custom NPI General", npi_value=1427051473)
+ cls.orgs.append(cls.org_with_npi)
+
+ cls.org_cumberland = create_organization(name="Cumberland")
+ cls.orgs.append(cls.org_cumberland)
+
+ return super().setUpTestData()
+
+ def setUp(self):
+ super().setUp()
+ self.org_without_authorized_official = Organization.objects.create(
+ id="26708690-19d6-499e-b481-cebe05b98f08", authorized_official_id=None
+ )
+
+ # Basic tests
+ def test_list_default(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+ assert_has_results(self, response)
+
+ # Sorting tests
+ def test_list_in_default_order(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+
+ # Extract names
+ names = extract_resource_names(response)
+
+ sorted_names = [
+ "1ST CHOICE HOME HEALTH CARE INC",
+ "1ST CHOICE MEDICAL DISTRIBUTORS, LLC",
+ "986 INFUSION PHARMACY #1 INC.",
+ "A & A MEDICAL SUPPLY COMPANY",
+ "ABACUS BUSINESS CORPORATION GROUP INC.",
+ "ABBY D CENTER, INC.",
+ "ABC DURABLE MEDICAL EQUIPMENT INC",
+ "ABC HOME MEDICAL SUPPLY, INC.",
+ "A BEAUTIFUL SMILE DENTISTRY, L.L.C.",
+ "A & B HEALTH CARE, INC.",
+ ]
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir orgs sorted by org name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ def test_list_in_descending_order(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"_sort": "-organizationtoname__name"})
+ assert_fhir_response(self, response)
+
+ # Extract names
+        # Note: the expected order follows the database collation, which differs from Python's default sort
+ names = extract_resource_names(response)
+
+ sorted_names = [
+ {},
+ "ZUNI HOME HEALTH CARE AGENCY",
+ "ZEELAND COMMUNITY HOSPITAL",
+ "YOUNGSTOWN ORTHOPAEDIC ASSOCIATES LTD",
+ "YOUNG C. BAE, M.D.",
+ "YORKTOWN EMERGENCY MEDICAL SERVICE",
+ "YODORINCMISSIONPLAZAPHARMACY",
+ "YOAKUM COMMUNITY HOSPITAL",
+ "YARMOUTH AUDIOLOGY",
+ "TestNuccOrg",
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir org list sorted descending by name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ # Pagination tests
+ def test_list_with_custom_page_size(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"page_size": 2})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertLessEqual(len(response.data["results"]["entry"]), 2)
+
+ def test_list_with_greater_than_max_page_size(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"page_size": 1001})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_pagination_limit(self, response)
+
+ # Basic Filter tests
+ def test_list_filter_by_name(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"name": "Cumberland"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ def test_list_filter_by_organization_type(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"organization_type": "Hospital"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ # Identifiers Filter tests
+ def test_list_filter_by_npi_general(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"identifier": "1427051473"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ def test_list_filter_by_npi_specific(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"identifier": "NPI|1427051473"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ def test_parent_id(self):
+ parent_id = self.orgs[1].parent_id
+ id = self.orgs[1].id
+ url = reverse("fhir-organization-detail", args=[parent_id])
+ response = self.client.get(url)
+ # check that the parentless organization does not have a parent listed
+ self.assertNotIn("partOf", str(response.data.keys()))
+
+ url = reverse("fhir-organization-detail", args=[id])
+ response = self.client.get(url)
+ # check that the child organization has a parent_id listed
+ self.assertIn("partOf", str(response.data.keys()))
+ # check that the child organization has the correct parent_id listed
+ self.assertIn(parent_id, f"Organization/{response.data['partOf']['reference']}")
+
+ def test_list_filter_by_otherID_general(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"identifier": "testMBI"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ # def test_list_filter_by_otherID_specific(self):
+ # url = reverse("fhir-organization-list")
+ # response = self.client.get(url, {"identifier":" 1|001586989"})
+ # self.assertEqual(response.status_code, status.HTTP_200_OK)
+ # assert_has_results(self, response)
+ # self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ def test_list_filter_by_ein_general(self):
+ url = reverse("fhir-organization-list")
+
+ id = self.joe_legal_entity.ein_id
+
+ response = self.client.get(url, {"identifier": str(id)})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+ self.assertGreaterEqual(response.data["results"]["total"], 1)
+
+ # def test_list_filter_by_ein_specific(self):
+ # url = reverse("fhir-organization-list")
+ # response = self.client.get(url, {"identifier":"USEIN|12-3456789"})
+ # self.assertEqual(response.status_code, status.HTTP_200_OK)
+ # assert_has_results(self, response)
+
+ # Address Filter tests
+ def test_list_filter_by_address(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"address": "Main"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_city(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"address_city": "Boston"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_state(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"address_state": "NY"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_postalcode(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"address_postalcode": "10001"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_address_use(self):
+ url = reverse("fhir-organization-list")
+ response = self.client.get(url, {"address_use": "work"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ # Retrieve tests
+ def test_retrieve_non_clinical_organization(self):
+ id = self.joe_health_org.id
+
+ url = reverse("fhir-organization-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ org = response.data
+ self.assertEqual(org["resourceType"], "Organization")
+ self.assertEqual(org["name"], self.joe_name)
+
+ def test_retrieve_nonexistent_uuid(self):
+ url = reverse("fhir-organization-detail", args=["12300000-0000-0000-0000-000000000123"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_nonexistent_npi(self):
+ url = reverse("fhir-organization-detail", args=["999999"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_single_organization(self):
+ id = self.orgs[0].id
+ url = reverse("fhir-organization-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], str(id))
+
+ # Edge cases tests
+ def test_organization_without_authorized_official(self):
+ id = self.org_without_authorized_official.pk
+ url = reverse("fhir-organization-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], id)
diff --git a/backend/npdfhir/tests/test_practitioner.py b/backend/npdfhir/tests/test_practitioner.py
new file mode 100644
index 00000000..a08490f7
--- /dev/null
+++ b/backend/npdfhir/tests/test_practitioner.py
@@ -0,0 +1,402 @@
+from django.urls import reverse
+from rest_framework import status
+
+from .api_test_case import APITestCase
+from .fixtures.location import create_location
+from .fixtures.practitioner import create_practitioner
+from .helpers import (
+ assert_fhir_response,
+ assert_has_results,
+ assert_pagination_limit,
+ extract_practitioner_names,
+ get_female_npis,
+)
+
+
+class PractitionerViewSetTestCase(APITestCase):
+ @classmethod
+ def setUpTestData(cls):
+ cls.locs = [
+ create_location(
+ name="California Location A",
+ city="Springfield",
+ state="CA",
+ zipcode="12345",
+ addr_line_1="113 Stadium Blvd.",
+ ),
+ create_location(
+ name="California Location B",
+ city="Sacramento",
+ state="CA",
+ zipcode="54321",
+ addr_line_1="333 Rocky Road.",
+ ),
+ create_location(
+ name="New York Location A",
+ city="Rochester",
+ state="NY",
+ zipcode="33333",
+ addr_line_1="123 Street R.",
+ ),
+ ]
+
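+        # NUCC taxonomy codes and practitioners used by the practitioner_type filter test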
+ cls.nurse_code = "363L00000X"
+ cls.non_nurse_code = "364SP0200X"
+ cls.nurse_prac = create_practitioner(
+ last_name="ZOLLER",
+ first_name="DAVID",
+ practitioner_types=[cls.nurse_code, cls.non_nurse_code],
+ )
+
+ cls.transplant_code = "204F00000X"
+ cls.non_nurse_prac = create_practitioner(
+ last_name="MILLER",
+ first_name="STACY",
+ practitioner_types=[cls.transplant_code],
+ )
+
+ cls.counselor = "101Y00000X"
+ cls.non_nurse_prac = create_practitioner(
+ last_name="TROY",
+ first_name="DIANA",
+ practitioner_types=[cls.counselor],
+ )
+
+ cls.sample_last_name = "SOLOMON"
+ cls.pracs = [
+ create_practitioner(last_name="AADALEN", first_name="KIRK", npi_value=1234567890),
+ create_practitioner(last_name="ABBAS", first_name="ASAD", other_id=1234567890),
+ create_practitioner(last_name="ABBOTT", first_name="BRUCE"),
+ create_practitioner(last_name="ABBOTT", first_name="PHILIP"),
+ create_practitioner(last_name="ABDELHALIM", first_name="AHMED"),
+ create_practitioner(last_name="ABDELHAMED", first_name="ABDELHAMED"),
+ create_practitioner(last_name="ABDEL NOUR", first_name="MAGDY"),
+ create_practitioner(last_name="ABEL", first_name="MICHAEL", location=cls.locs[0]),
+ create_practitioner(last_name="ABELES", first_name="JENNIFER"),
+ create_practitioner(last_name="ABELSON", first_name="MARK", location=cls.locs[2]),
+ create_practitioner(last_name="CUTLER", first_name="A"),
+ create_practitioner(last_name="NIZAM", first_name="A"),
+ create_practitioner(last_name="SALAIS", first_name="A"),
+ create_practitioner(
+ last_name="JANOS", first_name="AARON", location=cls.locs[1], address_use="home"
+ ),
+ create_practitioner(last_name="NOONBERG", first_name="AARON"),
+ create_practitioner(last_name="PITNEY", first_name="AARON"),
+ create_practitioner(last_name=cls.sample_last_name, first_name="AARON"),
+ create_practitioner(last_name="STEIN", first_name="AARON"),
+ create_practitioner(last_name="ALI", first_name="ABBAS"),
+ create_practitioner(last_name="JAFRI", first_name="ABBAS"),
+ create_practitioner(last_name="ZWERLING", first_name="HAYWARD"),
+ create_practitioner(last_name="ZUROSKE", first_name="GLEN"),
+ create_practitioner(last_name="ZUCKERBERG", first_name="EDWARD"),
+ create_practitioner(last_name="ZUCKER", first_name="WILLIAM"),
+ create_practitioner(last_name="ZUCCALA", first_name="SCOTT"),
+ create_practitioner(last_name="ZOVE", first_name="DANIEL"),
+ create_practitioner(last_name="ZORN", first_name="GUNNAR"),
+ create_practitioner(last_name="ZOOG", first_name="EUGENE"),
+ create_practitioner(last_name="ZOLMAN", first_name="MARK"),
+ cls.nurse_prac,
+ ]
+
+ return super().setUpTestData()
+
+ # Basic tests
+ def test_list_default(self):
+ url = reverse("fhir-practitioner-list") # /Practitioner/
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+ assert_has_results(self, response)
+
+ # Sorting tests
+ def test_list_in_default_order(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+
+ # Extract names
+ names = extract_practitioner_names(response)
+
+ sorted_names = [
+ ("AADALEN", "KIRK"),
+ ("ABBAS", "ASAD"),
+ ("ABBOTT", "BRUCE"),
+ ("ABBOTT", "PHILIP"),
+ ("ABDELHALIM", "AHMED"),
+ ("ABDELHAMED", "ABDELHAMED"),
+ ("ABDEL NOUR", "MAGDY"),
+ ("ABEL", "MICHAEL"),
+ ("ABELES", "JENNIFER"),
+ ("ABELSON", "MARK"),
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir practitioners sorted by family then first name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ def test_list_in_alternate_order(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(
+ url,
+ {
+ "_sort": "individual__individualtoname__first_name,individual__individualtoname__last_name"
+ },
+ )
+ assert_fhir_response(self, response)
+
+ # Extract names
+ names = extract_practitioner_names(response)
+
+ sorted_names = [
+ ("CUTLER", "A"),
+ ("NIZAM", "A"),
+ ("SALAIS", "A"),
+ ("JANOS", "AARON"),
+ ("NOONBERG", "AARON"),
+ ("PITNEY", "AARON"),
+ ("SOLOMON", "AARON"),
+ ("STEIN", "AARON"),
+ ("ALI", "ABBAS"),
+ ("JAFRI", "ABBAS"),
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir practitioners sorted by first then family name but got {names}\n Sorted: {sorted_names}",
+ )
+
+ def test_list_in_descending_order(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(
+ url,
+ {
+ "_sort": "-individual__individualtoname__last_name,-individual__individualtoname__first_name"
+ },
+ )
+ assert_fhir_response(self, response)
+
+ # Extract names
+        # Note: the expected order follows the database collation, which differs from Python's default sort
+ names = extract_practitioner_names(response)
+
+ sorted_names = [
+ ("ZWERLING", "HAYWARD"),
+ ("ZUROSKE", "GLEN"),
+ ("ZUCKERBERG", "EDWARD"),
+ ("ZUCKER", "WILLIAM"),
+ ("ZUCCALA", "SCOTT"),
+ ("ZOVE", "DANIEL"),
+ ("ZORN", "GUNNAR"),
+ ("ZOOG", "EUGENE"),
+ ("ZOLMAN", "MARK"),
+ ("ZOLLER", "DAVID"),
+ ]
+
+ self.assertEqual(
+ names,
+ sorted_names,
+ f"Expected fhir practitioners sorted by family then first name in descending but got {names}\n Sorted: {sorted_names}",
+ )
+
+ # Pagination tests
+ def test_list_with_custom_page_size(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"page_size": 2})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertLessEqual(len(response.data["results"]["entry"]), 2)
+
+ def test_list_with_greater_than_max_page_size(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"page_size": 1001})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_pagination_limit(self, response)
+
+ # Basic Filter tests
+ def test_list_filter_by_gender(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"gender": "Male"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ # Assert all required fields are present to get npi id
+ assert_has_results(self, response)
+ self.assertIn("entry", response.data["results"])
+
+ npi_ids = []
+ for practitioner_entry in response.data["results"]["entry"]:
+ self.assertIn("resource", practitioner_entry)
+ self.assertIn("id", practitioner_entry["resource"])
+ npi_id = practitioner_entry["resource"]["id"]
+ npi_ids.append(int(npi_id))
+
+ # Check to make sure no female practitioners were fetched by mistake
+ should_be_empty = get_female_npis(npi_ids)
+ self.assertFalse(should_be_empty)
+
+ def test_list_filter_by_name(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"name": self.sample_last_name})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ names = []
+ for name in entry["resource"]["name"]:
+ names.append(name["family"])
+ names.append(name["given"])
+
+ self.assertIn(self.sample_last_name, names)
+
+ def test_list_filter_by_practitioner_type(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"practitioner_type": "Nurse"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ nurse_codes = [
+ nc["code"] for nc in entry["resource"]["qualification"][0]["code"]["coding"]
+ ]
+ self.assertIn(self.nurse_code, nurse_codes)
+ self.assertNotIn(self.transplant_code, nurse_codes)
+
+ # Identifiers Filter tests
+ def test_list_filter_by_identifier_general(self):
+ identifier = "1234567890"
+
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"identifier": identifier})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ all_values = []
+ for entry in response.data["results"]["entry"]:
+ values = [int(v["value"]) for v in entry["resource"]["identifier"]]
+ all_values.extend(values)
+ self.assertIn(int(identifier), values)
+
+ # assert that Kirk Aadalen is in the data
+ self.assertIn(self.pracs[0].npi.npi, all_values)
+ # assert that Asad Abbas is in the data
+ self.assertIn(self.pracs[1].npi.npi, all_values)
+
+ def test_list_filter_by_npi_specific(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"identifier": "NPI|1234567890"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ values = [int(v["value"]) for v in entry["resource"]["identifier"]]
+ self.assertIn(self.pracs[0].npi.npi, values)
+
+ self.assertEqual(len(response.data["results"]["entry"]), 1)
+
+ # Address Filter tests
+ def test_list_filter_by_address(self):
+ url = reverse("fhir-practitioner-list")
+ test_search = "123 Street R. Rochester"
+ response = self.client.get(url, {"address": test_search})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ present_checks = []
+ for address in entry["resource"]["address"]:
+ address_string = ""
+
+ for line in address["line"]:
+ address_string += line + " "
+
+ address_string += address["city"] + " "
+ address_string += address["state"] + " "
+ address_string += address["postalCode"]
+
+ present_checks.append(test_search in address_string)
+ self.assertTrue(any(present_checks))
+
+ def test_list_filter_by_address_city(self):
+ url = reverse("fhir-practitioner-list")
+ city_string = self.locs[0].address.address_us.city_name
+ response = self.client.get(url, {"address_city": city_string})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ cities = []
+ for address in entry["resource"]["address"]:
+ cities.append(address["city"])
+
+ self.assertIn(city_string, cities)
+
+ def test_list_filter_by_address_state(self):
+ url = reverse("fhir-practitioner-list")
+ response = self.client.get(url, {"address_state": "CA"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+        state_abbreviations = [
+            d["resource"]["address"][0]["state"] for d in response.data["results"]["entry"]
+        ]
+
+        for state in state_abbreviations:
+ self.assertEqual("CA", state)
+
+ def test_list_filter_by_address_postalcode(self):
+ url = reverse("fhir-practitioner-list")
+ postal_code_string = self.locs[0].address.address_us.zipcode
+ response = self.client.get(url, {"address_postalcode": postal_code_string})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ zips = []
+ for address in entry["resource"]["address"]:
+ zips.append(address["postalCode"])
+ self.assertIn(postal_code_string, zips)
+
+ def test_list_filter_by_address_use(self):
+ url = reverse("fhir-practitioner-list")
+
+ response = self.client.get(url, {"address_use": "home"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ for entry in response.data["results"]["entry"]:
+ uses = []
+ for address in entry["resource"]["address"]:
+ # assert the address use is in the data
+ self.assertIn("use", address)
+ uses.append(address["use"])
+ self.assertIn("home", uses)
+
+ # Retrieve tests
+ def test_retrieve_nonexistent(self):
+ url = reverse("fhir-practitioner-detail", args=["999999"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_nonexistent_uuid(self):
+ url = reverse("fhir-practitioner-detail", args=["12300000-0000-0000-0000-000000000123"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_retrieve_single_practitioner(self):
+ id = self.pracs[0].individual.id
+ url = reverse("fhir-practitioner-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], str(id))
diff --git a/backend/npdfhir/tests/test_practitioner_role.py b/backend/npdfhir/tests/test_practitioner_role.py
new file mode 100644
index 00000000..ca9a8782
--- /dev/null
+++ b/backend/npdfhir/tests/test_practitioner_role.py
@@ -0,0 +1,126 @@
+from django.urls import reverse
+from rest_framework import status
+
+from .api_test_case import APITestCase
+from .fixtures.practitioner import create_full_practitionerrole
+from .helpers import (
+ assert_fhir_response,
+ assert_has_results,
+ assert_pagination_limit,
+ extract_resource_ids,
+)
+
+
+class PractitionerRoleViewSetTestCase(APITestCase):
+ @classmethod
+ def setUpTestData(cls):
+        # Location names, pre-sorted so that creation order matches the expected default sort order
+ cls.test_records_name = [
+ "A BEAUTIFUL SMILE DENTISTRY, L.L.C.",
+ "ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC",
+ "ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC",
+ "ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC",
+ "ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC",
+ "ADIRONDACK MEDICAL HEALTH CARE ASSOCIATES PLLC",
+ "ADR LLC",
+ "ADR LLC",
+ "ADR LLC",
+ "ADR LLC",
+ ]
+
+ cls.roles = []
+
+ for i, loc_name in enumerate(cls.test_records_name):
+            # Vary practitioner data per record to avoid NPI and name collisions
+ first = f"Test{i}"
+ last = f"Practitioner{i}"
+ npi = 1000000000 + i
+
+ role = create_full_practitionerrole(
+ first_name=first,
+ last_name=last,
+ gender="M" if i % 2 == 0 else "F",
+ npi_value=npi,
+ location_name=loc_name,
+ role_display="Clinician",
+ role_code="MD",
+ )
+
+ cls.roles.append(role)
+
+ cls.first_prac_id = cls.roles[0].id
+ return super().setUpTestData()
+
+ # Basic tests
+ def test_list_default(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+ assert_has_results(self, response)
+
+ # Sorting tests
+ def test_list_in_proper_order(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url)
+ assert_fhir_response(self, response)
+
+ # Extract ids
+ ids = extract_resource_ids(response)
+
+ sorted_ids = [str(role.id) for role in self.roles]
+
+ self.assertEqual(
+ ids,
+ sorted_ids,
+ f"Expected Practitioner roles sorted by order of location name but got {ids}\n Sorted: {sorted_ids}",
+ )
+
+ # Pagination tests
+ def test_list_with_custom_page_size(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url, {"page_size": 2})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertLessEqual(len(response.data["results"]["entry"]), 2)
+
+ def test_list_with_greater_than_max_page_size(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url, {"page_size": 1001})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_pagination_limit(self, response)
+
+ # Filter tests
+ def test_list_filter_by_name(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url, {"name": "Cumberland"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_practitioner_gender(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url, {"practitioner_gender": "Female"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ def test_list_filter_by_organization_name(self):
+ url = reverse("fhir-practitionerrole-list")
+ response = self.client.get(url, {"organization_name": "Hospital"})
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ assert_has_results(self, response)
+
+ # Retrieve tests
+ def test_retrieve_nonexistent_uuid(self):
+ url = reverse("fhir-practitionerrole-detail", args=["12300000-0000-0000-0000-000000000124"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+ def test_retrieve_nonexistent_npi(self):
+ url = reverse("fhir-practitionerrole-detail", args=["999999"])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_retrieve_single_practitionerrole(self):
+ id = self.first_prac_id
+ url = reverse("fhir-practitionerrole-detail", args=[id])
+ response = self.client.get(url)
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(response.data["id"], str(id))
diff --git a/backend/npdfhir/urls.py b/backend/npdfhir/urls.py
index e2f6c91c..931d61b7 100644
--- a/backend/npdfhir/urls.py
+++ b/backend/npdfhir/urls.py
@@ -1,20 +1,20 @@
-from django.urls import path, include, re_path
from debug_toolbar.toolbar import debug_toolbar_urls
-from drf_spectacular.views import SpectacularJSONAPIView, SpectacularRedocView, SpectacularSwaggerView
-from django.urls import path
+from django.urls import include, path, re_path
+from drf_spectacular.views import (
+ SpectacularJSONAPIView,
+ SpectacularRedocView,
+ SpectacularSwaggerView,
+)
from . import views
from .router import router
-
urlpatterns = [
path("docs/schema/", SpectacularJSONAPIView.as_view(), name="schema"),
- re_path("docs/redoc/?", SpectacularRedocView.as_view(url_name='schema'), name="schema-redoc"),
- re_path("docs/?", SpectacularSwaggerView.as_view(url_name='schema'), name="schema-swagger-ui"),
+ re_path("docs/redoc/?", SpectacularRedocView.as_view(url_name="schema"), name="schema-redoc"),
+ re_path("docs/?", SpectacularSwaggerView.as_view(url_name="schema"), name="schema-swagger-ui"),
path("healthCheck", views.health, name="healthCheck"),
- path('metadata', views.FHIRCapabilityStatementView.as_view(),
- name='fhir-metadata'),
-
+ re_path("metadata/?", views.FHIRCapabilityStatementView.as_view(), name="fhir-metadata"),
# Router URLs
# everything else is passed to the rest_framework router to manage
path("", include(router.urls), name="index"),
diff --git a/backend/npdfhir/utils.py b/backend/npdfhir/utils.py
index 0e038931..29b30373 100644
--- a/backend/npdfhir/utils.py
+++ b/backend/npdfhir/utils.py
@@ -1,44 +1,44 @@
+from django.urls import reverse
from fhir.resources.R4B.address import Address
from fhir.resources.R4B.reference import Reference
-from rest_framework.test import APIClient
-from django.urls import reverse
-from drf_spectacular.utils import OpenApiParameter
-from drf_spectacular.types import OpenApiTypes
+from drf_spectacular.views import SpectacularJSONAPIView
+
def SmartyStreetstoFHIR(address):
addressLine1 = f"{address.address_us.primary_number} {address.address_us.street_predirection} {address.address_us.street_name} {address.address_us.postdirection} {address.address_us.street_suffix}"
- addressLine2 = f"{address.address_us.secondary_designator} {address.address_us.secondary_number}"
+ addressLine2 = (
+ f"{address.address_us.secondary_designator} {address.address_us.secondary_number}"
+ )
addressLine3 = f"{address.address_us.extra_secondary_designator} {address.address_us.extra_secondary_number}"
cityStateZip = f"f{address.address_us.city_name}, {address.address_us.fips_state.state_abbreviation} {address.address_us.zipcode}"
return Address(
line=[addressLine1, addressLine2, addressLine3, cityStateZip],
- use=address.address_type.value
+ use=address.address_type.value,
)
-def get_schema_data(request, url_name, additional_args=None):
- client = APIClient()
- if request.user:
- # reuse the authenticated user from the active request to make the
- # internal request to retrieve the current schema
- client.force_authenticate(user=request.user)
- schema_url = reverse(url_name, kwargs=additional_args)
- response = client.get(schema_url)
- return response.data
+
+def get_schema_data(request):
+ schema_view = SpectacularJSONAPIView.as_view()
+ response = schema_view(request._request)
+ # The response contains the schema data in its .data attribute
+ schema_data = response.data
+
+ return schema_data
+
def genReference(url_name, identifier, request):
- reference = request.build_absolute_uri(
- reverse(url_name, kwargs={'pk': identifier}))
- reference = Reference(
- reference=reference)
+ reference = request.build_absolute_uri(reverse(url_name, kwargs={"id": identifier}))
+ reference = Reference(reference=reference)
return reference
+
def parse_identifier_query(identifier_value):
"""
Parse an identifier search parameter that should be in the format of "value" OR "system|value".
Currently only supporting NPI search "NPI|123455".
"""
- if '|' in identifier_value:
- parts = identifier_value.split('|', 1)
+ if "|" in identifier_value:
+ parts = identifier_value.split("|", 1)
return (parts[0], parts[1])
return (None, identifier_value)
diff --git a/backend/npdfhir/views.py b/backend/npdfhir/views.py
index 6abd387a..ea7b967c 100644
--- a/backend/npdfhir/views.py
+++ b/backend/npdfhir/views.py
@@ -1,6 +1,7 @@
from uuid import UUID
-from django.db.models import Q, OuterRef, Subquery
+from django.db.models import F, Value, CharField
+from django.db.models.functions import Concat
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.html import escape
@@ -10,6 +11,7 @@
from rest_framework.views import APIView
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.response import Response
+from rest_framework.filters import SearchFilter, OrderingFilter
from .pagination import CustomPaginator
from .renderers import FHIRRenderer
@@ -24,10 +26,8 @@
EndpointInstance,
Location,
Organization,
- OrganizationToName,
Provider,
ProviderToLocation,
- IndividualToName,
)
from .serializers import (
@@ -37,9 +37,13 @@
OrganizationSerializer,
PractitionerRoleSerializer,
PractitionerSerializer,
- CapabilityStatementSerializer
+ CapabilityStatementSerializer,
)
+from django.conf import settings
+
+DEBUG = settings.DEBUG
+
def index(request):
return HttpResponse("Connection to npd database: successful")
@@ -49,20 +53,31 @@ def health(request):
return HttpResponse("healthy")
+class ParamOrderingFilter(OrderingFilter):
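+    # Reads ordering from the FHIR-style "_sort" query parameter instead of DRF's default "ordering"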
+ ordering_param = "_sort"
+
+
class FHIREndpointViewSet(viewsets.GenericViewSet):
"""
ViewSet for FHIR Endpoint Resources
"""
+
queryset = EndpointInstance.objects.none()
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
- filter_backends = [DjangoFilterBackend]
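+    # Expose the browsable HTML renderer only when DEBUG is enabled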
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
+ filter_backends = [DjangoFilterBackend, SearchFilter, ParamOrderingFilter]
filterset_class = EndpointFilterSet
- pagination_class = CustomPaginator
+ ordering_fields = ["name", "address", "ehr_vendor_name"]
+ pagination_class = CustomPaginator
+ lookup_url_kwarg = "id"
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR Bundle resource of FHIR Endpoint resources'
+ description="Successfully retrieved FHIR Bundle resource of FHIR Endpoint resources"
)
}
)
@@ -73,52 +88,59 @@ def list(self, request):
Default sort order: ascending endpoint instance name
"""
- endpoints = EndpointInstance.objects.all().prefetch_related(
- 'endpoint_connection_type',
- 'environment_type',
- 'endpointinstancetopayload_set',
- 'endpointinstancetopayload_set__payload_type',
- 'endpointinstancetopayload_set__mime_type',
- 'endpointinstancetootherid_set'
- ).order_by('name')
+ endpoints = (
+ EndpointInstance.objects.all()
+ .prefetch_related(
+ "endpoint_connection_type",
+ "environment_type",
+ "endpointinstancetopayload_set",
+ "endpointinstancetopayload_set__payload_type",
+ "endpointinstancetopayload_set__mime_type",
+ "endpointinstancetootherid_set",
+ )
+ .annotate(ehr_vendor_name=F("ehr_vendor__name"))
+ .order_by("name")
+ )
endpoints = self.filter_queryset(endpoints)
paginated_endpoints = self.paginate_queryset(endpoints)
- serialized_endpoints = EndpointSerializer(paginated_endpoints, many=True)
- bundle = BundleSerializer(
- serialized_endpoints, context={"request": request})
+ serialized_endpoints = EndpointSerializer(
+ paginated_endpoints, many=True, context={"request": request}
+ )
+ bundle = BundleSerializer(serialized_endpoints, context={"request": request})
response = self.get_paginated_response(bundle.data)
return response
@extend_schema(
responses={
- 200: OpenApiResponse(
- description='Successfully retrieved FHIR Endpoint resource'
- )
+ 200: OpenApiResponse(description="Successfully retrieved FHIR Endpoint resource")
}
)
- def retrieve(self, request, pk=None):
+ def retrieve(self, request, id=None):
"""
Query a specific endpoint as a FHIR Endpoint resource
"""
try:
- UUID(pk)
- except (ValueError, TypeError) as e:
- return HttpResponse(f"Endpoint {escape(pk)} not found", status=404)
-
- endpoint = get_object_or_404(EndpointInstance.objects.prefetch_related(
- 'endpoint_connection_type',
- 'environment_type',
- 'endpointinstancetopayload_set',
- 'endpointinstancetopayload_set__payload_type',
- 'endpointinstancetopayload_set__mime_type',
- 'endpointinstancetootherid_set'
- ), pk=pk)
+ UUID(id)
+ except (ValueError, TypeError):
+ return HttpResponse(f"Endpoint {escape(id)} not found", status=404)
+
+ endpoint = get_object_or_404(
+ EndpointInstance.objects.prefetch_related(
+ "endpoint_connection_type",
+ "environment_type",
+ "endpointinstancetopayload_set",
+ "endpointinstancetopayload_set__payload_type",
+ "endpointinstancetopayload_set__mime_type",
+ "endpointinstancetootherid_set",
+ ),
+ id=id,
+ )
- serialized_endpoint = EndpointSerializer(endpoint)
+ serialized_endpoint = EndpointSerializer(endpoint, context={"request": request})
# Set appropriate content type for FHIR responses
response = Response(serialized_endpoint.data)
@@ -130,16 +152,28 @@ class FHIRPractitionerViewSet(viewsets.GenericViewSet):
"""
ViewSet for FHIR Practitioner resources
"""
+
queryset = Provider.objects.none()
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
- filter_backends = [DjangoFilterBackend]
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
+ filter_backends = [DjangoFilterBackend, SearchFilter, ParamOrderingFilter]
filterset_class = PractitionerFilterSet
pagination_class = CustomPaginator
+ lookup_url_kwarg = "id"
+
+ ordering_fields = [
+ "individual__individualtoname__last_name",
+ "individual__individualtoname__first_name",
+ "npi_value",
+ ]
+ # permission_classes = [permissions.IsAuthenticated]
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR Bundle resource of FHIR Practitioner resources'
+ description="Successfully retrieved FHIR Bundle resource of FHIR Practitioner resources"
)
}
)
@@ -150,73 +184,65 @@ def list(self, request):
Default sort order: ascending last name, first name
"""
# Subqueries for last_name and first_name of the individual
- primary_last_name_subquery = (
- IndividualToName.objects
- .filter(individual=OuterRef('individual'))
- .order_by('last_name')
- .values('last_name')[:1]
- )
- primary_first_name_subquery = (
- IndividualToName.objects
- .filter(individual=OuterRef('individual'))
- .order_by('first_name')
- .values('first_name')[:1]
+ providers = (
+ Provider.objects.all()
+ .prefetch_related(
+ "npi",
+ "individual",
+ "individual__individualtoaddress_set",
+ "individual__individualtoaddress_set__address__address_us",
+ "individual__individualtoaddress_set__address__address_us__state_code",
+ "individual__individualtoaddress_set__address_use",
+ "individual__individualtophone_set",
+ "individual__individualtoemail_set",
+ "providertootherid_set",
+ "providertotaxonomy_set",
+ )
+ .order_by(
+ "individual__individualtoname__last_name",
+ "individual__individualtoname__first_name",
+ )
)
- providers = Provider.objects.all().prefetch_related(
- 'npi', 'individual', 'individual__individualtoname_set', 'individual__individualtoaddress_set',
- 'individual__individualtoaddress_set__address__address_us',
- 'individual__individualtoaddress_set__address__address_us__state_code',
- 'individual__individualtoaddress_set__address_use', 'individual__individualtophone_set',
- 'individual__individualtoemail_set', 'providertootherid_set', 'providertotaxonomy_set'
- ).annotate(
- primary_last_name=Subquery(primary_last_name_subquery),
- primary_first_name=Subquery(primary_first_name_subquery)
- ).order_by('primary_last_name', 'primary_first_name')
-
providers = self.filter_queryset(providers)
paginated_providers = self.paginate_queryset(providers)
- serialized_providers = PractitionerSerializer(
- paginated_providers, many=True)
- bundle = BundleSerializer(
- serialized_providers, context={"request": request})
+ serialized_providers = PractitionerSerializer(paginated_providers, many=True)
+ bundle = BundleSerializer(serialized_providers, context={"request": request})
response = self.get_paginated_response(bundle.data)
return response
@extend_schema(
responses={
- 200: OpenApiResponse(
- description='Successfully retrieved FHIR Practitioner resource'
- )
+ 200: OpenApiResponse(description="Successfully retrieved FHIR Practitioner resource")
}
)
- def retrieve(self, request, pk=None):
+ def retrieve(self, request, id=None):
"""
Query a specific provider as a FHIR Practitioner resource
"""
try:
- UUID(pk)
- except (ValueError, TypeError) as e:
- return HttpResponse(f"Practitioner {escape(pk)} not found", status=404)
+ UUID(id)
+ except (ValueError, TypeError):
+ return HttpResponse(f"Practitioner {escape(id)} not found", status=404)
provider = get_object_or_404(
Provider.objects.prefetch_related(
- 'npi',
- 'individual',
- 'individual__individualtoname_set',
- 'individual__individualtoaddress_set',
- 'individual__individualtoaddress_set__address__address_us',
- 'individual__individualtoaddress_set__address__address_us__state_code',
- 'individual__individualtoaddress_set__address_use',
- 'individual__individualtophone_set',
- 'individual__individualtoemail_set',
- 'providertootherid_set',
- 'providertotaxonomy_set'
+ "npi",
+ "individual",
+ "individual__individualtoname_set",
+ "individual__individualtoaddress_set",
+ "individual__individualtoaddress_set__address__address_us",
+ "individual__individualtoaddress_set__address__address_us__state_code",
+ "individual__individualtoaddress_set__address_use",
+ "individual__individualtophone_set",
+ "individual__individualtoemail_set",
+ "providertootherid_set",
+ "providertotaxonomy_set",
),
- individual_id=pk
+ individual_id=id,
)
serialized_practitioner = PractitionerSerializer(provider)
@@ -231,16 +257,24 @@ class FHIRPractitionerRoleViewSet(viewsets.GenericViewSet):
"""
ViewSet for FHIR PractitionerRole resources
"""
+
queryset = ProviderToLocation.objects.none()
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
- filter_backends = [DjangoFilterBackend]
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
+ filter_backends = [DjangoFilterBackend, SearchFilter, ParamOrderingFilter]
filterset_class = PractitionerRoleFilterSet
pagination_class = CustomPaginator
+ lookup_url_kwarg = "id"
+
+ ordering_fields = ["location__name", "practitioner_first_name", "practitioner_last_name"]
+ # permission_classes = [permissions.IsAuthenticated]
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR Bundle resource of FHIR PractitionerRole resources'
+ description="Successfully retrieved FHIR Bundle resource of FHIR PractitionerRole resources"
)
}
)
@@ -250,20 +284,22 @@ def list(self, request):
        Default sort order: ascending by location name
"""
+ # all_params = request.query_params
+
practitionerroles = (
- ProviderToLocation.objects
- .select_related('location')
- .prefetch_related('provider_to_organization')
- .order_by('location__name')
+ ProviderToLocation.objects.select_related("location")
+ .prefetch_related("provider_to_organization")
+ .annotate(location_name=F("location__name"))
+ .order_by("location__name")
).all()
practitionerroles = self.filter_queryset(practitionerroles)
paginated_practitionerroles = self.paginate_queryset(practitionerroles)
serialized_practitionerroles = PractitionerRoleSerializer(
- paginated_practitionerroles, many=True, context={"request": request})
- bundle = BundleSerializer(
- serialized_practitionerroles, context={"request": request})
+ paginated_practitionerroles, many=True, context={"request": request}
+ )
+ bundle = BundleSerializer(serialized_practitionerroles, context={"request": request})
response = self.get_paginated_response(bundle.data)
return response
@@ -271,23 +307,24 @@ def list(self, request):
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR PractitionerRole resource'
+ description="Successfully retrieved FHIR PractitionerRole resource"
)
}
)
- def retrieve(self, request, pk=None):
+ def retrieve(self, request, id=None):
"""
Query a specific relationship between providers, healthcare organizations, and practice locations, represented as a FHIR PractitionerRole resource
"""
try:
- UUID(pk)
- except (ValueError, TypeError) as e:
- return HttpResponse(f"PractitionerRole {escape(pk)} not found", status=404)
+ UUID(id)
+ except (ValueError, TypeError):
+ return HttpResponse(f"PractitionerRole {escape(id)} not found", status=404)
- practitionerrole = get_object_or_404(ProviderToLocation, pk=pk)
+ practitionerrole = get_object_or_404(ProviderToLocation, id=id)
serialized_practitionerrole = PractitionerRoleSerializer(
- practitionerrole, context={"request": request})
+ practitionerrole, context={"request": request}
+ )
# Set appropriate content type for FHIR responses
response = Response(serialized_practitionerrole.data)
@@ -299,16 +336,24 @@ class FHIROrganizationViewSet(viewsets.GenericViewSet):
"""
ViewSet for FHIR Organization resources
"""
+
queryset = Organization.objects.none()
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
- filter_backends = [DjangoFilterBackend]
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
+ filter_backends = [DjangoFilterBackend, SearchFilter, ParamOrderingFilter]
filterset_class = OrganizationFilterSet
pagination_class = CustomPaginator
+ lookup_url_kwarg = "id"
+
+ ordering_fields = ["organizationtoname__name"]
+ # permission_classes = [permissions.IsAuthenticated]
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR Bundle resource of FHIR Organization resources'
+ description="Successfully retrieved FHIR Bundle resource of FHIR Organization resources"
)
}
)
@@ -318,91 +363,86 @@ def list(self, request):
Default sort order: ascending by organization name
"""
- primary_name_subquery = (
- OrganizationToName.objects
- .filter(organization=OuterRef('pk'), is_primary=True)
- .values('name')[:1]
- )
- organizations = Organization.objects.all().prefetch_related(
- 'authorized_official',
- 'ein',
- 'organizationtoname_set',
- 'organizationtoaddress_set',
- 'organizationtoaddress_set__address',
- 'organizationtoaddress_set__address__address_us',
- 'organizationtoaddress_set__address__address_us__state_code',
- 'organizationtoaddress_set__address_use',
-
- 'authorized_official__individualtophone_set',
- 'authorized_official__individualtoname_set',
- 'authorized_official__individualtoemail_set',
- 'authorized_official__individualtoaddress_set',
- 'authorized_official__individualtoaddress_set__address__address_us',
- 'authorized_official__individualtoaddress_set__address__address_us__state_code',
-
- 'clinicalorganization',
- 'clinicalorganization__npi',
- 'clinicalorganization__organizationtootherid_set',
- 'clinicalorganization__organizationtootherid_set__other_id_type',
- 'clinicalorganization__organizationtotaxonomy_set',
- 'clinicalorganization__organizationtotaxonomy_set__nucc_code'
- ).annotate(primary_name=Subquery(primary_name_subquery)).order_by('primary_name')
+ organizations = (
+ Organization.objects.all()
+ .prefetch_related(
+ "authorized_official",
+ "ein",
+ "organizationtoname_set",
+ "organizationtoaddress_set",
+ "organizationtoaddress_set__address",
+ "organizationtoaddress_set__address__address_us",
+ "organizationtoaddress_set__address__address_us__state_code",
+ "organizationtoaddress_set__address_use",
+ "authorized_official__individualtophone_set",
+ "authorized_official__individualtoname_set",
+ "authorized_official__individualtoemail_set",
+ "authorized_official__individualtoaddress_set",
+ "authorized_official__individualtoaddress_set__address__address_us",
+ "authorized_official__individualtoaddress_set__address__address_us__state_code",
+ "clinicalorganization",
+ "clinicalorganization__npi",
+ "clinicalorganization__organizationtootherid_set",
+ "clinicalorganization__organizationtootherid_set__other_id_type",
+ "clinicalorganization__organizationtotaxonomy_set",
+ "clinicalorganization__organizationtotaxonomy_set__nucc_code",
+ )
+ .order_by("organizationtoname__name")
+ )
organizations = self.filter_queryset(organizations)
paginated_organizations = self.paginate_queryset(organizations)
serialized_organizations = OrganizationSerializer(
- paginated_organizations, many=True)
- bundle = BundleSerializer(
- serialized_organizations, context={"request": request})
+ paginated_organizations, many=True, context={"request": request}
+ )
+ bundle = BundleSerializer(serialized_organizations, context={"request": request})
response = self.get_paginated_response(bundle.data)
return response
@extend_schema(
responses={
- 200: OpenApiResponse(
- description='Successfully retrieved FHIR Organization resource'
- )
+ 200: OpenApiResponse(description="Successfully retrieved FHIR Organization resource")
}
)
- def retrieve(self, request, pk=None):
+ def retrieve(self, request, id=None):
"""
Query a specific organization, represented as a FHIR Organization resource
"""
try:
- UUID(pk)
- except (ValueError, TypeError) as e:
- return HttpResponse(f"Organization {escape(pk)} not found", status=404)
-
- organization = get_object_or_404(Organization.objects.prefetch_related(
- 'authorized_official',
- 'ein',
- 'organizationtoname_set',
- 'organizationtoaddress_set',
- 'organizationtoaddress_set__address',
- 'organizationtoaddress_set__address__address_us',
- 'organizationtoaddress_set__address__address_us__state_code',
- 'organizationtoaddress_set__address_use',
-
- 'authorized_official__individualtophone_set',
- 'authorized_official__individualtoname_set',
- 'authorized_official__individualtoemail_set',
- 'authorized_official__individualtoaddress_set',
- 'authorized_official__individualtoaddress_set__address__address_us',
- 'authorized_official__individualtoaddress_set__address__address_us__state_code',
-
- 'clinicalorganization',
- 'clinicalorganization__npi',
- 'clinicalorganization__organizationtootherid_set',
- 'clinicalorganization__organizationtootherid_set__other_id_type',
- 'clinicalorganization__organizationtotaxonomy_set',
- 'clinicalorganization__organizationtotaxonomy_set__nucc_code'
- ),
- pk=pk)
-
- serialized_organization = OrganizationSerializer(organization)
+ UUID(id)
+ except (ValueError, TypeError):
+ return HttpResponse(f"Organization {escape(id)} not found", status=404)
+
+ organization = get_object_or_404(
+ Organization.objects.prefetch_related(
+ "authorized_official",
+ "ein",
+ "organizationtoname_set",
+ "organizationtoaddress_set",
+ "organizationtoaddress_set__address",
+ "organizationtoaddress_set__address__address_us",
+ "organizationtoaddress_set__address__address_us__state_code",
+ "organizationtoaddress_set__address_use",
+ "authorized_official__individualtophone_set",
+ "authorized_official__individualtoname_set",
+ "authorized_official__individualtoemail_set",
+ "authorized_official__individualtoaddress_set",
+ "authorized_official__individualtoaddress_set__address__address_us",
+ "authorized_official__individualtoaddress_set__address__address_us__state_code",
+ "clinicalorganization",
+ "clinicalorganization__npi",
+ "clinicalorganization__organizationtootherid_set",
+ "clinicalorganization__organizationtootherid_set__other_id_type",
+ "clinicalorganization__organizationtotaxonomy_set",
+ "clinicalorganization__organizationtotaxonomy_set__nucc_code",
+ ),
+ id=id,
+ )
+
+ serialized_organization = OrganizationSerializer(organization, context={"request": request})
# Set appropriate content type for FHIR responses
response = Response(serialized_organization.data)
@@ -414,16 +454,24 @@ class FHIRLocationViewSet(viewsets.GenericViewSet):
"""
ViewSet for FHIR Location resources
"""
+
queryset = Location.objects.none()
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
- filter_backends = [DjangoFilterBackend]
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
+ filter_backends = [DjangoFilterBackend, SearchFilter, ParamOrderingFilter]
filterset_class = LocationFilterSet
pagination_class = CustomPaginator
+ lookup_url_kwarg = "id"
+
+ ordering_fields = ["organization_name", "address_full", "name"]
+ # permission_classes = [permissions.IsAuthenticated]
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR Bundle resource of FHIR Location resources'
+ description="Successfully retrieved FHIR Bundle resource of FHIR Location resources"
)
}
)
@@ -433,42 +481,58 @@ def list(self, request):
Default sort order: ascending by location name
"""
- locations = Location.objects.all().prefetch_related(
- 'address__address_us', 'address__address_us__state_code'
- ).order_by('name')
+ locations = (
+ Location.objects.all()
+ .select_related(
+ "organization",
+ "address__address_us",
+ "address__address_us__state_code",
+ )
+ .annotate(
+ organization_name=F("organization__organizationtoname__name"),
+ address_full=Concat(
+ F("address__address_us__delivery_line_1"),
+ Value(", "),
+ F("address__address_us__city_name"),
+ Value(", "),
+ F("address__address_us__state_code__abbreviation"),
+ Value(" "),
+ F("address__address_us__zipcode"),
+ output_field=CharField(),
+ ),
+ )
+ .order_by("name")
+ )
locations = self.filter_queryset(locations)
paginated_locations = self.paginate_queryset(locations)
# Serialize the bundle
serialized_locations = LocationSerializer(
- paginated_locations, many=True, context={"request": request})
- bundle = BundleSerializer(
- serialized_locations, context={"request": request})
+ paginated_locations, many=True, context={"request": request}
+ )
+ bundle = BundleSerializer(serialized_locations, context={"request": request})
response = self.get_paginated_response(bundle.data)
return response
@extend_schema(
responses={
- 200: OpenApiResponse(
- description='Successfully retrieved FHIR Location resource'
- )
+ 200: OpenApiResponse(description="Successfully retrieved FHIR Location resource")
}
)
- def retrieve(self, request, pk=None):
+ def retrieve(self, request, id=None):
"""
Query a specific healthcare practice location as a FHIR Location resource
"""
try:
- UUID(pk)
- except (ValueError, TypeError) as e:
- return HttpResponse(f"Location {escape(pk)} not found", status=404)
+ UUID(id)
+ except (ValueError, TypeError):
+ return HttpResponse(f"Location {escape(id)} not found", status=404)
- location = get_object_or_404(Location, pk=pk)
+ location = get_object_or_404(Location, id=id)
- serialized_location = LocationSerializer(
- location, context={"request": request})
+ serialized_location = LocationSerializer(location, context={"request": request})
# Set appropriate content type for FHIR responses
response = Response(serialized_location.data)
@@ -480,12 +544,16 @@ class FHIRCapabilityStatementView(APIView):
"""
    View for the FHIR CapabilityStatement resource
"""
- renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+
+ if DEBUG:
+ renderer_classes = [FHIRRenderer, BrowsableAPIRenderer]
+ else:
+ renderer_classes = [FHIRRenderer]
@extend_schema(
responses={
200: OpenApiResponse(
- description='Successfully retrieved FHIR CapabilityStatement resource'
+ description="Successfully retrieved FHIR CapabilityStatement resource"
)
}
)
@@ -493,8 +561,10 @@ def get(self, request):
"""
Query metadata about this FHIR instance, represented as FHIR CapabilityStatement resource
"""
- serializer = CapabilityStatementSerializer(
- context={"request": request})
- response = serializer.to_representation(None)
+ serialized_capability_statement = CapabilityStatementSerializer(
+ context={"request": request}
+ )
+
+ response = Response(serialized_capability_statement.to_representation())
- return Response(response)
\ No newline at end of file
+ return response
diff --git a/backend/provider_directory/apps.py b/backend/provider_directory/apps.py
index 90f15d7c..5bc8415d 100644
--- a/backend/provider_directory/apps.py
+++ b/backend/provider_directory/apps.py
@@ -1,5 +1,9 @@
from django.apps import AppConfig
-class ProviderDirectory(AppConfig):
- default_auto_field = 'django.db.models.BigAutoField'
- name = 'provider_directory'
+
+class ProviderDirectoryConfig(AppConfig):
+ default_auto_field = "django.db.models.BigAutoField"
+ name = "provider_directory"
+
+ def ready(self):
+ from . import flag_conditions as flag_conditions
diff --git a/backend/provider_directory/flag_conditions.py b/backend/provider_directory/flag_conditions.py
new file mode 100644
index 00000000..825077f7
--- /dev/null
+++ b/backend/provider_directory/flag_conditions.py
@@ -0,0 +1,25 @@
+from flags.conditions import register
+from structlog import get_logger
+
+logger = get_logger(__name__)
+
+
+@register("in_group")
+def in_group_condition(value: str | list[str], request, **kwargs):
+ """
+ Checks if the current user is in any of the specified groups.
+ """
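+    # For example, a FlagState(name="SEARCH_APP", condition="in_group", value="Developers")
+    # row enables SEARCH_APP only for authenticated users in the "Developers" group.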
+
+ logger.debug("in_group_condition", value=value)
+ if not request.user.is_authenticated:
+ return False
+
+ if isinstance(value, str):
+ group_names = [value]
+ elif isinstance(value, (list, tuple)):
+ group_names = value
+ else:
+ return False
+
+ user_groups = request.user.groups.values_list("name", flat=True)
+ return any(group_name in user_groups for group_name in group_names)
diff --git a/backend/provider_directory/tests/fixtures/__init__.py b/backend/provider_directory/tests/fixtures/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/backend/provider_directory/tests/fixtures/index.html b/backend/provider_directory/tests/fixtures/index.html
new file mode 100644
index 00000000..07338da8
--- /dev/null
+++ b/backend/provider_directory/tests/fixtures/index.html
@@ -0,0 +1,9 @@
+<html>
+<head>
+  <title>{{title}}</title>
+</head>
+
+<body>
+  index.html test file
+</body>
+</html>
\ No newline at end of file
diff --git a/backend/provider_directory/tests/test_frontend_settings.py b/backend/provider_directory/tests/test_frontend_settings.py
new file mode 100644
index 00000000..32f47a6c
--- /dev/null
+++ b/backend/provider_directory/tests/test_frontend_settings.py
@@ -0,0 +1,48 @@
+from django.contrib.auth.models import Group, User
+from django.test import TestCase
+from django.urls import reverse
+from flags.models import FlagState
+
+
+class TestFeatureFlags(TestCase):
+ """
+    Tests for the feature flags returned by the frontend_settings endpoint.
+ """
+
+ fixtures = ["auth_user.json"]
+
+ @classmethod
+ def setUpTestData(cls):
+ cls.user = User.objects.create(username="testuser", password="nothing")
+
+ def setUp(self):
+ self.client.force_login(self.user)
+
+ def test_returns_flags_json(self):
+ """
+        frontend_settings returns the configured feature flags as JSON
+ """
+ response = self.client.get(reverse("provider_directory:frontend_settings"))
+ self.assertEqual(response.status_code, 200)
+
+ data = response.json()["feature_flags"]
+
+ self.assertIn("ORGANIZATION_LOOKUP", data)
+ self.assertIn("ORGANIZATION_LOOKUP_DETAILS", data)
+
+ self.assertEqual(data["ORGANIZATION_LOOKUP"], False)
+ self.assertEqual(data["SEARCH_APP"], False)
+
+ def test_permits_flag_with_group_membership(self):
+ # arrange
+ FlagState.objects.get_or_create(name="SEARCH_APP", condition="in_group", value="Developers")
+ group, _created = Group.objects.get_or_create(name="Developers")
+ user = User.objects.get(username="testuser")
+ user.groups.add(group)
+
+ # act
+ response = self.client.get(reverse("provider_directory:frontend_settings"))
+
+ # assert
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json()["feature_flags"]["SEARCH_APP"], True)
diff --git a/backend/provider_directory/tests/test_views.py b/backend/provider_directory/tests/test_views.py
index 4efd0488..688fe6e2 100644
--- a/backend/provider_directory/tests/test_views.py
+++ b/backend/provider_directory/tests/test_views.py
@@ -1,14 +1,24 @@
-from datetime import datetime
-from http import HTTPStatus
-import os
-from pathlib import Path
+from unittest import mock
+
from django.contrib.auth.models import User
+from django.http import HttpResponse
from django.test import TestCase
from django.urls import reverse
-TEST_DIR = Path(os.path.dirname(os.path.abspath(__file__)))
-STATIC_INDEX = TEST_DIR / ".." / "static" / "index.html"
+def mock_not_found(path: str) -> list[str] | str | None:
+ return None
+
+
+def mock_found(path: str) -> list[str] | str | None:
+ return "/some/path/to/index.html"
+
+
+def mock_render(request, template_name: str, ctx: dict | None = None):
+ return HttpResponse(content=f"{template_name} test content")
+
+
+@mock.patch("provider_directory.views.index.find", mock_not_found)
class WithoutStaticIndex(TestCase):
"""
Visiting the index route when no static/index.html asset exists.
@@ -19,8 +29,6 @@ class WithoutStaticIndex(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create(username="testuser", password="nothing")
- if os.path.exists(STATIC_INDEX):
- os.unlink(STATIC_INDEX)
def setUp(self):
self.client.force_login(self.user)
@@ -33,6 +41,8 @@ def test_index_redirects_to_vite_in_development(self):
self.assertRedirects(response, "http://localhost:3000/", fetch_redirect_response=False)
+@mock.patch("provider_directory.views.index.find", mock_found)
+@mock.patch("provider_directory.views.index.render", mock_render)
class WithStaticIndex(TestCase):
"""
Visiting the index route when static/index.html asset does exist.
@@ -41,10 +51,6 @@ class WithStaticIndex(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create(username="testuser", password="nothing")
- if os.path.exists(STATIC_INDEX):
- os.unlink(STATIC_INDEX)
- with open(STATIC_INDEX, "a") as index:
- index.write(f"\n")
def setUp(self):
self.client.force_login(self.user)
@@ -54,4 +60,4 @@ def test_index_serves_static_file(self):
When static/index.html exists, route serves it
"""
response = self.client.get(reverse("provider_directory:index"))
- self.assertContains(response, "test content", status_code=HTTPStatus.OK)
+ self.assertContains(response, "index.html test content")
diff --git a/backend/provider_directory/urls.py b/backend/provider_directory/urls.py
index f76501f9..1683477e 100644
--- a/backend/provider_directory/urls.py
+++ b/backend/provider_directory/urls.py
@@ -1,14 +1,15 @@
-from django.urls import path
from django.contrib.auth import views as contrib_auth_views
-from provider_directory.views import index, authentication
+from django.urls import path
+
+from .views import authentication, frontend_settings, index
app_name = "provider_directory"
urlpatterns = [
# we're only going to provide login / logout functionality, for now
- path("frontend_settings", authentication.frontend_settings, name="authentication_settings"),
path("accounts/login/", authentication.LoginView.as_view(), name="login"),
path("accounts/logout/", contrib_auth_views.LogoutView.as_view(), name="logout"),
-
+ # non-FHIR application API endpoints
+ path("api/frontend_settings", frontend_settings.frontend_settings, name="frontend_settings"),
path(r"", index.index, name="index"),
path(r"", index.index, name="index_with_path"),
]
diff --git a/backend/provider_directory/views/authentication.py b/backend/provider_directory/views/authentication.py
index 8ec1a214..30cf44ac 100644
--- a/backend/provider_directory/views/authentication.py
+++ b/backend/provider_directory/views/authentication.py
@@ -1,9 +1,6 @@
import structlog
-from django.conf import settings
from django.contrib.auth.decorators import login_not_required
from django.contrib.auth.views import LoginView as ContribLoginView
-from django.http import JsonResponse
-from django.middleware.csrf import get_token
from django.utils.decorators import method_decorator
from pydantic import BaseModel
@@ -25,21 +22,3 @@ class LoginContext(BaseModel):
class LoginView(ContribLoginView):
template_name = "index.html"
redirect_authenticated_user = True
-
-
-class FrontendSettingsPayload(BaseModel):
- require_authentication: bool = settings.REQUIRE_AUTHENTICATION
- user: UserData = UserData()
-
- def populate_user_data(self, request):
- if request.user and not request.user.is_anonymous:
- self.user.username = request.user.username
- self.user.is_anonymous = False
-
-
-@login_not_required
-def frontend_settings(request):
- get_token(request) # always set the CSRF token cookie
- payload = FrontendSettingsPayload()
- payload.populate_user_data(request)
- return JsonResponse(payload.model_dump())
diff --git a/backend/provider_directory/views/flags.py b/backend/provider_directory/views/flags.py
new file mode 100644
index 00000000..721e2708
--- /dev/null
+++ b/backend/provider_directory/views/flags.py
@@ -0,0 +1,16 @@
+# Example API view in Django (using DRF)
+from django.conf import settings
+from flags.state import flag_enabled
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+API_FLAGS = settings.FLAGS.keys()
+
+
+class FeatureFlagsAPIView(APIView):
+ def get(self, request):
+ # NOTE: (@abachman-dsac) this approach is easy, but if we end up with
+ # complex conditions, it may not be cheap.
+ return Response(
+ {flag_name: flag_enabled(flag_name, request=request) for flag_name in API_FLAGS}
+ )
diff --git a/backend/provider_directory/views/frontend_settings.py b/backend/provider_directory/views/frontend_settings.py
new file mode 100644
index 00000000..90c02cb7
--- /dev/null
+++ b/backend/provider_directory/views/frontend_settings.py
@@ -0,0 +1,35 @@
+from django.conf import settings
+from django.contrib.auth.decorators import login_not_required
+from django.http import JsonResponse
+from django.middleware.csrf import get_token
+from flags.state import flag_enabled
+from pydantic import BaseModel
+
+from .authentication import UserData
+
+API_FLAGS = settings.FLAGS.keys()
+
+
+class FrontendSettingsPayload(BaseModel):
+ require_authentication: bool = settings.REQUIRE_AUTHENTICATION
+ user: UserData = UserData()
+ feature_flags: dict = {}
+
+ def populate_user_data(self, request):
+ if request.user and not request.user.is_anonymous:
+ self.user.username = request.user.username
+ self.user.is_anonymous = False
+
+ def populate_feature_flags(self, request):
+ self.feature_flags = {
+ flag_name: flag_enabled(flag_name, request=request) for flag_name in API_FLAGS
+ }
+
+
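+# Example response shape (values depend on settings and the requesting user):
+#   {"require_authentication": ..., "user": {...},
+#    "feature_flags": {"SEARCH_APP": false, "ORGANIZATION_LOOKUP": false, ...}}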
+@login_not_required
+def frontend_settings(request):
+ get_token(request) # always set the CSRF token cookie
+ payload = FrontendSettingsPayload()
+ payload.populate_user_data(request)
+ payload.populate_feature_flags(request)
+ return JsonResponse(payload.model_dump())
diff --git a/backend/provider_directory/views/index.py b/backend/provider_directory/views/index.py
index 34e819d5..671885f1 100644
--- a/backend/provider_directory/views/index.py
+++ b/backend/provider_directory/views/index.py
@@ -21,7 +21,7 @@ def index(request, path: str | None = None):
npd/frontend/ project.
"""
- context = IndexContext.model_validate({ "title": "National Provider Directory" })
+ context = IndexContext.model_validate({"title": "National Provider Directory"})
if (settings.DEBUG or settings.TESTING) and not find("index.html"):
return redirect("http://localhost:3000/")
diff --git a/backend/requirements.txt b/backend/requirements.txt
index d1e71663..81c673fc 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -1,3 +1,11 @@
+Django==5.2.9
+Faker==38.2.0
+Jinja2==3.1.6
+MarkupSafe==3.0.2
+PyYAML==6.0.2
+Pygments==2.19.2
+SQLAlchemy==2.0.41
+WTForms==3.1.2
anyio==4.9.0
arrow==1.3.0
asgiref==3.8.1
@@ -8,41 +16,35 @@ chardet==5.2.0
charset-normalizer==3.4.2
click==8.2.1
cookiecutter==2.6.0
-Django==5.2.3
django-cors-headers==4.8.0
django-debug-toolbar==5.2.0
-django-structlog==10.0.0
django-filter==25.1
+django-flags==5.0.14
+django-structlog==10.0.0
djangorestframework==3.16.0
-unittest-xml-reporting==3.2.0
drf_spectacular==0.29.0
fhir.resources==8.1.0
fhir_core==1.1.3
gunicorn==23.0.0
idna==3.10
-Jinja2==3.1.6
markdown-it-py==3.0.0
-MarkupSafe==3.0.2
mdurl==0.1.2
+psycopg-pool==3.2.7
psycopg==3.2.12
psycopg_binary==3.2.12
-psycopg-pool==3.2.7
-Pygments==2.19.2
python-dateutil==2.9.0.post0
python-decouple==3.8
python-multipart==0.0.20
python-slugify==8.0.4
-PyYAML==6.0.2
requests==2.32.4
rich==14.0.0
six==1.17.0
sniffio==1.3.1
sqladmin==0.20.1
-SQLAlchemy==2.0.41
-sqlparse==0.5.3
-starlette==0.47.2
+sqlparse==0.5.4
+starlette==0.49.1
text-unidecode==1.3
types-python-dateutil==2.9.0.20250516
typing_extensions==4.14.0
-urllib3==2.5.0
-WTForms==3.1.2
+unittest-xml-reporting==3.2.0
+urllib3==2.6.3
diff --git a/bin/npr b/bin/npr
new file mode 100755
index 00000000..a052813d
--- /dev/null
+++ b/bin/npr
@@ -0,0 +1,233 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+import subprocess
+import sys
+
+"""
+npr is an NPd Runner
+
+Run docker compose commands in the npd project directory (https://github.com/DSACMS/npd)
+"""
+
+EPILOG_TEXT = """
+examples:
+ \033[36;1m[ Backend ]\033[0m service: django-web
+
+ %(prog)s -t python manage.py test
+ Run backend tests in the "django-web" container, with
+ backend/compose.test.yml configuration, without db setup
+
+ %(prog)s -e DJANGO_SETTINGS_MODULE=app.settings.test python manage.py shell
+ Open Django shell with test settings using an environment variable
+
+ \033[36;1m[ Frontend ]\033[0m service: web
+
+ %(prog)s npm [COMMAND]
+ Run frontend npm commands in the "web" service container
+
+ %(prog)s npm test
+ Run frontend tests
+
+ %(prog)s npm install
+ Install npm packages in the web service container
+
+ %(prog)s npm install --save react-whatever
+ Add an npm package to the frontend project
+
+ \033[36;1m[ Database ]\033[0m service: db
+
+ %(prog)s -e PGPASSWORD=postgres -s db psql -U postgres -h db
+ Connect to PostgreSQL database in the db service with default user/pass
+
+ \033[36;1m[ Flyway ]\033[0m service: db-migrations
+
+ %(prog)s migrate
+ Run database migrations using the db-migrations service
+"""
+
+
+def determine_service(command: list[str]) -> str | None:
+ """
+ Determine the appropriate Docker service or run on host based on the
+ command name.
+ """
+ if not command:
+ return None
+
+ first_arg = command[0].lower()
+
+ if first_arg == "playwright":
+ rest = " ".join(command[1:])
+ eprint(
+ f"Unsupported command. Run `npx playwright {rest}` directly in the playwright/ directory instead"
+ )
+ sys.exit(1)
+
+ # Python-related commands go to django-web
+ python_commands = ["python", "python3", "pip", "pip3", "django-admin", "manage.py"]
+ if first_arg in python_commands or first_arg.startswith("python"):
+ return "django-web"
+
+ database_commands = ["psql", "pg_dump"]
+ if first_arg in database_commands:
+ return "db"
+
+ flyway_commands = ["migrate", "info", "repair"]
+ if first_arg in flyway_commands:
+ return "db-migrations"
+
+ # npm commands go to web
+ frontend_commands = ["npm", "node", "npx"]
+ if first_arg in frontend_commands:
+ return "web"
+
+ return None
+
+
+def dprint(*msg: object):
+ sys.stderr.write(f"\033[2m> {' '.join(str(m) for m in msg)}\033[0m\n")
+
+
+def eprint(*msg: object):
+ sys.stderr.write(f"\033[31m[Error] {' '.join(str(m) for m in msg)}\033[0m\n")
+
+
+def argument_parser() -> argparse.ArgumentParser:
+ """Parse command line arguments."""
+ parser = argparse.ArgumentParser(
+ add_help=True,
+ epilog=EPILOG_TEXT,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ description="Docker Compose service runner - automatically determines the appropriate service for your command",
+ )
+
+ parser.add_argument("-s", "--service", dest="service", help="Specify service name")
+ parser.add_argument(
+ "-e", action="append", dest="env_vars", help="Environment variables"
+ )
+ parser.add_argument(
+ "-t",
+ "--test",
+ action="store_true",
+ default=False,
+ help="Use compose.test.yml compose file",
+ )
+ parser.add_argument(
+ "--debug", action="store_true", default=False, help="Show verbose debug output"
+ )
+
+ parser.add_argument(
+ "--publish",
+ action="append",
+ dest="publishes",
+ help="Expose ports from the running command",
+ )
+
+ # after all known options have been parsed, expect a command with its own arguments
+ parser.add_argument(
+ "command",
+ nargs=argparse.REMAINDER,
+ help="command to pass on to docker or the shell",
+ )
+
+ return parser
+
+
+def parse_arguments() -> tuple[argparse.Namespace, list[str]]:
+ parser = argument_parser()
+
+ # Parse known args to separate our options from the command
+ known_args, _ = parser.parse_known_args()
+
+ return known_args, known_args.command
+
+
+def run_command(command: list[str], cwd: str | None = None, noexit: bool = False):
+ try:
+ dprint(*command)
+ result = subprocess.run(command, cwd=cwd)
+ if not noexit:
+ sys.exit(result.returncode)
+ return result.returncode
+ except KeyboardInterrupt:
+ if not noexit:
+ sys.exit(130)
+ return 130
+ except Exception as e:
+ print(f"Error executing command: {e}")
+ if not noexit:
+ sys.exit(1)
+ return 1
+
+
+def main():
+ if len(sys.argv) < 2:
+ parser = argument_parser()
+ parser.print_help()
+
+ sys.exit(1)
+
+ # Parse arguments
+ known_args, command_args = parse_arguments()
+
+ if known_args.debug:
+ dprint("known_args: ", known_args)
+ dprint("command_args:", command_args)
+
+ # Determine service
+ if known_args.service:
+ service = known_args.service
+ else:
+ service = determine_service(command_args)
+ if not service:
+ print("Error: Could not determine appropriate service")
+ sys.exit(1)
+
+ if known_args.debug:
+ dprint("service:", service)
+
+ # Build runnable command
+ cwd = os.getcwd()
+
+ run_cmd = [
+ "docker",
+ "compose",
+ ]
+
+ # Add optional test configuration
+ if service in ("django-web", "db", "db-migrations") and known_args.test:
+ run_cmd.extend(("-f", "compose.test.yml"))
+
+ run_cmd.extend(("run", "--rm"))
+
+ # Add environment variables
+ if known_args.env_vars:
+ for env_var in known_args.env_vars:
+ run_cmd.extend(["-e", env_var])
+
+ if known_args.publishes:
+ for publish in known_args.publishes:
+ run_cmd.extend(["--publish", publish])
+
+ # Add service name
+ run_cmd.append(service)
+
+ # Add the actual command
+ run_cmd.extend(command_args)
+
+ # Execute the command
+ try:
+ dprint(*run_cmd)
+ result = subprocess.run(run_cmd, cwd=cwd)
+ sys.exit(result.returncode)
+ except KeyboardInterrupt:
+ sys.exit(130)
+ except Exception as e:
+ print(f"Error executing command: {e}")
+ sys.exit(1)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/code.json b/code.json
index cc07c4fd..16811c36 100644
--- a/code.json
+++ b/code.json
@@ -1,7 +1,7 @@
{
"name": "npd",
"version": "",
- "description": "A modern provider and payer directory serving as a single source of truth for healthcare provider information nationwide",
+ "description": "A modern version of a health provider directory to serve as a single source of truth for the benefit of all",
"longDescription": "The CMS National Provider Directory addresses fragmented healthcare provider data that costs the industry an estimated $2.76 billion annually. Currently, provider information is duplicated and corrected across over 5,000 systems, leading to widespread inaccuracy. NPD creates a unified FHIR-compliant directory combining data from CMS sources (NPPES, PECOS) and industry partners into a single authoritative source. Built with Django REST Framework and React, the system provides APIs and a search interface for accessing provider practices, addresses, specialties, licenses, quality scores, insurance plan participation, and interoperability endpoints. By serving as the definitive provider directory, NPD reduces data collection burden on providers and payers while improving accuracy for over 100 million Medicare and Medicaid beneficiaries.",
"status": "Development",
"permissions": {
@@ -9,10 +9,11 @@
{
"name": "CC0-1.0",
"URL": "https://github.com/DSACMS/npd/blob/main/LICENSE"
-
}
],
- "usageType": ["openSource"],
+ "usageType": [
+ "openSource"
+ ],
"exemptionText": ""
},
"organization": "Centers for Medicare & Medicaid Services",
@@ -24,7 +25,7 @@
"disclaimerURL": "",
"disclaimerText": "",
"vcs": "git",
- "laborHours": 17012,
+ "laborHours": 17280,
"reuseFrequency": {
"forks": 1,
"clones": 0
@@ -41,14 +42,11 @@
"languages": [
"Python",
"TypeScript",
- "JavaScript",
- "SQL",
- "HCL",
"HTML",
"CSS",
"Makefile",
"Dockerfile",
- "Shell"
+ "JavaScript"
],
"maintenance": "internal",
"contractNumber": [
@@ -116,8 +114,8 @@
"partners": [],
"date": {
"created": "2025-06-13T17:11:32Z",
- "lastModified": "2025-11-12T17:59:42Z",
- "metadataLastUpdated": "2025-11-12T18:02:12.503Z"
+ "lastModified": "2025-12-31T16:34:25Z",
+ "metadataLastUpdated": "2026-01-01T02:23:10.003Z"
},
"tags": [
"providers",
diff --git a/compose.test.yml b/compose.test.yml
new file mode 100644
index 00000000..b7616a00
--- /dev/null
+++ b/compose.test.yml
@@ -0,0 +1,37 @@
+name: npd
+
+# by using `compose.test.yml` alone with `docker compose -f compose.test.yml` and
+# the `extends:` settings for each of the named services, we can merge these
+# service definitions cleanly with the ones that already exist in the base
+# compose.yml file
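+#
+# e.g. docker compose -f compose.test.yml run --rm django-web python manage.py test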
+
+services:
+ db:
+ extends:
+ file: compose.yml
+ service: db
+ environment:
+ POSTGRES_DB: npd_test
+
+ db-migrations:
+ extends:
+ file: compose.yml
+ service: db-migrations
+ environment:
+ FLYWAY_URL: jdbc:postgresql://${NPD_DB_HOST:-db}:${NPD_DB_PORT:-5432}/npd_test
+ volumes: !override
+ - ./flyway/sql/django_migrations:/flyway/sql/django_migrations
+ - ./flyway/sql/django_reference_data:/flyway/sql/django_reference_data
+ - ./flyway/sql/migrations:/flyway/sql/migrations
+ - ./flyway/sql/reference_data:/flyway/sql/reference_data
+
+ django-web:
+ extends:
+ file: compose.yml
+ service: django-web
+ environment:
+ NPD_DB_NAME: npd_test
+ TESTING: True
+
+volumes:
+ postgres_data:
diff --git a/compose.yml b/compose.yml
index e9ea5a4b..75d32e03 100644
--- a/compose.yml
+++ b/compose.yml
@@ -1,5 +1,92 @@
name: npd
-include:
- - backend/docker-compose.yml
- - frontend/docker-compose.yml
+services:
+ django-web:
+ build:
+ context: ./backend
+ container_name: npd
+ env_file:
+ - path: .env
+ required: false
+ environment:
+ NPD_DJANGO_SECRET: ${NPD_DJANGO_SECRET:-_pth2#=k8-wf-_^t%2))it+3..8la^@@97^#ock7.v=@792w7}
+ DEBUG: ${DEBUG:-True}
+ DJANGO_LOGLEVEL: ${DJANGO_LOGLEVEL:-INFO}
+ DJANGO_ALLOWED_HOSTS: "${DJANGO_ALLOWED_HOSTS:-['localhost','127.0.0.1','0.0.0.0']}"
+ NPD_DB_ENGINE: ${NPD_DB_ENGINE:-django.db.backends.postgresql}
+ NPD_DB_NAME: ${NPD_DB_NAME:-npd_development}
+ NPD_DB_USER: ${NPD_DB_USER:-postgres}
+ NPD_DB_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
+ NPD_DB_HOST: ${NPD_DB_HOST:-db}
+ NPD_DB_PORT: ${NPD_DB_PORT:-5432}
+ NPD_REQUIRE_AUTHENTICATION: False
+ ports:
+ - '8000:8000'
+ volumes:
+ - './backend/:/app'
+ - ./backend/artifacts:/app/artifacts:rw
+ - ./backend/provider_directory/static:/app/provider_directory/static:rw
+ depends_on:
+ - db
+
+ db:
+ image: 'postgres:17'
+ env_file:
+ - path: backend/.env
+ required: false
+ environment:
+ POSTGRES_DB: ${NPD_DB_NAME:-npd_development}
+ POSTGRES_USER: ${NPD_DB_USER:-postgres}
+ POSTGRES_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
+ PGUSER: ${NPD_DB_USER:-postgres}
+ ports:
+ - ${NPD_DB_PORT:-5432}:5432
+ volumes:
+ - postgres_data:/var/lib/postgresql/data
+ healthcheck:
+ test: ["CMD-SHELL", "pg_isready", "-d", "${NPD_DB_NAME:-npd_development}"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+
+ db-migrations:
+ image: 'flyway/flyway:10'
+ env_file:
+ - path: backend/.env
+ required: false
+ environment:
+ FLYWAY_URL: jdbc:postgresql://${NPD_DB_HOST:-db}:5432/${NPD_DB_NAME:-npd_development}
+ FLYWAY_USER: ${NPD_DB_USER:-postgres}
+ FLYWAY_PASSWORD: ${NPD_DB_PASSWORD:-postgres}
+ FLYWAY_PLACEHOLDERS_apiSchema: ${NPD_DB_SCHEMA:-npd}
+ FLYWAY_PLACEHOLDERS_superuserDefaultPassword: ""
+ FLYWAY_DEFAULT_SCHEMA: public
+ volumes:
+ - ./flyway/sql:/flyway/sql
+ depends_on:
+ - db
+ command: migrate -environment=development -outputType=json
+
+ ## frontend
+
+ web:
+ build:
+ context: frontend
+ ports:
+ - 3000:3000
+ volumes:
+ - ./frontend/:/app
+ # Container-based build output should be written to the appropriate backend static directory
+ - ./backend/provider_directory/static:/app/dist
+ # Ensure node_modules are not overwritten by the volume mount
+ - node_modules:/app/node_modules
+ environment:
+ CHOKIDAR_USEPOLLING: "true" # For file watching in Docker
+ BUILD_OUTPUT_DIR: "/app/dist"
+ VITE_API_BASE_URL: "http://localhost:8000" # assume django-web is also running
+ IN_DOCKER: 1
+ command: ["npm", "run", "watch"]
+
+volumes:
+ postgres_data:
+ node_modules:
\ No newline at end of file
diff --git a/etls/loadFIPS/requirements.txt b/etls/loadFIPS/requirements.txt
index a1237e1b..2377a623 100644
--- a/etls/loadFIPS/requirements.txt
+++ b/etls/loadFIPS/requirements.txt
@@ -1,6 +1,7 @@
-requests==2.32.3
+requests==2.32.4
pandas==2.3.1
sqlalchemy==2.0.41
python-dotenv==1.1.1
pangres==4.2.1
-psycopg2-binary==2.9.10
\ No newline at end of file
+psycopg2-binary==2.9.10
+urllib3==2.6.3
\ No newline at end of file
diff --git a/flyway/sql/django_migrations/README.md b/flyway/sql/django_migrations/README.md
index e998ee4b..1f55c944 100644
--- a/flyway/sql/django_migrations/README.md
+++ b/flyway/sql/django_migrations/README.md
@@ -1,3 +1,9 @@
+- [Dealing with Django migrations in Flyway](#dealing-with-django-migrations-in-flyway)
+- [Django is still complaining](#django-is-still-complaining)
+ - [Generating Django migration entries for Flyway](#generating-django-migration-entries-for-flyway)
+
+## Dealing with Django migrations in Flyway
+
This folder contains Django `django.contrib` system migrations for `contenttypes`, `sessions`, `auth`, and `admin` defaults.
You can see a list of the default migrations and their status with:
@@ -6,15 +12,15 @@ You can see a list of the default migrations and their status with:
docker compose run --rm django-web python manage.py showmigrations
```
-SQL was generated by running a command like this with each app + migration:
+SQL was generated by running a command like this with each app + migration while stripping out lines matching `/(BEGIN;|COMMIT;)`.
```sh
docker compose run --rm django-web python manage.py sqlmigrate contenttypes 0001_initial
```
-while stripping out lines matching `/(BEGIN;|COMMIT;)`.
+
-Something like this should work:
+Here's a Python script that will process `sqlmigrate` output to clean up the SQL.
```python
from datetime import datetime
@@ -52,12 +58,85 @@ with open(outpath, 'w') as outf:
outf.write(filtered_sql)
```
-## What about the actual django migrations?
+
+
+## Django is still complaining
+
+Django really wants to manage migrations, but that's flyway's job in this project.
+
+If you, for example, add a new Django plugin that wants to manage some database tables, you will have to create a Flyway data migration that writes the appropriate entries to the `django_migrations` table.
+
+If you don't, Django will add warnings to almost all console output, forever.
-Django really wants to manage migrations, but that's flyway's job.
+### Generating Django migration entries for Flyway
-You can populate the django migration history table to prevent it from complaining about unprocessed migrations with the `migrate --fake` command:
+To generate the appropriate entries, we can use the `manage.py migrate --fake` command for the application. In this example, we're migrating for [`django-flags`](https://cfpb.github.io/django-flags/).
```sh
-docker compose run --rm django-web python manage.py migrate --fake
+docker compose run --rm django-web python manage.py migrate flags --fake
+```
+
+Then read out the `django_migrations` table:
+
+```sh
+docker compose run --rm \
+ -e PGPASSWORD=postgres \
+ db \
+ psql -h db -p 5432 -U postgres npd_development \
+ -c "select app, name from npd.django_migrations where app = 'flags'"
+```
+
+Example output of that query:
+
+```
+ app | name
+-------+--------------------------------------------------
+ flags | 0001_initial
+ flags | 0002_auto_20151030_1401
+ flags | 0003_flag_hidden
+ flags | 0004_remove_flag_hidden
+ flags | 0005_flag_enabled_by_default
+ flags | 0006_auto_20151217_2003
+ flags | 0007_unique_flag_site
+ flags | 0008_add_state_conditions
+ flags | 0009_migrate_to_conditional_state
+ flags | 0010_delete_flag_site_fk
+ flags | 0011_migrate_path_data_startswith_to_matches
+ flags | 0012_replace_migrations_for_wagtail_independence
+ flags | 0013_add_required_field
+```
+
+Now we convert that output to a SQL insert statement, with the appropriate schema placeholder:
+
+```sql
+INSERT INTO
+ ${apiSchema}.django_migrations (app, name, applied)
+VALUES
+ ('flags', '0001_initial', now()),
+ ('flags', '0002_auto_20151030_1401', now()),
+ ('flags', '0003_flag_hidden', now()),
+ ('flags', '0004_remove_flag_hidden', now()),
+ ('flags', '0005_flag_enabled_by_default', now()),
+ ('flags', '0006_auto_20151217_2003', now()),
+ ('flags', '0007_unique_flag_site', now()),
+ ('flags', '0008_add_state_conditions', now()),
+ ('flags', '0009_migrate_to_conditional_state', now()),
+ ('flags', '0010_delete_flag_site_fk', now()),
+ ('flags', '0011_migrate_path_data_startswith_to_matches', now()),
+ ('flags', '0012_replace_migrations_for_wagtail_independence', now()),
+ ('flags', '0013_add_required_field', now());
+```
+
+Finally, add the insert to a new file with the appropriate Flyway version name in `flyway/sql/django_migrations` (in this changeset, `V12.3__flags__django_migrations.sql`).
+
+After Flyway migrations are run on a database from scratch, Django should report that the `flags` app migrations have been executed:
+
+```console
+$ docker compose run --rm django-web python manage.py showmigrations
+
+...
+flags
+ [X] 0012_replace_migrations_for_wagtail_independence (11 squashed migrations)
+ [X] 0013_add_required_field
+...
```
\ No newline at end of file
diff --git a/flyway/sql/django_migrations/V12.1__flags__0012_replace_migrations_for_wagtail_independence.sql b/flyway/sql/django_migrations/V12.1__flags__0012_replace_migrations_for_wagtail_independence.sql
new file mode 100644
index 00000000..38ecee9f
--- /dev/null
+++ b/flyway/sql/django_migrations/V12.1__flags__0012_replace_migrations_for_wagtail_independence.sql
@@ -0,0 +1,16 @@
+--
+-- Create model FlagState
+--
+CREATE TABLE
+ "flags_flagstate" (
+ "id" integer NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY,
+ "name" varchar(64) NOT NULL,
+ "condition" varchar(64) NOT NULL,
+ "value" varchar(127) NOT NULL
+ );
+
+--
+-- Alter unique_together for flagstate (1 constraint(s))
+--
+ALTER TABLE "flags_flagstate"
+ADD CONSTRAINT "flags_flagstate_name_condition_value_4e81ec48_uniq" UNIQUE ("name", "condition", "value");
\ No newline at end of file
diff --git a/flyway/sql/django_migrations/V12.2__flags__0013_add_required_field.sql b/flyway/sql/django_migrations/V12.2__flags__0013_add_required_field.sql
new file mode 100644
index 00000000..3039cdfe
--- /dev/null
+++ b/flyway/sql/django_migrations/V12.2__flags__0013_add_required_field.sql
@@ -0,0 +1,9 @@
+--
+-- Add field required to flagstate
+--
+ALTER TABLE "flags_flagstate"
+ADD COLUMN "required" boolean DEFAULT false NOT NULL;
+
+ALTER TABLE "flags_flagstate"
+ALTER COLUMN "required"
+DROP DEFAULT;
\ No newline at end of file
diff --git a/flyway/sql/django_migrations/V12.3__flags__django_migrations.sql b/flyway/sql/django_migrations/V12.3__flags__django_migrations.sql
new file mode 100644
index 00000000..a485a35f
--- /dev/null
+++ b/flyway/sql/django_migrations/V12.3__flags__django_migrations.sql
@@ -0,0 +1,22 @@
+-- ensure that `python manage.py showmigrations` returns:
+--
+-- flags
+-- [X] 0012_replace_migrations_for_wagtail_independence (11 squashed migrations)
+-- [X] 0013_add_required_field
+--
+INSERT INTO
+ ${apiSchema}.django_migrations (app, name, applied)
+VALUES
+ ('flags', '0001_initial', now()),
+ ('flags', '0002_auto_20151030_1401', now()),
+ ('flags', '0003_flag_hidden', now()),
+ ('flags', '0004_remove_flag_hidden', now()),
+ ('flags', '0005_flag_enabled_by_default', now()),
+ ('flags', '0006_auto_20151217_2003', now()),
+ ('flags', '0007_unique_flag_site', now()),
+ ('flags', '0008_add_state_conditions', now()),
+ ('flags', '0009_migrate_to_conditional_state', now()),
+ ('flags', '0010_delete_flag_site_fk', now()),
+ ('flags', '0011_migrate_path_data_startswith_to_matches', now()),
+ ('flags', '0012_replace_migrations_for_wagtail_independence', now()),
+ ('flags', '0013_add_required_field', now());
diff --git a/flyway/sql/local_dev/R__seed_feature_flags.sql b/flyway/sql/local_dev/R__seed_feature_flags.sql
new file mode 100644
index 00000000..ecff3cbd
--- /dev/null
+++ b/flyway/sql/local_dev/R__seed_feature_flags.sql
@@ -0,0 +1,30 @@
+-- seed default feature flags
+INSERT INTO
+ flags_flagstate (name, condition, value, required)
+values
+ ('SEARCH_APP', 'in_group', 'Developers', false),
+ (
+ 'PROVIDER_LOOKUP',
+ 'in_group',
+ 'Developers',
+ false
+ ),
+ (
+ 'PROVIDER_LOOKUP_DETAILS',
+ 'in_group',
+ 'Developers',
+ false
+ ),
+ (
+ 'ORGANIZATION_LOOKUP',
+ 'in_group',
+ 'Developers',
+ false
+ ),
+ (
+ 'ORGANIZATION_LOOKUP_DETAILS',
+ 'in_group',
+ 'Developers',
+ false
+ ) ON CONFLICT
+DO NOTHING;
\ No newline at end of file
diff --git a/flyway/sql/local_dev/R__seed_user.sql b/flyway/sql/local_dev/R__seed_user.sql
new file mode 100644
index 00000000..2fd70025
--- /dev/null
+++ b/flyway/sql/local_dev/R__seed_user.sql
@@ -0,0 +1,59 @@
+-- Seed the local development DB with a superuser in the "Developers" group
+INSERT INTO
+ auth_user (
+ password,
+ is_superuser,
+ username,
+ email,
+ first_name,
+ last_name,
+ is_staff,
+ is_active,
+ date_joined
+ )
+VALUES
+ -- PBKDF2 encoded string for "password123", generated with:
+ -- echo '{"password_input":"password123"}' | python infrastructure/modules/fhir-api/generate_hash.py
+ (
+ 'pbkdf2_sha256$1000000$abb7cc0b98bc51048fd48d8d47f86744$9+dkxu3JWeQ74YAzLusq4Rwq1jYnS+fjepjGfo3Cppo=',
+ true,
+ 'developer@cms.hhs.gov',
+ 'developer@cms.hhs.gov',
+ 'NPD',
+ 'Developer',
+ true,
+ true,
+ now()
+ ) ON CONFLICT
+DO NOTHING;
+
+-- add the Developers group
+INSERT INTO
+ auth_group (name)
+VALUES
+ ('Developers') ON CONFLICT
+DO NOTHING;
+
+-- ensure the superuser is in the group
+INSERT INTO
+ auth_user_groups (user_id, group_id)
+VALUES
+ (
+ (
+ select
+ id
+ from
+ auth_user
+ where
+ username = 'developer@cms.hhs.gov'
+ ),
+ (
+ select
+ id
+ from
+ auth_group
+ where
+ name = 'Developers'
+ )
+ ) ON CONFLICT
+DO NOTHING;
\ No newline at end of file
diff --git a/flyway/sql/local_dev/R__test_non_clinical_organization.sql b/flyway/sql/local_dev/R__test_non_clinical_organization.sql
deleted file mode 100644
index fd76fd03..00000000
--- a/flyway/sql/local_dev/R__test_non_clinical_organization.sql
+++ /dev/null
@@ -1,18 +0,0 @@
-INSERT INTO npd.individual (id, gender, birth_date) VALUES ('11111111-1111-1111-1111-111111111111', 'F', '1738-06-07') ON CONFLICT DO NOTHING;
-INSERT INTO npd.individual_to_name (individual_id, first_name, last_name, name_use_id) VALUES ('11111111-1111-1111-1111-111111111111', 'Jersey', 'Joe', 1) ON CONFLICT DO NOTHING;
-INSERT INTO npd.legal_entity (ein_id, dba_name) VALUES ('22222222-2222-2222-2222-222222222222', 'Joe Administrative Services LLC') ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization (id, authorized_official_id, ein_id, parent_id) VALUES ('33333333-3333-3333-3333-333333333333', '11111111-1111-1111-1111-111111111111', '22222222-2222-2222-2222-222222222222', NULL) ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization_to_name (organization_id, name, is_primary) VALUES ('33333333-3333-3333-3333-333333333333', 'Joe Health Incorporated', true) ON CONFLICT DO NOTHING;
-INSERT INTO npd.address_us (id, delivery_line_1, city_name, state_code, zipcode) VALUES ('TEST00001', '123 Joe Street', 'Buffalo', '36', '14201') ON CONFLICT DO NOTHING;
-INSERT INTO npd.address (id, address_us_id) VALUES ('55555555-5555-5555-5555-555555555555', 'TEST00001') ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization_to_address (organization_id, address_id, address_use_id) VALUES ('33333333-3333-3333-3333-333333333333', '55555555-5555-5555-5555-555555555555', 1) ON CONFLICT DO NOTHING;
-INSERT INTO npd.individual_to_phone (individual_id, phone_number, phone_use_id) VALUES ('11111111-1111-1111-1111-111111111111', '5551234567', 1) ON CONFLICT DO NOTHING;
-
-INSERT INTO npd.individual (id, gender, birth_date) VALUES ('44444444-4444-4444-4444-444444444444', 'M', '1750-03-10') ON CONFLICT DO NOTHING;
-INSERT INTO npd.individual_to_name (individual_id, first_name, last_name, name_use_id) VALUES ('44444444-4444-4444-4444-444444444444', 'Mike', 'Beaver', 1) ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization (id, authorized_official_id, ein_id, parent_id) VALUES ('66666666-6666-6666-6666-666666666666', '44444444-4444-4444-4444-444444444444', NULL, NULL) ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization_to_name (organization_id, name, is_primary) VALUES ('66666666-6666-6666-6666-666666666666', 'Beaver Clinicals', true) ON CONFLICT DO NOTHING;
-INSERT INTO npd.npi (npi, entity_type_code, enumeration_date, last_update_date) VALUES (9999999999, 2, '2020-01-01', '2020-01-01') ON CONFLICT DO NOTHING;
-INSERT INTO npd.clinical_organization (npi, organization_id) VALUES (9999999999, '66666666-6666-6666-6666-666666666666') ON CONFLICT DO NOTHING;
-INSERT INTO npd.other_id_type (id, value) VALUES (1, 'CmsMBI') ON CONFLICT DO NOTHING;
-INSERT INTO npd.organization_to_other_id (npi, other_id, other_id_type_id, state_code, issuer) VALUES (9999999999, 'testMBI', 1, 'NY', 'New York State Medicaid') ON CONFLICT DO NOTHING;
\ No newline at end of file
diff --git a/flyway/sql/migrations/V13__reacting_to_full_dataset.sql b/flyway/sql/migrations/V13__reacting_to_full_dataset.sql
new file mode 100644
index 00000000..e02b0480
--- /dev/null
+++ b/flyway/sql/migrations/V13__reacting_to_full_dataset.sql
@@ -0,0 +1,4 @@
+-- Support name searches against the full dataset by indexing the organization and individual name columns
+create index on npd.organization_to_name(name);
+
+create index on npd.individual_to_name(last_name, first_name);
\ No newline at end of file
diff --git a/flyway/sql/migrations/V14__alter_replacement_npi_column.sql b/flyway/sql/migrations/V14__alter_replacement_npi_column.sql
new file mode 100644
index 00000000..2ec536c0
--- /dev/null
+++ b/flyway/sql/migrations/V14__alter_replacement_npi_column.sql
@@ -0,0 +1,6 @@
+-- Widen replacement_npi and the US address identifiers to bigint; the address foreign key must be dropped and re-created around the column type changes
+alter table npd.npi alter column replacement_npi type bigint using replacement_npi::bigint;
+alter table npd.address drop constraint fk_address_address_us_id;
+alter table npd.address_us alter column id type bigint using id::bigint;
+alter table npd.address alter column address_us_id type bigint using address_us_id::bigint;
+alter table npd.address add constraint fk_address_address_us_id foreign key (address_us_id) references npd.address_us(id);
\ No newline at end of file
diff --git a/flyway/sql/migrations/V15__provider_pk_change.sql b/flyway/sql/migrations/V15__provider_pk_change.sql
new file mode 100644
index 00000000..cf9b7e12
--- /dev/null
+++ b/flyway/sql/migrations/V15__provider_pk_change.sql
@@ -0,0 +1,11 @@
+-- DRF Spectacular was documenting the API from the model definition rather than from the API construction (implying that Practitioners could be retrieved by NPI), so this migration updates the database to reflect the querying pattern the API actually uses (Practitioners are retrieved by individual_id)
+alter table npd.provider_education drop constraint fk_provider_education_npi;
+alter table npd.provider_to_other_id drop constraint fk_provider_to_other_id_npi;
+alter table npd.provider_to_taxonomy drop constraint fk_provider_to_taxonomy_npi;
+alter table npd.provider drop constraint pk_provider;
+alter table npd.provider alter column individual_id set not null;
+alter table npd.provider add constraint pk_provider primary key (individual_id);
+alter table npd.provider add constraint uc_provider_npi unique (npi);
+alter table npd.provider_education add constraint fk_provider_education_npi foreign key (npi) references npd.provider(npi);
+alter table npd.provider_to_other_id add constraint fk_provider_to_other_id_npi foreign key (npi) references npd.provider(npi);
+alter table npd.provider_to_taxonomy add constraint fk_provider_to_taxonomy_npi foreign key (npi) references npd.provider(npi);
\ No newline at end of file
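
For context on the querying pattern the comment describes, here is a hedged sketch of what retrieval by `individual_id` might look like in the Django/DRF backend; the model, serializer, and viewset names (and field types) are assumptions, not the project's actual classes:

```python
# Illustrative only: a read-only endpoint keyed on individual_id, with NPI as
# a unique secondary identifier, mirroring pk_provider and uc_provider_npi above.
# In practice this would live in a Django app's models.py / views.py.
from django.db import models
from rest_framework import serializers, viewsets


class Provider(models.Model):
    individual_id = models.UUIDField(primary_key=True)  # pk_provider (individual_id)
    npi = models.BigIntegerField(unique=True)            # uc_provider_npi (npi)

    class Meta:
        managed = False  # the table is owned by the Flyway migrations, not Django


class ProviderSerializer(serializers.ModelSerializer):
    class Meta:
        model = Provider
        fields = ["individual_id", "npi"]


class PractitionerViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Provider.objects.all()
    serializer_class = ProviderSerializer
    lookup_field = "individual_id"  # e.g. GET /practitioners/<individual_id>/
```
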
diff --git a/frontend/README.md b/frontend/README.md
index 2df5492a..6011a045 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -9,10 +9,10 @@
- [Local development](#local-development)
- [Prerequesites](#prerequesites)
- [Running the frontend app](#running-the-frontend-app)
- - [Option 1: vite dev server](#option-1-vite-dev-server)
- - [Option 2: build assets for the backend](#option-2-build-assets-for-the-backend)
+ - [Option 1: build assets for the backend](#option-1-build-assets-for-the-backend)
+ - [Option 2: vite dev server](#option-2-vite-dev-server)
- [Adding dependencies](#adding-dependencies)
-
+ - [Example dependency addition](#example-dependency-addition)
The NPD front end consists of React + TypeScript built by vite.dev, deployed as static assets and developed as part of the NPD `backend/` Django application.
@@ -52,7 +52,9 @@ const MyComponent = () => {
return (
)
}
@@ -61,15 +63,14 @@ const MyComponent = () => {
For larger or full-page content, we're using Markdown files, conventionally named `*.content.md`, loaded with the `?raw` query supported by vite.
```tsx
-import Markdown from 'react-markdown'
+import Markdown from "react-markdown"
-import content from './MyPage.content.md?raw'
+import content from "./MyPage.content.md?raw"
const MyPage = () => {
-
return (
- { content }
+ {content}
)
}
@@ -103,24 +104,7 @@ code.blocks()
There are two options for running the frontend application in development. Each has a time and a place.
-#### Option 1: vite dev server
-
-From the `npd` project root:
-
-1. Run the frontend vite dev server
- - in docker with: `docker compose up web`
- - on host with: `cd frontend; npm run dev`
-3. Visit http://localhost:3000 in your browser
-
-**Limitations:**
-
-- requires CORS and CSRF workarounds for API requests and ` |