diff --git a/.dockerignore b/.dockerignore index 3edb0b5e..3de26468 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,6 +6,8 @@ **/.DS_Store **/__pycache__ +**/.pytest_cache +**/.mypy_cache **/.venv **/.classpath **/.dockerignore diff --git a/.env.local b/.env.example similarity index 95% rename from .env.local rename to .env.example index 69654efb..6718ae99 100644 --- a/.env.local +++ b/.env.example @@ -15,4 +15,5 @@ SECRET_KEY=default-secret-key_local # Replace with your actual secret key # Frontend Environment Variables NEXT_PUBLIC_API_URL=http://localhost:8000/api # The base URL for API calls to the backend +NEXT_PUBLIC_MAPBOX_TOKEN=dummy-mapbox-token NODE_ENV=development # For libraries and general Node.js practices diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..8492e69b --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,92 @@ +name: CI Pipeline + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] + branches: + - master + +jobs: + eslint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: '18.x' + + - name: Install Node.js dependencies + run: npm ci + + - name: Run ESLint + run: npm run lint + + black_lint_and_mypy_type_check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + pip install black mypy + pip install fastapi pydantic pydantic-settings sqlalchemy GeoAlchemy2 pytest + - name: Check code formatting with black + run: black --check . + + - name: Type check with mypy + run: mypy --config-file mypy.ini . 
+ + docker_build_test: + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@v4 + + - name: Get Run ID of Most Recent Successful Run + id: get_run_id + run: | + response=$(curl -s -H "Authorization: token ${{ secrets.GH_PAT }}" \ + "https://api.github.com/repos/sfbrigade/datasci-earthquake/actions/workflows/env_vars.yml/runs?status=completed&conclusion=success") + run_id=$(echo $response | jq '.workflow_runs[0].id') + echo "Run ID: $run_id" + echo "run_id=$run_id" >> $GITHUB_ENV + - name: Download .env Artifact + uses: actions/download-artifact@v4 + with: + name: env-file + github-token: ${{ secrets.GH_PAT }} + repository: sfbrigade/datasci-earthquake + run-id: ${{ env.run_id }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Cache Docker layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + - name: Build Docker Containers + run: docker compose build + + - name: Start Services + run: docker compose up -d + + - name: Run Backend Tests + run: docker compose run backend pytest backend/database/tests + + - name: Run Frontend Tests + run: docker compose run frontend npm test + + - name: Clean Up + run: docker compose down --volumes \ No newline at end of file diff --git a/.github/workflows/env_vars.yml b/.github/workflows/env_vars.yml new file mode 100644 index 00000000..c5df3371 --- /dev/null +++ b/.github/workflows/env_vars.yml @@ -0,0 +1,42 @@ +name: Generate .env File + +on: + workflow_dispatch: + +jobs: + create-envfile: + + runs-on: ubuntu-latest + + steps: + - name: Make envfile + uses: SpicyPizza/create-envfile@v2.0 + with: + envkey_POSTGRES_USER: ${{ secrets.POSTGRES_USER }} + envkey_POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }} + envkey_POSTGRES_DB: ${{ secrets.POSTGRES_DB }} + envkey_POSTGIS_VERSION: ${{ secrets.POSTGIS_VERSION }} + + envkey_FRONTEND_HOST: ${{ secrets.FRONTEND_HOST 
}} + envkey_DATABASE_URL: ${{ secrets.DATABASE_URL }} + envkey_LOCALHOST_DATABASE_URL: ${{ secrets.LOCALHOST_DATABASE_URL }} + envkey_DATABASE_URL_SQLALCHEMY: ${{ secrets.DATABASE_URL_SQLALCHEMY }} + envkey_LOCALHOST_DATABASE_URL_SQLALCHEMY: ${{ secrets.LOCALHOST_DATABASE_URL_SQLALCHEMY }} + envkey_ENVIRONMENT: ${{ secrets.ENVIRONMENT }} + envkey_SECRET_KEY: ${{ secrets.SECRET_KEY }} + + envkey_NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }} + envkey_NEXT_PUBLIC_MAPBOX_TOKEN: ${{ secrets.NEXT_PUBLIC_MAPBOX_TOKEN }} + envkey_NODE_ENV: ${{ secrets.NODE_ENV }} + + file_name: .env + directory: './' + fail_on_empty: false + sort_keys: false + + - name: Upload .env as Artifact + uses: actions/upload-artifact@v4 + with: + name: env-file + include-hidden-files: true + path: ./.env \ No newline at end of file diff --git a/.gitignore b/.gitignore index c4014525..2d8227d1 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,8 @@ # Python __pycache__/ +.pytest_cache/ +.mypy_cache/ *.py[cod] bin/ build/ @@ -19,4 +21,5 @@ node_modules /.next/ # Sensitive config -.env.development.local \ No newline at end of file +.env.development.local +.env \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..317b9ba1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,33 @@ +repos: + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.13.0" + hooks: + - id: mypy + args: ["--config-file", "mypy.ini"] + additional_dependencies: + - "pydantic>=2.9.0" + - "sqlalchemy>=2.0.35" + - "pydantic-settings>=2.5.2" + - "fastapi>=0.114.0" + - "GeoAlchemy2>=0.15.2" + - "pytest>=8.3.3" + - repo: https://github.com/pre-commit/mirrors-eslint + rev: "v9.14.0" + hooks: + - id: eslint + args: + - "--config=.eslintrc.js" + - "--cache" + - "--ignore-pattern=node_modules/*" + entry: npm run lint + language: node + files: \.[jt]sx?$ + 
additional_dependencies: + - "eslint" + - "eslint-plugin-prettier" + - "eslint-config-prettier" + - "prettier" diff --git a/.prettierignore b/.prettierignore index a7ae198c..d22ecfde 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,3 +3,5 @@ build api .github pull_request_template.md +.mypy_cache/ +__pycache__/ \ No newline at end of file diff --git a/README.md b/README.md index d9ba222c..87397260 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ pnpm dev Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. -The FastApi server will be running on [http://127.0.0.1:8000](http://127.0.0.1:8000) – feel free to change the port in `package.json` (you'll also need to update it in `next.config.js`). +The FastApi server will be running on [http://127.0.0.1:8000](http://127.0.0.1:8000) – feel free to change the port in `package.json` (you'll also need to update it in `next.config.js`). ## Learn More @@ -42,6 +42,31 @@ You can check out [the Next.js GitHub repository](https://github.com/vercel/next --- +# Formatting with a Pre-Commit Hook + +This repository uses `Black` for Python and `ESLint` for JS/TS to enforce code style standards. We also use `MyPy` to perform static type checking on Python code. The pre-commit hook runs the formatters automatically before each commit, helping maintain code consistency across the project. + +## Prerequisites + +- If you haven't already, install pre-commit: + `pip install pre-commit` +- Run the following command to install the pre-commit hooks defined in the configuration file `.pre-commit-config.yaml`: + `pre-commit install` + This command sets up pre-commit to automatically run ESLint, Black, and MyPy before each commit. + +## Usage + +- **Running Black Automatically**: After setup, every time you attempt to commit code, Black will check the staged files and apply formatting if necessary. 
If files are reformatted, the commit will be stopped, and you’ll need to review the changes before committing again. +- **Bypassing the Hook**: If you want to skip the pre-commit hook for a specific commit, use the --no-verify flag with your commit command: + `git commit -m "your commit message" --no-verify`. + + **Note**: The `--no-verify` flag is helpful in cases where you need to make a quick commit without running the pre-commit checks, but it should be used sparingly to maintain code quality. CI pipeline will fail during the `pull request` action if the code is not formatted. + +- **Running Pre-commit on All Files**: If you want to format all files in the repository, use: + `pre-commit run --all-files` + +--- + # Docker This project uses Docker and Docker Compose to run the application, which includes the frontend, backend, and postgres database. @@ -81,12 +106,21 @@ To stop and shut down the application: # Configuration of environment variables -The `.env.local` file contains environment variables used in the application to configure settings for both the backend and frontend components. If it contains sensitive information, `.env.local` should not be checked into version control for security reasons. Right now there is no sensitive information but later secret management tools will be introduced. +We use GitHub Secrets to store sensitive environment variables. A template `.env.example` file is provided in the repository as a reference. Only users with **write** access to the repository can manually trigger the `Generate .env File` workflow, which creates and uploads the actual `.env` file as an artifact. + +**Note**: Before starting work on the project, make sure to: + +1. Get **write** access to the repository +2. Trigger the `Generate .env File` workflow and download the artifact. +3. Place the artifact in the root folder of the project. Make sure the file is named `.env`. 
+ The file is organized into three main sections: - - **Postgres Environment Variables**. This section contains the credentials to connect to the PostgreSQL database, such as the username, password, and the name of the database. - - **Backend Environment Variables**. These variables are used by the backend (i.e., FastAPI) to configure its behavior and to connect to the database and the frontend application. - - **Frontend Environment Variables**. This section contains the base URL for API calls to the backend and ```NODE_ENV``` variable that determines in which environment the Node.js application is running. -*** + +- **Postgres Environment Variables**. This section contains the credentials to connect to the PostgreSQL database, such as the username, password, and the name of the database. +- **Backend Environment Variables**. These variables are used by the backend (e.g., FastAPI) to configure its behavior and to connect to the database and the frontend application. +- **Frontend Environment Variables**. This section contains the base URL for API calls to the backend and `NODE_ENV` variable that determines in which environment the Node.js application is running. + +--- # Disclaimer diff --git a/app/components/__tests__/search-bar.test.jsx b/app/components/__tests__/search-bar.test.jsx index 16db0b87..183b0e01 100644 --- a/app/components/__tests__/search-bar.test.jsx +++ b/app/components/__tests__/search-bar.test.jsx @@ -4,7 +4,9 @@ import "@testing-library/jest-dom"; jest.mock("@mapbox/search-js-react", () => ({ AddressAutofill: ({ children, onRetrieve }) => ( -
onRetrieve({ features: [{ place_name: "Mock Address" }] })}> +
onRetrieve({ features: [{ place_name: "Mock Address" }] })} + > {children}
), @@ -13,7 +15,7 @@ jest.mock("@mapbox/search-js-react", () => ({ describe("SearchBar Component", () => { it("renders search input and icons correctly", () => { render(); - + const input = screen.getByPlaceholderText("Search San Francisco address"); const searchIcon = screen.getByTestId("search-icon"); const clearIcon = screen.getByTestId("clear-icon"); diff --git a/backend/api/config.py b/backend/api/config.py index e212d822..ef64a019 100644 --- a/backend/api/config.py +++ b/backend/api/config.py @@ -21,10 +21,11 @@ class Settings(BaseSettings): environment: str secret_key: str next_public_api_url: str + next_public_mapbox_token: str node_env: str class Config: - env_file = ".env.local" + env_file = ".env" env_file_encoding = "utf-8" diff --git a/backend/api/models/landslide_zones.py b/backend/api/models/landslide_zones.py index 375b3015..0110e6d5 100644 --- a/backend/api/models/landslide_zones.py +++ b/backend/api/models/landslide_zones.py @@ -1,11 +1,11 @@ """All data of the Landslide Zones table from SFData.""" -from sqlalchemy import String, Integer +from sqlalchemy import String, Integer, Float, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry -from datetime import datetime, DateTime +from datetime import datetime from .base import Base @@ -23,17 +23,13 @@ class LandslideZone(Base): sum_shape: Mapped[float] = mapped_column(Float) shape_length: Mapped[float] = mapped_column(Float) created_us: Mapped[str] = mapped_column(String) - created_da: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow - ) + created_da: Mapped[datetime] = mapped_column(DateTime(timezone=True)) last_edited: Mapped[str] = mapped_column(String) - last_edi_1: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow - ) + last_edi_1: Mapped[datetime] = mapped_column(DateTime(timezone=True)) shape_Le_1: 
Mapped[float] = mapped_column(Float) shape_area: Mapped[float] = mapped_column(Float) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: diff --git a/backend/api/models/liquefaction_zones.py b/backend/api/models/liquefaction_zones.py index 95386a0f..22b529bf 100644 --- a/backend/api/models/liquefaction_zones.py +++ b/backend/api/models/liquefaction_zones.py @@ -1,15 +1,15 @@ """All data of the Liquefaction Zones table from SFData.""" -from sqlalchemy import String +from sqlalchemy import String, Float, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry -from datetime import datetime, DateTime +from datetime import datetime from .base import Base -class LiquefactionZons(Base): +class LiquefactionZone(Base): """ All data of the Liquefaction Zones table from SFData. 
Contains multipolygon geometries defining soil liquefaction zones as High (H) or @@ -24,7 +24,7 @@ class LiquefactionZons(Base): shape_length: Mapped[float] = mapped_column(Float) shape_area: Mapped[float] = mapped_column(Float) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: diff --git a/backend/api/models/neighborhoods.py b/backend/api/models/neighborhoods.py index 19e80afe..270cd062 100644 --- a/backend/api/models/neighborhoods.py +++ b/backend/api/models/neighborhoods.py @@ -1,13 +1,14 @@ """Neighborhood boundaries in San Francisco""" -from sqlalchemy import String, Integer +from sqlalchemy import String, Integer, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry -from datetime import datetime, DateTime +from datetime import datetime from .base import Base + MAPPED_COLUMN_STRING_LENGTH = 200 @@ -22,7 +23,7 @@ class Neighborhood(Base): neighborhood: Mapped[str] = mapped_column(String(MAPPED_COLUMN_STRING_LENGTH)) geometry: Mapped[Geometry] = mapped_column(Geometry("MULTIPOLYGON", srid=4326)) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utc_now + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: diff --git a/backend/api/models/seismic_hazard_zones.py b/backend/api/models/seismic_hazard_zones.py index 18c06ab9..4a4ab128 100644 --- a/backend/api/models/seismic_hazard_zones.py +++ b/backend/api/models/seismic_hazard_zones.py @@ -1,11 +1,11 @@ """All data of the Seismic Hazard table from SFData.""" -from sqlalchemy import Integer +from sqlalchemy import Integer, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry 
-from datetime import datetime, DateTime +from datetime import datetime from .base import Base @@ -20,7 +20,7 @@ class SeismicHazardZone(Base): identifier: Mapped[int] = mapped_column(Integer, primary_key=True) geometry: Mapped[Geometry] = mapped_column(Geometry("MULTIPOLYGON", srid=4326)) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: diff --git a/backend/api/models/soft_story_properties.py b/backend/api/models/soft_story_properties.py index 118652b2..29a20e3e 100644 --- a/backend/api/models/soft_story_properties.py +++ b/backend/api/models/soft_story_properties.py @@ -1,13 +1,14 @@ """All data of the Soft Story table from SFData.""" -from sqlalchemy import String, Integer +from sqlalchemy import String, Integer, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry -from datetime import datetime, DateTime +from datetime import datetime from .base import Base + MAPPED_COLUMN_STRING_LENGTH = 200 @@ -31,14 +32,10 @@ class SoftStoryProperty(Base): status: Mapped[str] = mapped_column(String(MAPPED_COLUMN_STRING_LENGTH)) bos_district: Mapped[int] = mapped_column(Integer) point: Mapped[Geometry] = mapped_column(Geometry("POINT", srid=4326)) - sfdata_as_of: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow - ) - sfdata_loaded_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow - ) + sfdata_as_of: Mapped[datetime] = mapped_column(DateTime(timezone=True)) + sfdata_loaded_at: Mapped[datetime] = mapped_column(DateTime(timezone=True)) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: diff --git 
a/backend/api/models/tsunami.py b/backend/api/models/tsunami.py index a6b21b50..67eae1ba 100644 --- a/backend/api/models/tsunami.py +++ b/backend/api/models/tsunami.py @@ -1,17 +1,17 @@ """Tsunami Risk Zone data""" -from sqlalchemy import String, Integer +from sqlalchemy import String, Integer, Float, DateTime, func from sqlalchemy.orm import DeclarativeBase from sqlalchemy.orm import Mapped from sqlalchemy.orm import mapped_column from geoalchemy2 import Geometry -from datetime import datetime, DateTime +from datetime import datetime MAPPED_COLUMN_STRING_LENGTH = 200 -class TsunamiZones(DeclarativeBase): +class TsunamiZone(DeclarativeBase): """ All data of the Tsunami Hazard table from conservation.ca.gov. """ @@ -30,8 +30,8 @@ class TsunamiZones(DeclarativeBase): # This data is ingested as PolygonZ but should be stored as MultiPolygon geometry: Mapped[Geometry] = mapped_column(Geometry("MULTIPOLYGON", srid=4326)) update_timestamp: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=datetime.utcnow + DateTime(timezone=True), server_default=func.now() ) def __repr__(self) -> str: - return f"" + return f"" diff --git a/backend/api/routers/combined_risk.py b/backend/api/routers/combined_risk.py index 42a9fe91..80a00848 100644 --- a/backend/api/routers/combined_risk.py +++ b/backend/api/routers/combined_risk.py @@ -16,7 +16,7 @@ async def delete_combined_risks(address: str): Remove the combined seismic risks of an address from the database. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.put("/{address}") @@ -24,15 +24,15 @@ async def put_combined_risks(address: str, risks: dict): """ Add the combined seismic risks of an address to the database. 
""" - pass + return {"message": "This endpoint is not yet implemented"} @router.post("/{address}") -async def put_combined_risks(address: str, risks: dict): +async def post_combined_risks(address: str, risks: dict): """ Add the combined seismic risks of an address to the database. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.get("/{address}") @@ -42,4 +42,4 @@ async def get_combined_risks(address: str) -> dict: three booleans. """ # TODO: Return a dictionary to avoid validation error - pass + return {"message": "This endpoint is not yet implemented"} diff --git a/backend/api/routers/polygons.py b/backend/api/routers/polygons.py index 932fa5d7..fa630f9c 100644 --- a/backend/api/routers/polygons.py +++ b/backend/api/routers/polygons.py @@ -19,7 +19,7 @@ async def delete_polygon(id: int, table_name: str): """ Delete this polygon from that table. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.put("/{id}") @@ -27,7 +27,7 @@ async def put_polygon(id: int, polygon: Polygon, table_name: str): """ Put this polygon into that table. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.post("/") @@ -35,12 +35,12 @@ async def post_polygon(id: int, polygon: Polygon, table_name: str): """ Post this polygon to that table. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.get("/{id}") -async def get_polygon(id: int, table_name: str) -> Polygon: +async def get_polygon(id: int, table_name: str): """ Get this polygon from that table. 
""" - pass + return {"message": "This endpoint is not yet implemented"} diff --git a/backend/api/routers/reinforced_soft_story.py b/backend/api/routers/reinforced_soft_story.py index e33d9434..a7f925d5 100644 --- a/backend/api/routers/reinforced_soft_story.py +++ b/backend/api/routers/reinforced_soft_story.py @@ -18,7 +18,7 @@ async def delete_reinforced(address: str): Check a small group of reinforced soft stories and raise an exception if the building lacks an original soft story. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.put("/{address}") @@ -32,7 +32,7 @@ async def put_reinforced( Check a small group of reinforced soft stories and raise an exception if the building lacks an original soft story. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.post("/{address}") @@ -50,7 +50,7 @@ async def post_reinforced( @router.get("/{address}") -async def get_reinforced(address: str) -> bool: +async def get_reinforced(address: str): """ Return whether the building at an address, having a soft story, has been reinforced. diff --git a/backend/api/routers/seismic.py b/backend/api/routers/seismic.py index b38ec112..c6d84889 100644 --- a/backend/api/routers/seismic.py +++ b/backend/api/routers/seismic.py @@ -11,10 +11,10 @@ @router.get("/risk/{address}") -async def get_seismic_risk(address: str) -> bool: +async def get_seismic_risk(address: str): """ Return whether this address is in the current seismic risk polygon. """ # TODO: Change return type to boolean to avoid validation error - pass + return {"message": "This endpoint is not yet implemented"} diff --git a/backend/api/routers/soft_story.py b/backend/api/routers/soft_story.py index bbfe070f..1678be2e 100644 --- a/backend/api/routers/soft_story.py +++ b/backend/api/routers/soft_story.py @@ -15,7 +15,7 @@ async def delete_soft_story(address: str): Delete the record that the building at an address has a soft story. 
""" - pass + return {"message": "This endpoint is not yet implemented"} @router.put("/{address}") @@ -26,7 +26,7 @@ async def put_soft_story( Update whether the building at an address has a soft story to the database. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.post("/{address}") @@ -37,13 +37,13 @@ async def post_soft_story( Add that the building at an address has a soft story to the database. """ - pass + return {"message": "This endpoint is not yet implemented"} @router.get("/{address}") -async def get_soft_story(address: str) -> bool: +async def get_soft_story(address: str): """ Return whether the building at an address has a soft story. """ # TODO: Change return type to boolean to avoid validation error - pass + return {"message": "This endpoint is not yet implemented"} diff --git a/backend/api/routers/tsunami.py b/backend/api/routers/tsunami.py index 0ce41e6c..5f28148e 100644 --- a/backend/api/routers/tsunami.py +++ b/backend/api/routers/tsunami.py @@ -11,10 +11,10 @@ @router.get("/risk/{address}") -async def get_risk(address: str) -> bool: +async def get_risk(address: str): """ Return whether this address is in the current tsunami risk polygon. """ # TODO: Change return type to boolean to avoid validation error - pass + return {"message": "This endpoint is not yet implemented"} diff --git a/backend/api/tests/test_combined_risks.py b/backend/api/tests/test_combined_risks.py index b1a22ff3..3f22d051 100644 --- a/backend/api/tests/test_combined_risks.py +++ b/backend/api/tests/test_combined_risks.py @@ -7,7 +7,7 @@ # Will the .. be stable? 
from ..main import app -from ..schemas import Polygon +from ..schemas.geo import Polygon @pytest.fixture diff --git a/backend/api/tests/test_polygons.py b/backend/api/tests/test_polygons.py index a21df0ab..666e4f01 100644 --- a/backend/api/tests/test_polygons.py +++ b/backend/api/tests/test_polygons.py @@ -4,8 +4,8 @@ import pytest from fastapi.testclient import TestClient -from ..schemas import Polygon -from ..index import app +from ..schemas.geo import Polygon +from ..main import app @pytest.fixture diff --git a/backend/api/tests/test_seismic.py b/backend/api/tests/test_seismic.py index 88ee2bf1..09fea25c 100644 --- a/backend/api/tests/test_seismic.py +++ b/backend/api/tests/test_seismic.py @@ -3,7 +3,7 @@ # Will the .. be stable? from ..main import app -from ..schemas import Polygon +from ..schemas.geo import Polygon @pytest.fixture diff --git a/backend/api/tests/test_soft_story.py b/backend/api/tests/test_soft_story.py index 083efcf8..2f9591d6 100644 --- a/backend/api/tests/test_soft_story.py +++ b/backend/api/tests/test_soft_story.py @@ -7,7 +7,7 @@ # Will the .. be stable? from ..main import app -from ..schemas import Polygon +from ..schemas.geo import Polygon @pytest.fixture diff --git a/backend/api/tests/test_tsunami.py b/backend/api/tests/test_tsunami.py index 99ea3f33..14290c5a 100644 --- a/backend/api/tests/test_tsunami.py +++ b/backend/api/tests/test_tsunami.py @@ -7,7 +7,7 @@ # Will the .. be stable? 
from ..main import app -from ..schemas import Polygon +from ..schemas.geo import Polygon @pytest.fixture diff --git a/backend/database/init.sql b/backend/database/init.sql index a175e395..c130bd99 100644 --- a/backend/database/init.sql +++ b/backend/database/init.sql @@ -39,7 +39,7 @@ VALUES (495990, 764765, '495990-764765-0', '46 AUBURN ST', '', 46, '', 'AUBURN', 'ST', '', '', '', 830000, -122.41228, 37.77967, 94133, ST_SetSRID(ST_MakePoint(-122.41228, 37.77967), 4326), 'SUPERVISORIAL DISTRICT 3', 3, 3, 'THREE', 'Aaron Peskin', 'Nob Hill', '', '2024/10/28 03:40:00 AM', '2024/10/28 10:11:26 PM'); -CREATE TABLE IF NOT EXISTS combined_risk ( +CREATE TABLE IF NOT EXISTS combined_risks ( id SERIAL PRIMARY KEY, address VARCHAR(50) NOT NULL UNIQUE, soft_story_risk BOOLEAN NOT NULL DEFAULT FALSE, @@ -49,7 +49,7 @@ CREATE TABLE IF NOT EXISTS combined_risk ( ); -INSERT INTO combined_risk (address, soft_story_risk, seismic_hazard_risk, landslide_risk, liquefaction_risk) VALUES +INSERT INTO combined_risks (address, soft_story_risk, seismic_hazard_risk, landslide_risk, liquefaction_risk) VALUES ('3560 PIERCE ST, SAN FRANCISCO CA', TRUE, FALSE, FALSE, FALSE), ('3484 18TH ST, SAN FRANCISCO CA', TRUE, TRUE, FALSE, TRUE), ('175 ALHAMBRA ST, SAN FRANCISCO CA', FALSE, FALSE, FALSE, FALSE), diff --git a/backend/database/tests/test_database.py b/backend/database/tests/test_database.py index dd4c68d0..025488b6 100644 --- a/backend/database/tests/test_database.py +++ b/backend/database/tests/test_database.py @@ -8,7 +8,7 @@ @pytest.fixture(scope="module") def test_db(): # Create a session using the existing database - engine = create_engine(settings.localhost_database_url) + engine = create_engine(settings.database_url) connection = engine.connect() # Begin a transaction diff --git a/compose.yaml b/compose.yaml index a145bc8d..3a1941c4 100644 --- a/compose.yaml +++ b/compose.yaml @@ -4,7 +4,7 @@ services: context: . 
dockerfile: Dockerfile env_file: - - .env.local + - .env ports: - 3000:3000 depends_on: @@ -18,7 +18,7 @@ services: context: . dockerfile: backend/Dockerfile env_file: - - .env.local + - .env environment: - PYTHONPATH=/backend ports: @@ -36,14 +36,14 @@ services: container_name: my_postgis_db restart: always env_file: - - .env.local + - .env volumes: - db-data:/var/lib/postgresql/data - ./backend/database:/docker-entrypoint-initdb.d # Mount the SQL scripts directory ports: - 5432:5432 healthcheck: - test: [ "CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}" ] + test: ["CMD-SHELL", "pg_isready -d $${POSTGRES_DB} -U $${POSTGRES_USER}"] interval: 10s timeout: 5s retries: 5 diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..bc0e166f --- /dev/null +++ b/mypy.ini @@ -0,0 +1,7 @@ +# Global options: +[mypy] +plugins = pydantic.mypy, sqlalchemy.ext.mypy.plugin +ignore_missing_imports = True + +[pytest] +addopts = --mypy \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json index 25cf6015..388c9410 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -28,7 +28,9 @@ "**/*.ts", "**/*.tsx", ".next/types/**/*.ts", - "app/components/search-bar.jsx" -, "app/components/__tests__/autofill.test.js", "app/components/__tests__/search-bar.test.jsx" ], + "app/components/search-bar.jsx", + "app/components/__tests__/autofill.test.js", + "app/components/__tests__/search-bar.test.jsx" + ], "exclude": ["node_modules"] }