mirror of
https://github.com/elisiariocouto/leggen.git
synced 2025-12-13 21:52:40 +00:00
Compare commits
88 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5e0b8eb2a4 | ||
|
|
f2e05484dc | ||
|
|
37949a4e1f | ||
|
|
abf39abe74 | ||
|
|
957099786c | ||
|
|
2191fe9066 | ||
|
|
bc947183e3 | ||
|
|
16afa1ed8a | ||
|
|
541cb262ee | ||
|
|
eaaea6e459 | ||
|
|
34501f5f0d | ||
|
|
dcac53d181 | ||
|
|
cb2e70e42d | ||
|
|
417b77539f | ||
|
|
947342e196 | ||
|
|
c5fd26cb3e | ||
|
|
6c8b8ed3cc | ||
|
|
abacfd78c8 | ||
|
|
26487cff89 | ||
|
|
46f3f5c498 | ||
|
|
6bce7eb6be | ||
|
|
155c30559f | ||
|
|
ec8ef8346a | ||
|
|
de3da84dff | ||
|
|
47164e8546 | ||
|
|
34e793c75c | ||
|
|
4018b263f2 | ||
|
|
f0fee4fd82 | ||
|
|
91f53b35b1 | ||
|
|
73d6bd32db | ||
|
|
6b2c19778b | ||
|
|
355fa5cfb6 | ||
|
|
7cf471402b | ||
|
|
7480094419 | ||
|
|
d69bd5d115 | ||
|
|
ca29d527c9 | ||
|
|
4ed1bf5abe | ||
|
|
eb73401896 | ||
|
|
33006f8f43 | ||
|
|
6b2cb8a52f | ||
|
|
75ca7f177f | ||
|
|
7efbccfc90 | ||
|
|
e7662bc3dd | ||
|
|
59346334db | ||
|
|
c70a4e5cb8 | ||
|
|
a29bd1ab68 | ||
|
|
a8fb3ad931 | ||
|
|
effabf0695 | ||
|
|
758a3a2257 | ||
|
|
6f5b5dc679 | ||
|
|
6c44beda67 | ||
|
|
ebe0a2fe86 | ||
|
|
3cb38e2e9f | ||
|
|
ad40b2207a | ||
|
|
9402c2535b | ||
|
|
e0351a8771 | ||
|
|
b60ba068cd | ||
|
|
70cfe34476 | ||
|
|
3b1738bae4 | ||
|
|
332d4d51d0 | ||
|
|
7672533e86 | ||
|
|
410e600673 | ||
|
|
798a8f1880 | ||
|
|
7401ca62d2 | ||
|
|
e46634cf27 | ||
|
|
7b48bc080c | ||
|
|
0cb339366c | ||
|
|
3d36198b06 | ||
|
|
2352ea9e58 | ||
|
|
b559376116 | ||
|
|
cb6682ea2e | ||
|
|
6d2f1b7b2f | ||
|
|
fcb0f1edd7 | ||
|
|
0c8f68adfd | ||
|
|
7f71589af1 | ||
|
|
f7ef4b32ca | ||
|
|
ee30bff5ef | ||
|
|
4f2daa7953 | ||
|
|
d8aa1ef90d | ||
|
|
f3ad639a01 | ||
|
|
facf6ac94e | ||
|
|
d8fde49da4 | ||
|
|
460fed3ed0 | ||
|
|
78b08c17ee | ||
|
|
f9ab3ae0a8 | ||
|
|
433d17371e | ||
|
|
de17cf44ec | ||
|
|
91c74b0412 |
22
.claude/settings.local.json
Normal file
22
.claude/settings.local.json
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"Bash(mkdir:*)",
|
||||||
|
"Bash(uv sync:*)",
|
||||||
|
"Bash(uv run pytest:*)",
|
||||||
|
"Bash(git commit:*)",
|
||||||
|
"Bash(ruff check:*)",
|
||||||
|
"Bash(git add:*)",
|
||||||
|
"Bash(mypy:*)",
|
||||||
|
"WebFetch(domain:localhost)",
|
||||||
|
"Bash(npm create:*)",
|
||||||
|
"Bash(npm install)",
|
||||||
|
"Bash(npm install:*)",
|
||||||
|
"Bash(npx tailwindcss init:*)",
|
||||||
|
"Bash(./node_modules/.bin/tailwindcss:*)",
|
||||||
|
"Bash(npm run build:*)"
|
||||||
|
],
|
||||||
|
"deny": [],
|
||||||
|
"ask": []
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,3 +1,5 @@
|
|||||||
.git/
|
.git/
|
||||||
data/
|
data/
|
||||||
docker-compose.dev.yml
|
docker-compose.dev.yml
|
||||||
|
frontend/node_modules/
|
||||||
|
.venv/
|
||||||
|
|||||||
167
.github/workflows/release.yml
vendored
167
.github/workflows/release.yml
vendored
@@ -6,67 +6,126 @@ on:
|
|||||||
- "**"
|
- "**"
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
publish-pypi:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
- name: Install uv
|
||||||
- name: Set up Python
|
uses: astral-sh/setup-uv@v5
|
||||||
uses: actions/setup-python@v4
|
- name: "Set up Python"
|
||||||
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.12"
|
python-version-file: "pyproject.toml"
|
||||||
|
|
||||||
- name: Build Package
|
- name: Build Package
|
||||||
run: |
|
run: uv build
|
||||||
python -m pip install --upgrade pip
|
- name: Store the distribution packages
|
||||||
pip install poetry
|
uses: actions/upload-artifact@v4
|
||||||
poetry config virtualenvs.create false
|
with:
|
||||||
poetry build -f wheel
|
name: python-package-distributions
|
||||||
|
path: dist/
|
||||||
|
|
||||||
|
publish-to-pypi:
|
||||||
|
name: Publish Python distribution to PyPI
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
id-token: write # IMPORTANT: mandatory for trusted publishing
|
||||||
|
needs:
|
||||||
|
- build
|
||||||
|
steps:
|
||||||
|
- name: Download all the dists
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: python-package-distributions
|
||||||
|
path: dist/
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v5
|
||||||
- name: Publish package
|
- name: Publish package
|
||||||
env:
|
run: uv publish
|
||||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
|
|
||||||
run: poetry publish
|
|
||||||
|
|
||||||
push-docker:
|
push-docker-backend:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v3
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v3
|
||||||
- name: Login to Docker Hub
|
- name: Login to Docker Hub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: elisiariocouto
|
username: elisiariocouto
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
- name: Login to GitHub Container Registry
|
- name: Login to GitHub Container Registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Docker meta
|
- name: Docker meta backend
|
||||||
id: meta
|
id: meta-backend
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
# list of Docker images to use as base name for tags
|
# list of Docker images to use as base name for tags
|
||||||
images: |
|
images: |
|
||||||
elisiariocouto/leggen
|
elisiariocouto/leggen
|
||||||
ghcr.io/elisiariocouto/leggen
|
ghcr.io/elisiariocouto/leggen
|
||||||
# generate Docker tags based on the following events/attributes
|
# generate Docker tags based on the following events/attributes
|
||||||
tags: |
|
tags: |
|
||||||
type=ref,event=tag
|
type=ref,event=tag
|
||||||
type=semver,pattern={{version}}
|
type=semver,pattern={{version}}
|
||||||
type=semver,pattern={{major}}.{{minor}}
|
type=semver,pattern={{major}}.{{minor}}
|
||||||
- name: Build and push
|
type=raw,value=latest
|
||||||
uses: docker/build-push-action@v5
|
- name: Build and push backend
|
||||||
with:
|
uses: docker/build-push-action@v5
|
||||||
context: .
|
with:
|
||||||
platforms: linux/amd64,linux/arm64
|
context: .
|
||||||
push: true
|
file: ./Dockerfile
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
platforms: linux/amd64,linux/arm64
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
push: true
|
||||||
|
tags: ${{ steps.meta-backend.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta-backend.outputs.labels }}
|
||||||
|
|
||||||
|
push-docker-frontend:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: Set up QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: elisiariocouto
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
- name: Login to GitHub Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.repository_owner }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
- name: Docker meta frontend
|
||||||
|
id: meta-frontend
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
# list of Docker images to use as base name for tags
|
||||||
|
images: |
|
||||||
|
elisiariocouto/leggen
|
||||||
|
ghcr.io/elisiariocouto/leggen
|
||||||
|
# generate Docker tags based on the following events/attributes
|
||||||
|
tags: |
|
||||||
|
type=ref,event=tag,suffix=-frontend
|
||||||
|
type=semver,pattern={{version}},suffix=-frontend
|
||||||
|
type=semver,pattern={{major}}.{{minor}},suffix=-frontend
|
||||||
|
type=raw,value=latest-frontend
|
||||||
|
- name: Build and push frontend
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: ./frontend
|
||||||
|
file: ./frontend/Dockerfile
|
||||||
|
platforms: linux/amd64,linux/arm64
|
||||||
|
push: true
|
||||||
|
tags: ${{ steps.meta-frontend.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta-frontend.outputs.labels }}
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -14,7 +14,6 @@ dist/
|
|||||||
downloads/
|
downloads/
|
||||||
eggs/
|
eggs/
|
||||||
.eggs/
|
.eggs/
|
||||||
lib/
|
|
||||||
lib64/
|
lib64/
|
||||||
parts/
|
parts/
|
||||||
sdist/
|
sdist/
|
||||||
@@ -160,3 +159,6 @@ cython_debug/
|
|||||||
#.idea/
|
#.idea/
|
||||||
data/
|
data/
|
||||||
docker-compose.dev.yml
|
docker-compose.dev.yml
|
||||||
|
nocodb/
|
||||||
|
sql/
|
||||||
|
leggen.db
|
||||||
|
|||||||
@@ -1,18 +1,23 @@
|
|||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/psf/black
|
|
||||||
rev: 24.2.0
|
|
||||||
hooks:
|
|
||||||
- id: black
|
|
||||||
language_version: python3.12
|
|
||||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||||
# Ruff version.
|
rev: "v0.12.11"
|
||||||
rev: "v0.2.1"
|
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff
|
- id: ruff
|
||||||
|
- id: ruff-format
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.5.0
|
rev: v6.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
exclude: ".*\\.md$"
|
exclude: ".*\\.md$"
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
- id: check-added-large-files
|
- id: check-added-large-files
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: mypy
|
||||||
|
name: Static type check with mypy
|
||||||
|
entry: uv run mypy leggen leggend --check-untyped-defs
|
||||||
|
files: "^leggen(d)?/.*"
|
||||||
|
language: "system"
|
||||||
|
types: ["python"]
|
||||||
|
always_run: true
|
||||||
|
pass_filenames: false
|
||||||
|
|||||||
41
AGENTS.md
Normal file
41
AGENTS.md
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Agent Guidelines for Leggen
|
||||||
|
|
||||||
|
## Build/Lint/Test Commands
|
||||||
|
|
||||||
|
### Frontend (React/TypeScript)
|
||||||
|
- **Dev server**: `cd frontend && npm run dev`
|
||||||
|
- **Build**: `cd frontend && npm run build`
|
||||||
|
- **Lint**: `cd frontend && npm run lint`
|
||||||
|
|
||||||
|
### Backend (Python)
|
||||||
|
- **Lint**: `uv run ruff check .`
|
||||||
|
- **Format**: `uv run ruff format .`
|
||||||
|
- **Type check**: `uv run mypy leggen leggend --check-untyped-defs`
|
||||||
|
- **All checks**: `uv run pre-commit run --all-files`
|
||||||
|
- **Run all tests**: `uv run pytest`
|
||||||
|
- **Run single test**: `uv run pytest tests/unit/test_api_accounts.py::TestAccountsAPI::test_get_all_accounts_success -v`
|
||||||
|
- **Run tests by marker**: `uv run pytest -m "api"` or `uv run pytest -m "unit"`
|
||||||
|
|
||||||
|
## Code Style Guidelines
|
||||||
|
|
||||||
|
### Python
|
||||||
|
- **Imports**: Standard library → Third-party → Local (blank lines between groups)
|
||||||
|
- **Naming**: snake_case for variables/functions, PascalCase for classes
|
||||||
|
- **Types**: Use type hints for all function parameters and return values
|
||||||
|
- **Error handling**: Use specific exceptions, loguru for logging
|
||||||
|
- **Path handling**: Use `pathlib.Path` instead of `os.path`
|
||||||
|
- **CLI**: Use Click framework with proper option decorators
|
||||||
|
|
||||||
|
### TypeScript/React
|
||||||
|
- **Imports**: React hooks first, then third-party, then local components/types
|
||||||
|
- **Naming**: PascalCase for components, camelCase for variables/functions
|
||||||
|
- **Types**: Use `import type` for type-only imports, define interfaces/types
|
||||||
|
- **Styling**: Tailwind CSS with `clsx` utility for conditional classes
|
||||||
|
- **Icons**: lucide-react with consistent naming
|
||||||
|
- **Data fetching**: @tanstack/react-query with proper error handling
|
||||||
|
- **Components**: Functional components with hooks, proper TypeScript typing
|
||||||
|
|
||||||
|
### General
|
||||||
|
- **Formatting**: ruff for Python, ESLint for TypeScript
|
||||||
|
- **Commits**: Use conventional commits, run pre-commit hooks before pushing
|
||||||
|
- **Security**: Never log sensitive data, use environment variables for secrets
|
||||||
295
CHANGELOG.md
295
CHANGELOG.md
@@ -1,3 +1,298 @@
|
|||||||
|
|
||||||
|
## 2025.9.0 (2025/09/09)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **cli:** Show transactions without internal ID when using --full. ([46f3f5c4](https://github.com/elisiariocouto/leggen/commit/46f3f5c4984224c3f4b421e1a06dcf44d4f211e0))
|
||||||
|
- Do not install development dependencies. ([73d6bd32](https://github.com/elisiariocouto/leggen/commit/73d6bd32dbc59608ef1472dc65d9e18450f00896))
|
||||||
|
- Implement proper GoCardless authentication and add dev features ([f0fee4fd](https://github.com/elisiariocouto/leggen/commit/f0fee4fd82e1c788614d73fcd0075f5e16976650))
|
||||||
|
- Make internal transcation ID optional. ([6bce7eb6](https://github.com/elisiariocouto/leggen/commit/6bce7eb6be5f9a5286eb27e777fbf83a6b1c5f8d))
|
||||||
|
- Resolve 404 balances endpoint and currency formatting errors ([417b7753](https://github.com/elisiariocouto/leggen/commit/417b77539fc275493d55efb29f92abcea666b210))
|
||||||
|
- Merge account details into balance data to prevent unknown/N/A values ([eaaea6e4](https://github.com/elisiariocouto/leggen/commit/eaaea6e4598e9c81997573e19f4ef1c58ebe320f))
|
||||||
|
- Use account status for balance records instead of hardcoded 'active' ([541cb262](https://github.com/elisiariocouto/leggen/commit/541cb262ee5783eedf2b154c148c28ec89845da5))
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Update README for new web architecture ([4018b263](https://github.com/elisiariocouto/leggen/commit/4018b263f27c2b59af31428d7a0878280a291c85))
|
||||||
|
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Transform to web architecture with FastAPI backend ([91f53b35](https://github.com/elisiariocouto/leggen/commit/91f53b35b18740869ee9cebfac394db2e12db099))
|
||||||
|
- Add comprehensive test suite with 46 passing tests ([34e793c7](https://github.com/elisiariocouto/leggen/commit/34e793c75c8df1e57ea240b92ccf0843a80c2a14))
|
||||||
|
- Add mypy to pre-commit. ([ec8ef834](https://github.com/elisiariocouto/leggen/commit/ec8ef8346add878f3ff4e8ed928b952d9b5dd584))
|
||||||
|
- Implement database-first architecture to minimize GoCardless API calls ([155c3055](https://github.com/elisiariocouto/leggen/commit/155c30559f4cacd76ef01e50ec29ee436d3f9d56))
|
||||||
|
- Implement dynamic API connection status ([cb2e70e4](https://github.com/elisiariocouto/leggen/commit/cb2e70e42d1122e9c2e5420b095aeb1e55454c24))
|
||||||
|
- Add automatic balance timestamp migration mechanism ([34501f5f](https://github.com/elisiariocouto/leggen/commit/34501f5f0d3b3dff68364b60be77bfb99071b269))
|
||||||
|
- Improve notification filters configuration format ([2191fe90](https://github.com/elisiariocouto/leggen/commit/2191fe906659f4fd22c25b6cb9fbb95c03472f00))
|
||||||
|
- Add notifications view and update branding ([abf39abe](https://github.com/elisiariocouto/leggen/commit/abf39abe74b75d8cb980109fbcbdd940066cc90b))
|
||||||
|
- Make API URL configurable and improve code quality ([37949a4e](https://github.com/elisiariocouto/leggen/commit/37949a4e1f25a2656f6abef75ba942f7b205c130))
|
||||||
|
- Change versioning scheme to calver. ([f2e05484](https://github.com/elisiariocouto/leggen/commit/f2e05484dc688409b6db6bd16858b066d3a16976))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Implement code review suggestions and format code. ([de3da84d](https://github.com/elisiariocouto/leggen/commit/de3da84dffd83e0b232cf76836935a66eb704aee))
|
||||||
|
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
- Remove MongoDB support, simplify to SQLite-only architecture ([47164e85](https://github.com/elisiariocouto/leggen/commit/47164e854600dfcac482449769b1d2e55c842570))
|
||||||
|
- Remove unused amount_threshold and keywords from notification filters ([95709978](https://github.com/elisiariocouto/leggen/commit/957099786cb0e48c9ffbda11b3172ec9fae9ac37))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 2025.9.0 (2025/09/09)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **cli:** Show transactions without internal ID when using --full. ([46f3f5c4](https://github.com/elisiariocouto/leggen/commit/46f3f5c4984224c3f4b421e1a06dcf44d4f211e0))
|
||||||
|
- Do not install development dependencies. ([73d6bd32](https://github.com/elisiariocouto/leggen/commit/73d6bd32dbc59608ef1472dc65d9e18450f00896))
|
||||||
|
- Implement proper GoCardless authentication and add dev features ([f0fee4fd](https://github.com/elisiariocouto/leggen/commit/f0fee4fd82e1c788614d73fcd0075f5e16976650))
|
||||||
|
- Make internal transcation ID optional. ([6bce7eb6](https://github.com/elisiariocouto/leggen/commit/6bce7eb6be5f9a5286eb27e777fbf83a6b1c5f8d))
|
||||||
|
- Resolve 404 balances endpoint and currency formatting errors ([417b7753](https://github.com/elisiariocouto/leggen/commit/417b77539fc275493d55efb29f92abcea666b210))
|
||||||
|
- Merge account details into balance data to prevent unknown/N/A values ([eaaea6e4](https://github.com/elisiariocouto/leggen/commit/eaaea6e4598e9c81997573e19f4ef1c58ebe320f))
|
||||||
|
- Use account status for balance records instead of hardcoded 'active' ([541cb262](https://github.com/elisiariocouto/leggen/commit/541cb262ee5783eedf2b154c148c28ec89845da5))
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Update README for new web architecture ([4018b263](https://github.com/elisiariocouto/leggen/commit/4018b263f27c2b59af31428d7a0878280a291c85))
|
||||||
|
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Transform to web architecture with FastAPI backend ([91f53b35](https://github.com/elisiariocouto/leggen/commit/91f53b35b18740869ee9cebfac394db2e12db099))
|
||||||
|
- Add comprehensive test suite with 46 passing tests ([34e793c7](https://github.com/elisiariocouto/leggen/commit/34e793c75c8df1e57ea240b92ccf0843a80c2a14))
|
||||||
|
- Add mypy to pre-commit. ([ec8ef834](https://github.com/elisiariocouto/leggen/commit/ec8ef8346add878f3ff4e8ed928b952d9b5dd584))
|
||||||
|
- Implement database-first architecture to minimize GoCardless API calls ([155c3055](https://github.com/elisiariocouto/leggen/commit/155c30559f4cacd76ef01e50ec29ee436d3f9d56))
|
||||||
|
- Implement dynamic API connection status ([cb2e70e4](https://github.com/elisiariocouto/leggen/commit/cb2e70e42d1122e9c2e5420b095aeb1e55454c24))
|
||||||
|
- Add automatic balance timestamp migration mechanism ([34501f5f](https://github.com/elisiariocouto/leggen/commit/34501f5f0d3b3dff68364b60be77bfb99071b269))
|
||||||
|
- Improve notification filters configuration format ([2191fe90](https://github.com/elisiariocouto/leggen/commit/2191fe906659f4fd22c25b6cb9fbb95c03472f00))
|
||||||
|
- Add notifications view and update branding ([abf39abe](https://github.com/elisiariocouto/leggen/commit/abf39abe74b75d8cb980109fbcbdd940066cc90b))
|
||||||
|
- Make API URL configurable and improve code quality ([37949a4e](https://github.com/elisiariocouto/leggen/commit/37949a4e1f25a2656f6abef75ba942f7b205c130))
|
||||||
|
- Change versioning scheme to calver. ([f2e05484](https://github.com/elisiariocouto/leggen/commit/f2e05484dc688409b6db6bd16858b066d3a16976))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Implement code review suggestions and format code. ([de3da84d](https://github.com/elisiariocouto/leggen/commit/de3da84dffd83e0b232cf76836935a66eb704aee))
|
||||||
|
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
- Remove MongoDB support, simplify to SQLite-only architecture ([47164e85](https://github.com/elisiariocouto/leggen/commit/47164e854600dfcac482449769b1d2e55c842570))
|
||||||
|
- Remove unused amount_threshold and keywords from notification filters ([95709978](https://github.com/elisiariocouto/leggen/commit/957099786cb0e48c9ffbda11b3172ec9fae9ac37))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.11 (2025/02/23)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- Add workdir to dockerfile last stage. ([355fa5cf](https://github.com/elisiariocouto/leggen/commit/355fa5cfb6ccc4ca225d921cdc2ad77d6bb9b2e6))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.10 (2025/01/14)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **ci:** Install uv before publishing. ([74800944](https://github.com/elisiariocouto/leggen/commit/7480094419697a46515a88a635d4e73820b0d283))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.9 (2025/01/14)
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Setup PyPI Trusted Publishing. ([ca29d527](https://github.com/elisiariocouto/leggen/commit/ca29d527c9e5f9391dfcad6601ad9c585b511b47))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.8 (2025/01/13)
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Migrate from Poetry to uv, bump dependencies and python version. ([33006f8f](https://github.com/elisiariocouto/leggen/commit/33006f8f437da2b9b3c860f22a1fda2a2e5b19a1))
|
||||||
|
- Fix typo in release script. ([eb734018](https://github.com/elisiariocouto/leggen/commit/eb734018964d8281450a8713d0a15688d2cb42bf))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.7 (2024/09/15)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **notifications/telegram:** Escape characters when notifying via Telegram. ([7efbccfc](https://github.com/elisiariocouto/leggen/commit/7efbccfc90ea601da9029909bdd4f21640d73e6a))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Bump dependencies. ([75ca7f17](https://github.com/elisiariocouto/leggen/commit/75ca7f177fb9992395e576ba9038a63e90612e5c))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.6 (2024/08/21)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **commands/status:** Handle exception when no `last_accessed` is returned from GoCardless API. ([c70a4e5c](https://github.com/elisiariocouto/leggen/commit/c70a4e5cb87a19a5a0ed194838e323c6246856ab))
|
||||||
|
- **notifications/telegram:** Escape parenthesis. ([a29bd1ab](https://github.com/elisiariocouto/leggen/commit/a29bd1ab683bc9e068aefb722e9e87bb4fe6aa76))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Update dependencies, use ruff to format code. ([59346334](https://github.com/elisiariocouto/leggen/commit/59346334dbe999ccfd70f6687130aaedb50254fa))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.5 (2024/07/05)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **sync:** Continue on account deactivation. ([758a3a22](https://github.com/elisiariocouto/leggen/commit/758a3a2257c490a92fb0b0673c74d720ad7e87f7))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Bump dependencies. ([effabf06](https://github.com/elisiariocouto/leggen/commit/effabf06954b08e05e3084fdbc54518ea5d947dc))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.4 (2024/06/07)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **sync:** Correctly calculate days left. ([6c44beda](https://github.com/elisiariocouto/leggen/commit/6c44beda672242714bab1100b1f0576cdce255ca))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.3 (2024/06/07)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **sync:** Correctly calculate days left, based on the default 90 days period. ([3cb38e2e](https://github.com/elisiariocouto/leggen/commit/3cb38e2e9fb08e07664caa7daa9aa651262bd213))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.2 (2024/06/07)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **sync:** Use timezone-aware datetime objects. ([9402c253](https://github.com/elisiariocouto/leggen/commit/9402c2535baade84128bdfd0fc314d5225bbd822))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.1 (2024/06/07)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **sync:** Get correct parameter for requisition creation time. ([b60ba068](https://github.com/elisiariocouto/leggen/commit/b60ba068cd7facea5f60fca61bf5845cabf0c2c6))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.6.0 (2024/06/07)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **sync:** Save account balances in new table. ([332d4d51](https://github.com/elisiariocouto/leggen/commit/332d4d51d00286ecec71703aaa39e590f506d2cb))
|
||||||
|
- **sync:** Enable expiration notifications. ([3b1738ba](https://github.com/elisiariocouto/leggen/commit/3b1738bae491f78788b37c32d2e733f7741d41f3))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- **deps:** Bump the pip group across 1 directory with 3 updates ([410e6006](https://github.com/elisiariocouto/leggen/commit/410e600673a1aabcede6f9961c1d10f476ae1077))
|
||||||
|
- **deps:** Update black, ruff and pre-commit to latest versions. ([7672533e](https://github.com/elisiariocouto/leggen/commit/7672533e8626f5cb04e2bf1f00fbe389f6135f5c))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.5.0 (2024/03/29)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **notifications:** Add support for Telegram notifications. ([7401ca62](https://github.com/elisiariocouto/leggen/commit/7401ca62d2ff23c4100ed9d1c8b7450289337553))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Rename docker-compose.yml to compose.yml and remove obsolete 'version' key. ([e46634cf](https://github.com/elisiariocouto/leggen/commit/e46634cf27046bfc8d638a0cd4910a4a8a42648a))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.4.0 (2024/03/28)
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **notifications:** Add support for transaction filter and notifications via Discord. ([0cb33936](https://github.com/elisiariocouto/leggen/commit/0cb339366cc5965223144d2829312d9416d4bc46))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- **deps-dev:** Bump black from 24.2.0 to 24.3.0 ([2352ea9e](https://github.com/elisiariocouto/leggen/commit/2352ea9e58f14250b819e02fa59879e7ff200764))
|
||||||
|
- Update dependencies. ([3d36198b](https://github.com/elisiariocouto/leggen/commit/3d36198b06eebc9d7480eb020d1a713e8637b31a))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.3.0 (2024/03/08)
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Improve README.md. ([cb6682ea](https://github.com/elisiariocouto/leggen/commit/cb6682ea2e7e842806f668fdf4ed34fd0278fd04))
|
||||||
|
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **commands:** Add new `leggen bank delete` command to delete a bank connection. ([fcb0f1ed](https://github.com/elisiariocouto/leggen/commit/fcb0f1edd7f7ebd556ee31912ba25ee0b01d7edc))
|
||||||
|
- **commands/bank/add:** Add all supported GoCardless country ISO codes. ([0c8f68ad](https://github.com/elisiariocouto/leggen/commit/0c8f68adfddbda08ee90c58e1c69035a0f873a40))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Update dependencies. ([6d2f1b7b](https://github.com/elisiariocouto/leggen/commit/6d2f1b7b2f2bf4e4e6d64804adccd74dfb38dcf6))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.2.3 (2024/03/06)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- Print HTTP response body on errors. ([ee30bff5](https://github.com/elisiariocouto/leggen/commit/ee30bff5ef0e40245004e1811a3a62c9caf4f30f))
|
||||||
|
|
||||||
|
|
||||||
|
### Miscellaneous Tasks
|
||||||
|
|
||||||
|
- Update dependencies. ([f7ef4b32](https://github.com/elisiariocouto/leggen/commit/f7ef4b32cae347ae05ae763cb169d6b6c09bde99))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.2.2 (2024/03/01)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **sync:** Pending dates can be null. ([d8aa1ef9](https://github.com/elisiariocouto/leggen/commit/d8aa1ef90d263771b080194adc9e983b1b3d56fe))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.2.1 (2024/02/29)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- Fix compose volumes and dependencies. ([460fed3e](https://github.com/elisiariocouto/leggen/commit/460fed3ed0ca694eab6e80f98392edbe5d5b83fd))
|
||||||
|
- Deduplicate accounts. ([facf6ac9](https://github.com/elisiariocouto/leggen/commit/facf6ac94e533087846fca297520c311a81b6692))
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Add NocoDB information to README.md. ([d8fde49d](https://github.com/elisiariocouto/leggen/commit/d8fde49da4e34457a7564655dd42bb6f0d427b4b))
|
||||||
|
|
||||||
|
|
||||||
|
## 0.2.0 (2024/02/27)
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
|
||||||
|
- **compose:** Fix ofelia configuration, add sync command as the default. ([433d1737](https://github.com/elisiariocouto/leggen/commit/433d17371ead323ca9b793a2dd5782cca598ffcf))
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
- Improve README.md. ([de17cf44](https://github.com/elisiariocouto/leggen/commit/de17cf44ec5260305de8aa053582744ec69d705f))
|
||||||
|
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- Add periodic sync, handled by ofelia. ([91c74b04](https://github.com/elisiariocouto/leggen/commit/91c74b0412713ef8305fbe7fcf7c53e4cf8948fe))
|
||||||
|
- Change default database engine to SQLite, change schema. ([f9ab3ae0](https://github.com/elisiariocouto/leggen/commit/f9ab3ae0a813f2a512b4f5fa57e0da089f823783))
|
||||||
|
|
||||||
|
|
||||||
## 0.1.1 (2024/02/18)
|
## 0.1.1 (2024/02/18)
|
||||||
|
|
||||||
### Bug Fixes
|
### Bug Fixes
|
||||||
|
|||||||
37
Dockerfile
37
Dockerfile
@@ -1,24 +1,33 @@
|
|||||||
FROM python:3.12-alpine as builder
|
FROM python:3.13-alpine AS builder
|
||||||
ARG POETRY_VERSION="1.7.1"
|
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
RUN apk add --no-cache gcc libffi-dev musl-dev && \
|
|
||||||
pip install --no-cache-dir --upgrade pip && \
|
|
||||||
pip install --no-cache-dir -q poetry=="${POETRY_VERSION}"
|
|
||||||
COPY . .
|
|
||||||
RUN poetry config virtualenvs.create false && poetry build -f wheel
|
|
||||||
|
|
||||||
FROM python:3.12-alpine
|
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||||
|
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||||
|
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||||
|
uv sync --locked --no-install-project --no-editable
|
||||||
|
|
||||||
|
COPY . /app
|
||||||
|
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||||
|
uv sync --locked --no-editable --no-group dev
|
||||||
|
|
||||||
|
FROM python:3.13-alpine
|
||||||
|
|
||||||
LABEL org.opencontainers.image.source="https://github.com/elisiariocouto/leggen"
|
LABEL org.opencontainers.image.source="https://github.com/elisiariocouto/leggen"
|
||||||
LABEL org.opencontainers.image.authors="Elisiário Couto <elisiario@couto.io>"
|
LABEL org.opencontainers.image.authors="Elisiário Couto <elisiario@couto.io>"
|
||||||
LABEL org.opencontainers.image.licenses="MIT"
|
LABEL org.opencontainers.image.licenses="MIT"
|
||||||
LABEL org.opencontainers.image.title="leggen"
|
LABEL org.opencontainers.image.title="Leggend API"
|
||||||
LABEL org.opencontainers.image.description="An Open Banking CLI"
|
LABEL org.opencontainers.image.description="Open Banking API for Leggen"
|
||||||
LABEL org.opencontainers.image.url="https://github.com/elisiariocouto/leggen"
|
LABEL org.opencontainers.image.url="https://github.com/elisiariocouto/leggen"
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY --from=builder /app/dist/ /app/
|
|
||||||
RUN pip --no-cache-dir install leggen*.whl && \
|
COPY --from=builder /app/.venv /app/.venv
|
||||||
rm leggen*.whl
|
|
||||||
ENTRYPOINT ["/usr/local/bin/leggen"]
|
EXPOSE 8000
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s CMD wget -q --spider http://127.0.0.1:8000/api/v1/health || exit 1
|
||||||
|
|
||||||
|
CMD ["/app/.venv/bin/leggend"]
|
||||||
|
|||||||
330
README.md
330
README.md
@@ -1,64 +1,322 @@
|
|||||||
# leggen
|
# 💲 leggen
|
||||||
|
|
||||||
An Open Banking CLI.
|
An Open Banking CLI and API service for managing bank connections and transactions.
|
||||||
|
|
||||||
## Features
|
This tool provides **FastAPI backend service** (`leggend`), a **React Web Interface** and a **command-line interface** (`leggen`) to connect to banks using the GoCardless Open Banking API.
|
||||||
- Connect to banks using GoCardless Open Banking API
|
|
||||||
- List all connected banks and their status
|
|
||||||
- List balances of all connected accounts
|
|
||||||
- List transactions for an account
|
|
||||||
- Sync all transactions with a MongoDB database
|
|
||||||
|
|
||||||
## Installation and Configuration
|
Having your bank data accessible through both CLI and REST API gives you the power to backup, analyze, create reports, and integrate with other applications.
|
||||||
|
|
||||||
In order to use `leggen`, you need to create a GoCardless account. GoCardless is a service that provides access to Open Banking APIs. You can create an account at https://gocardless.com/bank-account-data/.
|
## 🛠️ Technologies
|
||||||
|
|
||||||
After creating an account and getting your API keys, the best way is to use the [compose file](docker-compose.yml). Open the file and adapt it to your needs. Then run the following command:
|
### 🔌 API & Backend
|
||||||
|
- [FastAPI](https://fastapi.tiangolo.com/): High-performance async API backend (`leggend` service)
|
||||||
|
- [GoCardless Open Banking API](https://developer.gocardless.com/bank-account-data/overview): for connecting to banks
|
||||||
|
- [APScheduler](https://apscheduler.readthedocs.io/): Background job scheduling with configurable cron
|
||||||
|
|
||||||
|
### 📦 Storage
|
||||||
|
- [SQLite](https://www.sqlite.org): for storing transactions, simple and easy to use
|
||||||
|
|
||||||
|
### Frontend
|
||||||
|
- [React](https://reactjs.org/): Modern web interface with TypeScript
|
||||||
|
- [Vite](https://vitejs.dev/): Fast build tool and development server
|
||||||
|
- [Tailwind CSS](https://tailwindcss.com/): Utility-first CSS framework
|
||||||
|
- [TanStack Query](https://tanstack.com/query): Powerful data synchronization for React
|
||||||
|
|
||||||
|
## ✨ Features
|
||||||
|
|
||||||
|
### 🎯 Core Banking Features
|
||||||
|
- Connect to banks using GoCardless Open Banking API (30+ EU countries)
|
||||||
|
- List all connected banks and their connection statuses
|
||||||
|
- View balances of all connected accounts
|
||||||
|
- List and filter transactions across all accounts
|
||||||
|
- Support for both booked and pending transactions
|
||||||
|
|
||||||
|
### 🔄 Data Management
|
||||||
|
- Sync all transactions with SQLite database
|
||||||
|
- Background sync scheduling with configurable cron expressions
|
||||||
|
- Automatic transaction deduplication and status tracking
|
||||||
|
- Real-time sync status monitoring
|
||||||
|
|
||||||
|
### 📡 API & Integration
|
||||||
|
- **REST API**: Complete FastAPI backend with comprehensive endpoints
|
||||||
|
- **CLI Interface**: Enhanced command-line tools with new options
|
||||||
|
|
||||||
|
### 🔔 Notifications & Monitoring
|
||||||
|
- Discord and Telegram notifications for filtered transactions
|
||||||
|
- Configurable transaction filters (case-sensitive/insensitive)
|
||||||
|
- Account expiry notifications and status alerts
|
||||||
|
- Comprehensive logging and error handling
|
||||||
|
|
||||||
|
## 🚀 Quick Start
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
1. Create a GoCardless account at [https://gocardless.com/bank-account-data/](https://gocardless.com/bank-account-data/)
|
||||||
|
2. Get your API credentials (key and secret)
|
||||||
|
|
||||||
|
### Installation Options
|
||||||
|
|
||||||
|
#### Option 1: Docker Compose (Recommended)
|
||||||
|
The easiest way to get started is with Docker Compose, which includes both the React frontend and FastAPI backend:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ docker-compose up -d
|
# Clone the repository
|
||||||
|
git clone https://github.com/elisiariocouto/leggen.git
|
||||||
|
cd leggen
|
||||||
|
|
||||||
|
# Create your configuration
|
||||||
|
mkdir -p leggen && cp config.example.toml leggen/config.toml
|
||||||
|
# Edit leggen/config.toml with your GoCardless credentials
|
||||||
|
|
||||||
|
# Start all services (frontend + backend)
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
|
# Access the web interface at http://localhost:3000
|
||||||
|
# API is available at http://localhost:8000
|
||||||
```
|
```
|
||||||
|
|
||||||
The leggen container will exit, this is expected. Now you can run the following command to create the configuration file:
|
#### Option 2: Local Development
|
||||||
|
For development or local installation:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ docker compose run leggen init
|
# Install with uv (recommended) or pip
|
||||||
|
uv sync # or pip install -e .
|
||||||
|
|
||||||
|
# Start the API service
|
||||||
|
uv run leggend --reload # Development mode with auto-reload
|
||||||
|
|
||||||
|
# Use the CLI (in another terminal)
|
||||||
|
uv run leggen --help
|
||||||
```
|
```
|
||||||
|
|
||||||
Now you need to connect your bank accounts. Run the following command and follow the instructions:
|
### Configuration
|
||||||
|
|
||||||
|
Create a configuration file at `~/.config/leggen/config.toml`:
|
||||||
|
|
||||||
|
```toml
|
||||||
|
[gocardless]
|
||||||
|
key = "your-api-key"
|
||||||
|
secret = "your-secret-key"
|
||||||
|
url = "https://bankaccountdata.gocardless.com/api/v2"
|
||||||
|
|
||||||
|
[database]
|
||||||
|
sqlite = true
|
||||||
|
|
||||||
|
# Optional: Background sync scheduling
|
||||||
|
[scheduler.sync]
|
||||||
|
enabled = true
|
||||||
|
hour = 3 # 3 AM
|
||||||
|
minute = 0
|
||||||
|
# cron = "0 3 * * *" # Alternative: use cron expression
|
||||||
|
|
||||||
|
# Optional: Discord notifications
|
||||||
|
[notifications.discord]
|
||||||
|
webhook = "https://discord.com/api/webhooks/..."
|
||||||
|
enabled = true
|
||||||
|
|
||||||
|
# Optional: Telegram notifications
|
||||||
|
[notifications.telegram]
|
||||||
|
token = "your-bot-token"
|
||||||
|
chat_id = 12345
|
||||||
|
enabled = true
|
||||||
|
|
||||||
|
# Optional: Transaction filters for notifications
|
||||||
|
[filters]
|
||||||
|
case-insensitive = ["salary", "utility"]
|
||||||
|
case-sensitive = ["SpecificStore"]
|
||||||
|
```
|
||||||
|
|
||||||
|
## 📖 Usage
|
||||||
|
|
||||||
|
### API Service (`leggend`)
|
||||||
|
|
||||||
|
Start the FastAPI backend service:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ docker compose run leggen bank add
|
# Production mode
|
||||||
|
leggend
|
||||||
|
|
||||||
|
# Development mode with auto-reload
|
||||||
|
leggend --reload
|
||||||
|
|
||||||
|
# Custom host and port
|
||||||
|
leggend --host 127.0.0.1 --port 8080
|
||||||
```
|
```
|
||||||
|
|
||||||
To sync all transactions with the database, run the following command:
|
**API Documentation**: Visit `http://localhost:8000/docs` for interactive API documentation.
|
||||||
|
|
||||||
|
### CLI Commands (`leggen`)
|
||||||
|
|
||||||
|
#### Basic Commands
|
||||||
|
```bash
|
||||||
|
# Check connection status
|
||||||
|
leggen status
|
||||||
|
|
||||||
|
# Connect to a new bank
|
||||||
|
leggen bank add
|
||||||
|
|
||||||
|
# View account balances
|
||||||
|
leggen balances
|
||||||
|
|
||||||
|
# List recent transactions
|
||||||
|
leggen transactions --limit 20
|
||||||
|
|
||||||
|
# View detailed transactions
|
||||||
|
leggen transactions --full
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Sync Operations
|
||||||
|
```bash
|
||||||
|
# Start background sync
|
||||||
|
leggen sync
|
||||||
|
|
||||||
|
# Synchronous sync (wait for completion)
|
||||||
|
leggen sync --wait
|
||||||
|
|
||||||
|
# Force sync (override running sync)
|
||||||
|
leggen sync --force --wait
|
||||||
|
```
|
||||||
|
|
||||||
|
#### API Integration
|
||||||
|
```bash
|
||||||
|
# Use custom API URL
|
||||||
|
leggen --api-url http://localhost:8080 status
|
||||||
|
|
||||||
|
# Set via environment variable
|
||||||
|
export LEGGEND_API_URL=http://localhost:8080
|
||||||
|
leggen status
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Usage
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
$ docker compose run leggen sync
|
# Start all services (frontend + backend)
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
|
# View service status
|
||||||
|
docker compose ps
|
||||||
|
|
||||||
|
# Check logs
|
||||||
|
docker compose logs frontend
|
||||||
|
docker compose logs leggend
|
||||||
|
|
||||||
|
# Access the web interface at http://localhost:3000
|
||||||
|
# API documentation at http://localhost:8000/docs
|
||||||
|
|
||||||
|
# Stop all services
|
||||||
|
docker compose down
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage
|
## 🔌 API Endpoints
|
||||||
|
|
||||||
```
|
The FastAPI backend provides comprehensive REST endpoints:
|
||||||
$ leggen --help
|
|
||||||
Usage: leggen [OPTIONS] COMMAND [ARGS]...
|
|
||||||
|
|
||||||
Leggen: An Open Banking CLI
|
### Banks & Connections
|
||||||
|
- `GET /api/v1/banks/institutions?country=PT` - List available banks
|
||||||
|
- `POST /api/v1/banks/connect` - Create bank connection
|
||||||
|
- `GET /api/v1/banks/status` - Connection status
|
||||||
|
- `GET /api/v1/banks/countries` - Supported countries
|
||||||
|
|
||||||
Options:
|
### Accounts & Balances
|
||||||
--version Show the version and exit.
|
- `GET /api/v1/accounts` - List all accounts
|
||||||
-h, --help Show this message and exit.
|
- `GET /api/v1/accounts/{id}` - Account details
|
||||||
|
- `GET /api/v1/accounts/{id}/balances` - Account balances
|
||||||
|
- `GET /api/v1/accounts/{id}/transactions` - Account transactions
|
||||||
|
|
||||||
Command Groups:
|
### Transactions
|
||||||
bank Manage banks connections
|
- `GET /api/v1/transactions` - All transactions with filtering
|
||||||
|
- `GET /api/v1/transactions/stats` - Transaction statistics
|
||||||
|
|
||||||
Commands:
|
### Sync & Scheduling
|
||||||
balances List balances of all connected accounts
|
- `POST /api/v1/sync` - Trigger background sync
|
||||||
init Create configuration file
|
- `POST /api/v1/sync/now` - Synchronous sync
|
||||||
status List all connected banks and their status
|
- `GET /api/v1/sync/status` - Sync status
|
||||||
sync Sync all transactions with database
|
- `GET/PUT /api/v1/sync/scheduler` - Scheduler configuration
|
||||||
transactions List transactions for an account
|
|
||||||
|
### Notifications
|
||||||
|
- `GET/PUT /api/v1/notifications/settings` - Manage notifications
|
||||||
|
- `POST /api/v1/notifications/test` - Test notifications
|
||||||
|
|
||||||
|
## 🛠️ Development
|
||||||
|
|
||||||
|
### Local Development Setup
|
||||||
|
```bash
|
||||||
|
# Clone and setup
|
||||||
|
git clone https://github.com/elisiariocouto/leggen.git
|
||||||
|
cd leggen
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
uv sync
|
||||||
|
|
||||||
|
# Start API service with auto-reload
|
||||||
|
uv run leggend --reload
|
||||||
|
|
||||||
|
# Use CLI commands
|
||||||
|
uv run leggen status
|
||||||
```
|
```
|
||||||
|
|
||||||
## Caveats
|
### Testing
|
||||||
- This project is still in early development.
|
|
||||||
|
Run the comprehensive test suite with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests
|
||||||
|
uv run pytest
|
||||||
|
|
||||||
|
# Run unit tests only
|
||||||
|
uv run pytest tests/unit/
|
||||||
|
|
||||||
|
# Run with verbose output
|
||||||
|
uv run pytest tests/unit/ -v
|
||||||
|
|
||||||
|
# Run specific test files
|
||||||
|
uv run pytest tests/unit/test_config.py -v
|
||||||
|
uv run pytest tests/unit/test_scheduler.py -v
|
||||||
|
uv run pytest tests/unit/test_api_banks.py -v
|
||||||
|
|
||||||
|
# Run tests by markers
|
||||||
|
uv run pytest -m unit # Unit tests
|
||||||
|
uv run pytest -m api # API endpoint tests
|
||||||
|
uv run pytest -m cli # CLI tests
|
||||||
|
```
|
||||||
|
|
||||||
|
The test suite includes:
|
||||||
|
- **Configuration management tests** - TOML config loading/saving
|
||||||
|
- **API endpoint tests** - FastAPI route testing with mocked dependencies
|
||||||
|
- **CLI API client tests** - HTTP client integration testing
|
||||||
|
- **Background scheduler tests** - APScheduler job management
|
||||||
|
- **Mock data and fixtures** - Realistic test data for banks, accounts, transactions
|
||||||
|
|
||||||
|
### Code Structure
|
||||||
|
```
|
||||||
|
leggen/ # CLI application
|
||||||
|
├── commands/ # CLI command implementations
|
||||||
|
├── utils/ # Shared utilities
|
||||||
|
└── api_client.py # API client for leggend service
|
||||||
|
|
||||||
|
leggend/ # FastAPI backend service
|
||||||
|
├── api/ # API routes and models
|
||||||
|
├── services/ # Business logic
|
||||||
|
├── background/ # Background job scheduler
|
||||||
|
└── main.py # FastAPI application
|
||||||
|
|
||||||
|
tests/ # Test suite
|
||||||
|
├── conftest.py # Shared test fixtures
|
||||||
|
└── unit/ # Unit tests
|
||||||
|
├── test_config.py # Configuration tests
|
||||||
|
├── test_scheduler.py # Background scheduler tests
|
||||||
|
├── test_api_banks.py # Banks API tests
|
||||||
|
├── test_api_accounts.py # Accounts API tests
|
||||||
|
└── test_api_client.py # CLI API client tests
|
||||||
|
```
|
||||||
|
|
||||||
|
### Contributing
|
||||||
|
1. Fork the repository
|
||||||
|
2. Create a feature branch
|
||||||
|
3. Make your changes with tests
|
||||||
|
4. Submit a pull request
|
||||||
|
|
||||||
|
## ⚠️ Notes
|
||||||
|
- This project is in active development
|
||||||
|
- GoCardless API rate limits apply
|
||||||
|
- Some banks may require additional authorization steps
|
||||||
|
- Docker images are automatically built and published on releases
|
||||||
|
|||||||
25
compose.yml
Normal file
25
compose.yml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
services:
|
||||||
|
# React frontend service
|
||||||
|
frontend:
|
||||||
|
build:
|
||||||
|
context: ./frontend
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
restart: "unless-stopped"
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:3000:80"
|
||||||
|
environment:
|
||||||
|
- API_BACKEND_URL=${API_BACKEND_URL:-http://leggend:8000}
|
||||||
|
depends_on:
|
||||||
|
leggend:
|
||||||
|
condition: service_healthy
|
||||||
|
|
||||||
|
# FastAPI backend service
|
||||||
|
leggend:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
restart: "unless-stopped"
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:8000:8000"
|
||||||
|
volumes:
|
||||||
|
- "./data:/root/.config/leggen"
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
version: '3.1'
|
|
||||||
|
|
||||||
services:
|
|
||||||
mongo:
|
|
||||||
image: mongo:7
|
|
||||||
restart: "unless-stopped"
|
|
||||||
# If you want to expose the mongodb port to the host, uncomment the following lines
|
|
||||||
# ports:
|
|
||||||
# - 127.0.0.1:27017:27017
|
|
||||||
volumes:
|
|
||||||
- "./data:/data/db"
|
|
||||||
environment:
|
|
||||||
MONGO_INITDB_ROOT_USERNAME: "leggen"
|
|
||||||
MONGO_INITDB_ROOT_PASSWORD: "changeme"
|
|
||||||
|
|
||||||
leggen:
|
|
||||||
image: elisiariocouto/leggen:latest
|
|
||||||
restart: "no"
|
|
||||||
environment:
|
|
||||||
LEGGEN_MONGO_URI: mongodb://leggen:changeme@mongo:27017/
|
|
||||||
LEGGEN_GC_API_KEY: "changeme"
|
|
||||||
LEGGEN_GC_API_SECRET: "changeme"
|
|
||||||
volumes:
|
|
||||||
- "./leggen:/root/.config/leggen"
|
|
||||||
depends_on:
|
|
||||||
- mongo
|
|
||||||
|
|
||||||
# If you want to have an admin interface for your mongodb, uncomment the following lines
|
|
||||||
# mongo-express:
|
|
||||||
# image: mongo-express
|
|
||||||
# restart: "unless-stopped"
|
|
||||||
# # By default, we are exposing the mongo-express port to the host
|
|
||||||
# ports:
|
|
||||||
# - 127.0.0.1:8081:8081
|
|
||||||
# environment:
|
|
||||||
# ME_CONFIG_MONGODB_URL: "mongodb://leggen:changeme@mongo:27017/"
|
|
||||||
# ME_CONFIG_BASICAUTH_USERNAME: ""
|
|
||||||
# depends_on:
|
|
||||||
# - mongo
|
|
||||||
24
frontend/.gitignore
vendored
Normal file
24
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
pnpm-debug.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
|
||||||
|
node_modules
|
||||||
|
dist
|
||||||
|
dist-ssr
|
||||||
|
*.local
|
||||||
|
|
||||||
|
# Editor directories and files
|
||||||
|
.vscode/*
|
||||||
|
!.vscode/extensions.json
|
||||||
|
.idea
|
||||||
|
.DS_Store
|
||||||
|
*.suo
|
||||||
|
*.ntvs*
|
||||||
|
*.njsproj
|
||||||
|
*.sln
|
||||||
|
*.sw?
|
||||||
34
frontend/Dockerfile
Normal file
34
frontend/Dockerfile
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
# Build stage
|
||||||
|
FROM node:20-alpine AS builder
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy package files
|
||||||
|
COPY package*.json ./
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
RUN npm i
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
# Build the application
|
||||||
|
RUN npm run build
|
||||||
|
|
||||||
|
# Production stage
|
||||||
|
FROM nginx:alpine
|
||||||
|
|
||||||
|
# Copy built application from builder stage
|
||||||
|
COPY --from=builder /app/dist /usr/share/nginx/html
|
||||||
|
|
||||||
|
# Copy server configuration template
|
||||||
|
COPY default.conf.template /etc/nginx/templates/default.conf.template
|
||||||
|
|
||||||
|
# Set default API backend URL (can be overridden at runtime)
|
||||||
|
ENV API_BACKEND_URL=http://leggend:8000
|
||||||
|
|
||||||
|
# Expose port 80
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Start nginx
|
||||||
|
CMD ["nginx", "-g", "daemon off;"]
|
||||||
124
frontend/README.md
Normal file
124
frontend/README.md
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
# Leggen Frontend
|
||||||
|
|
||||||
|
A modern React dashboard for the Leggen Open Banking CLI tool. This frontend provides a user-friendly interface to view bank accounts, transactions, and balances.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- **Modern Dashboard**: Clean, responsive interface built with React and TypeScript
|
||||||
|
- **Bank Accounts Overview**: View all connected bank accounts with real-time balances
|
||||||
|
- **Transaction Management**: Browse, search, and filter transactions across all accounts
|
||||||
|
- **Responsive Design**: Works seamlessly on desktop, tablet, and mobile devices
|
||||||
|
- **Real-time Data**: Powered by React Query for efficient data fetching and caching
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Node.js 18+ and npm
|
||||||
|
- Leggen API server running (configurable via environment variables)
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
1. **Install dependencies:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Start the development server:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Open your browser to:**
|
||||||
|
```
|
||||||
|
http://localhost:5173
|
||||||
|
```
|
||||||
|
|
||||||
|
## Available Scripts
|
||||||
|
|
||||||
|
- `npm run dev` - Start development server
|
||||||
|
- `npm run build` - Build for production
|
||||||
|
- `npm run preview` - Preview production build
|
||||||
|
- `npm run lint` - Run ESLint
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Key Technologies
|
||||||
|
|
||||||
|
- **React 18** - Modern React with hooks and concurrent features
|
||||||
|
- **TypeScript** - Type-safe JavaScript development
|
||||||
|
- **Vite** - Fast build tool and development server
|
||||||
|
- **Tailwind CSS** - Utility-first CSS framework
|
||||||
|
- **React Query** - Data fetching and caching
|
||||||
|
- **Axios** - HTTP client for API calls
|
||||||
|
- **Lucide React** - Modern icon library
|
||||||
|
|
||||||
|
### Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
├── components/ # React components
|
||||||
|
│ ├── Dashboard.tsx # Main dashboard layout
|
||||||
|
│ ├── AccountsOverview.tsx
|
||||||
|
│ └── TransactionsList.tsx
|
||||||
|
├── lib/ # Utilities and API client
|
||||||
|
│ ├── api.ts # API client and endpoints
|
||||||
|
│ └── utils.ts # Helper functions
|
||||||
|
├── types/ # TypeScript type definitions
|
||||||
|
│ └── api.ts # API response types
|
||||||
|
└── App.tsx # Main application component
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Integration
|
||||||
|
|
||||||
|
The frontend connects to the Leggen API server (configurable via environment variables). The API client handles:
|
||||||
|
|
||||||
|
- Account retrieval and management
|
||||||
|
- Transaction fetching with filtering
|
||||||
|
- Balance information
|
||||||
|
- Error handling and loading states
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### API URL Configuration
|
||||||
|
|
||||||
|
The frontend supports configurable API URLs through environment variables:
|
||||||
|
|
||||||
|
**Development:**
|
||||||
|
|
||||||
|
- Set `VITE_API_URL` to call external APIs during development
|
||||||
|
- Example: `VITE_API_URL=https://staging-api.example.com npm run dev`
|
||||||
|
|
||||||
|
**Production:**
|
||||||
|
|
||||||
|
- Uses relative URLs (`/api/v1`) that nginx proxies to the backend
|
||||||
|
- Configure nginx proxy target via `API_BACKEND_URL` environment variable
|
||||||
|
- Default: `http://leggend:8000`
|
||||||
|
|
||||||
|
**Docker Compose:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Override API backend URL
|
||||||
|
API_BACKEND_URL=https://prod-api.example.com docker-compose up
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
The dashboard is designed to work with the Leggen CLI tool's API endpoints. Make sure your Leggen server is running before starting the frontend development server.
|
||||||
|
|
||||||
|
### Adding New Features
|
||||||
|
|
||||||
|
1. Define TypeScript types in `src/types/api.ts`
|
||||||
|
2. Add API methods to `src/lib/api.ts`
|
||||||
|
3. Create React components in `src/components/`
|
||||||
|
4. Use React Query for data fetching and state management
|
||||||
|
|
||||||
|
## Deployment
|
||||||
|
|
||||||
|
Build the application for production:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
The built files will be in the `dist/` directory, ready to be served by any static web server.
|
||||||
33
frontend/default.conf.template
Normal file
33
frontend/default.conf.template
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
server {
|
||||||
|
listen 80;
|
||||||
|
server_name localhost;
|
||||||
|
root /usr/share/nginx/html;
|
||||||
|
index index.html;
|
||||||
|
|
||||||
|
# Enable gzip compression
|
||||||
|
gzip on;
|
||||||
|
gzip_vary on;
|
||||||
|
gzip_min_length 1024;
|
||||||
|
gzip_proxied expired no-cache no-store private auth;
|
||||||
|
gzip_types text/plain text/css text/xml text/javascript application/javascript application/xml+rss application/json;
|
||||||
|
|
||||||
|
# Handle client-side routing
|
||||||
|
location / {
|
||||||
|
try_files $uri $uri/ /index.html;
|
||||||
|
}
|
||||||
|
|
||||||
|
# API proxy to backend (configurable via API_BACKEND_URL env var)
|
||||||
|
location /api/ {
|
||||||
|
proxy_pass ${API_BACKEND_URL};
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Cache static assets
|
||||||
|
location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
|
||||||
|
expires 1y;
|
||||||
|
add_header Cache-Control "public, immutable";
|
||||||
|
}
|
||||||
|
}
|
||||||
23
frontend/eslint.config.js
Normal file
23
frontend/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import js from "@eslint/js";
|
||||||
|
import globals from "globals";
|
||||||
|
import reactHooks from "eslint-plugin-react-hooks";
|
||||||
|
import reactRefresh from "eslint-plugin-react-refresh";
|
||||||
|
import tseslint from "typescript-eslint";
|
||||||
|
import { globalIgnores } from "eslint/config";
|
||||||
|
|
||||||
|
export default tseslint.config([
|
||||||
|
globalIgnores(["dist"]),
|
||||||
|
{
|
||||||
|
files: ["**/*.{ts,tsx}"],
|
||||||
|
extends: [
|
||||||
|
js.configs.recommended,
|
||||||
|
tseslint.configs.recommended,
|
||||||
|
reactHooks.configs["recommended-latest"],
|
||||||
|
reactRefresh.configs.vite,
|
||||||
|
],
|
||||||
|
languageOptions: {
|
||||||
|
ecmaVersion: 2020,
|
||||||
|
globals: globals.browser,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]);
|
||||||
13
frontend/index.html
Normal file
13
frontend/index.html
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<title>Leggen</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div id="root"></div>
|
||||||
|
<script type="module" src="/src/main.tsx"></script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
4629
frontend/package-lock.json
generated
Normal file
4629
frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
37
frontend/package.json
Normal file
37
frontend/package.json
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
{
|
||||||
|
"name": "frontend",
|
||||||
|
"private": true,
|
||||||
|
"version": "0.0.0",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite",
|
||||||
|
"build": "tsc -b && vite build",
|
||||||
|
"lint": "eslint .",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@tailwindcss/forms": "^0.5.10",
|
||||||
|
"@tanstack/react-query": "^5.87.1",
|
||||||
|
"autoprefixer": "^10.4.21",
|
||||||
|
"axios": "^1.11.0",
|
||||||
|
"clsx": "^2.1.1",
|
||||||
|
"lucide-react": "^0.542.0",
|
||||||
|
"postcss": "^8.5.6",
|
||||||
|
"react": "^19.1.1",
|
||||||
|
"react-dom": "^19.1.1",
|
||||||
|
"tailwindcss": "^3.4.17"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^9.33.0",
|
||||||
|
"@types/react": "^19.1.10",
|
||||||
|
"@types/react-dom": "^19.1.7",
|
||||||
|
"@vitejs/plugin-react": "^5.0.0",
|
||||||
|
"eslint": "^9.33.0",
|
||||||
|
"eslint-plugin-react-hooks": "^5.2.0",
|
||||||
|
"eslint-plugin-react-refresh": "^0.4.20",
|
||||||
|
"globals": "^16.3.0",
|
||||||
|
"typescript": "~5.8.3",
|
||||||
|
"typescript-eslint": "^8.39.1",
|
||||||
|
"vite": "^7.1.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
6
frontend/postcss.config.js
Normal file
6
frontend/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
export default {
|
||||||
|
plugins: {
|
||||||
|
tailwindcss: {},
|
||||||
|
autoprefixer: {},
|
||||||
|
},
|
||||||
|
};
|
||||||
4
frontend/public/favicon.svg
Normal file
4
frontend/public/favicon.svg
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" fill="none">
|
||||||
|
<rect width="32" height="32" rx="6" fill="#3B82F6"/>
|
||||||
|
<path d="M8 24V8h6c2.2 0 4 1.8 4 4v4c0 2.2-1.8 4-4 4H12v4H8zm4-8h2c.6 0 1-.4 1-1v-2c0-.6-.4-1-1-1h-2v4z" fill="white"/>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 257 B |
1
frontend/public/vite.svg
Normal file
1
frontend/public/vite.svg
Normal file
@@ -0,0 +1 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||||
|
After Width: | Height: | Size: 1.5 KiB |
1
frontend/src/App.css
Normal file
1
frontend/src/App.css
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/* Additional styles if needed */
|
||||||
23
frontend/src/App.tsx
Normal file
23
frontend/src/App.tsx
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
|
||||||
|
import Dashboard from "./components/Dashboard";
|
||||||
|
|
||||||
|
const queryClient = new QueryClient({
|
||||||
|
defaultOptions: {
|
||||||
|
queries: {
|
||||||
|
refetchOnWindowFocus: false,
|
||||||
|
retry: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
function App() {
|
||||||
|
return (
|
||||||
|
<QueryClientProvider client={queryClient}>
|
||||||
|
<div className="min-h-screen bg-gray-50">
|
||||||
|
<Dashboard />
|
||||||
|
</div>
|
||||||
|
</QueryClientProvider>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default App;
|
||||||
1
frontend/src/assets/react.svg
Normal file
1
frontend/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||||
|
After Width: | Height: | Size: 4.0 KiB |
214
frontend/src/components/AccountsOverview.tsx
Normal file
214
frontend/src/components/AccountsOverview.tsx
Normal file
@@ -0,0 +1,214 @@
|
|||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
CreditCard,
|
||||||
|
TrendingUp,
|
||||||
|
TrendingDown,
|
||||||
|
Building2,
|
||||||
|
RefreshCw,
|
||||||
|
AlertCircle,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { apiClient } from "../lib/api";
|
||||||
|
import { formatCurrency, formatDate } from "../lib/utils";
|
||||||
|
import LoadingSpinner from "./LoadingSpinner";
|
||||||
|
import type { Account, Balance } from "../types/api";
|
||||||
|
|
||||||
|
export default function AccountsOverview() {
|
||||||
|
const {
|
||||||
|
data: accounts,
|
||||||
|
isLoading: accountsLoading,
|
||||||
|
error: accountsError,
|
||||||
|
refetch: refetchAccounts,
|
||||||
|
} = useQuery<Account[]>({
|
||||||
|
queryKey: ["accounts"],
|
||||||
|
queryFn: apiClient.getAccounts,
|
||||||
|
});
|
||||||
|
|
||||||
|
const { data: balances } = useQuery<Balance[]>({
|
||||||
|
queryKey: ["balances"],
|
||||||
|
queryFn: () => apiClient.getBalances(),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (accountsLoading) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<LoadingSpinner message="Loading accounts..." />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (accountsError) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-center text-center">
|
||||||
|
<div>
|
||||||
|
<AlertCircle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
Failed to load accounts
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600 mb-4">
|
||||||
|
Unable to connect to the Leggen API. Please check your
|
||||||
|
configuration and ensure the API server is running.
|
||||||
|
</p>
|
||||||
|
<button
|
||||||
|
onClick={() => refetchAccounts()}
|
||||||
|
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4 mr-2" />
|
||||||
|
Retry
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const totalBalance =
|
||||||
|
accounts?.reduce((sum, account) => {
|
||||||
|
// Get the first available balance from the balances array
|
||||||
|
const primaryBalance = account.balances?.[0]?.amount || 0;
|
||||||
|
return sum + primaryBalance;
|
||||||
|
}, 0) || 0;
|
||||||
|
const totalAccounts = accounts?.length || 0;
|
||||||
|
const uniqueBanks = new Set(accounts?.map((acc) => acc.institution_id) || [])
|
||||||
|
.size;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Summary Cards */}
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<p className="text-sm font-medium text-gray-600">Total Balance</p>
|
||||||
|
<p className="text-2xl font-bold text-gray-900">
|
||||||
|
{formatCurrency(totalBalance)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="p-3 bg-green-100 rounded-full">
|
||||||
|
<TrendingUp className="h-6 w-6 text-green-600" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<p className="text-sm font-medium text-gray-600">
|
||||||
|
Total Accounts
|
||||||
|
</p>
|
||||||
|
<p className="text-2xl font-bold text-gray-900">
|
||||||
|
{totalAccounts}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="p-3 bg-blue-100 rounded-full">
|
||||||
|
<CreditCard className="h-6 w-6 text-blue-600" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div>
|
||||||
|
<p className="text-sm font-medium text-gray-600">
|
||||||
|
Connected Banks
|
||||||
|
</p>
|
||||||
|
<p className="text-2xl font-bold text-gray-900">{uniqueBanks}</p>
|
||||||
|
</div>
|
||||||
|
<div className="p-3 bg-purple-100 rounded-full">
|
||||||
|
<Building2 className="h-6 w-6 text-purple-600" />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Accounts List */}
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<div className="px-6 py-4 border-b border-gray-200">
|
||||||
|
<h3 className="text-lg font-medium text-gray-900">Bank Accounts</h3>
|
||||||
|
<p className="text-sm text-gray-600">
|
||||||
|
Manage your connected bank accounts
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!accounts || accounts.length === 0 ? (
|
||||||
|
<div className="p-6 text-center">
|
||||||
|
<CreditCard className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
No accounts found
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600">
|
||||||
|
Connect your first bank account to get started with Leggen.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="divide-y divide-gray-200">
|
||||||
|
{accounts.map((account) => {
|
||||||
|
// Get balance from account's balances array or fallback to balances query
|
||||||
|
const accountBalance = account.balances?.[0];
|
||||||
|
const fallbackBalance = balances?.find(
|
||||||
|
(b) => b.account_id === account.id,
|
||||||
|
);
|
||||||
|
const balance =
|
||||||
|
accountBalance?.amount || fallbackBalance?.balance_amount || 0;
|
||||||
|
const currency =
|
||||||
|
accountBalance?.currency ||
|
||||||
|
fallbackBalance?.currency ||
|
||||||
|
account.currency ||
|
||||||
|
"EUR";
|
||||||
|
const isPositive = balance >= 0;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={account.id}
|
||||||
|
className="p-6 hover:bg-gray-50 transition-colors"
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div className="flex items-center space-x-4">
|
||||||
|
<div className="p-3 bg-gray-100 rounded-full">
|
||||||
|
<Building2 className="h-6 w-6 text-gray-600" />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h4 className="text-lg font-medium text-gray-900">
|
||||||
|
{account.name || "Unnamed Account"}
|
||||||
|
</h4>
|
||||||
|
<p className="text-sm text-gray-600">
|
||||||
|
{account.institution_id} • {account.status}
|
||||||
|
</p>
|
||||||
|
{account.iban && (
|
||||||
|
<p className="text-xs text-gray-500 mt-1">
|
||||||
|
IBAN: {account.iban}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="text-right">
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
{isPositive ? (
|
||||||
|
<TrendingUp className="h-4 w-4 text-green-500" />
|
||||||
|
) : (
|
||||||
|
<TrendingDown className="h-4 w-4 text-red-500" />
|
||||||
|
)}
|
||||||
|
<p
|
||||||
|
className={`text-lg font-semibold ${
|
||||||
|
isPositive ? "text-green-600" : "text-red-600"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{formatCurrency(balance, currency)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<p className="text-sm text-gray-500">
|
||||||
|
Updated{" "}
|
||||||
|
{formatDate(account.last_accessed || account.created)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
197
frontend/src/components/Dashboard.tsx
Normal file
197
frontend/src/components/Dashboard.tsx
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
CreditCard,
|
||||||
|
TrendingUp,
|
||||||
|
Activity,
|
||||||
|
Menu,
|
||||||
|
X,
|
||||||
|
Home,
|
||||||
|
List,
|
||||||
|
BarChart3,
|
||||||
|
Wifi,
|
||||||
|
WifiOff,
|
||||||
|
Bell,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { apiClient } from "../lib/api";
|
||||||
|
import AccountsOverview from "./AccountsOverview";
|
||||||
|
import TransactionsList from "./TransactionsList";
|
||||||
|
import Notifications from "./Notifications";
|
||||||
|
import ErrorBoundary from "./ErrorBoundary";
|
||||||
|
import { cn } from "../lib/utils";
|
||||||
|
import type { Account } from "../types/api";
|
||||||
|
|
||||||
|
type TabType = "overview" | "transactions" | "analytics" | "notifications";
|
||||||
|
|
||||||
|
export default function Dashboard() {
|
||||||
|
const [activeTab, setActiveTab] = useState<TabType>("overview");
|
||||||
|
const [sidebarOpen, setSidebarOpen] = useState(false);
|
||||||
|
|
||||||
|
const { data: accounts } = useQuery<Account[]>({
|
||||||
|
queryKey: ["accounts"],
|
||||||
|
queryFn: apiClient.getAccounts,
|
||||||
|
});
|
||||||
|
|
||||||
|
const {
|
||||||
|
data: healthStatus,
|
||||||
|
isLoading: healthLoading,
|
||||||
|
isError: healthError,
|
||||||
|
} = useQuery({
|
||||||
|
queryKey: ["health"],
|
||||||
|
queryFn: async () => {
|
||||||
|
return await apiClient.getHealth();
|
||||||
|
},
|
||||||
|
refetchInterval: 30000, // Check every 30 seconds
|
||||||
|
retry: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
const navigation = [
|
||||||
|
{ name: "Overview", icon: Home, id: "overview" as TabType },
|
||||||
|
{ name: "Transactions", icon: List, id: "transactions" as TabType },
|
||||||
|
{ name: "Analytics", icon: BarChart3, id: "analytics" as TabType },
|
||||||
|
{ name: "Notifications", icon: Bell, id: "notifications" as TabType },
|
||||||
|
];
|
||||||
|
|
||||||
|
const totalBalance =
|
||||||
|
accounts?.reduce((sum, account) => {
|
||||||
|
// Get the first available balance from the balances array
|
||||||
|
const primaryBalance = account.balances?.[0]?.amount || 0;
|
||||||
|
return sum + primaryBalance;
|
||||||
|
}, 0) || 0;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex h-screen bg-gray-100">
|
||||||
|
{/* Sidebar */}
|
||||||
|
<div
|
||||||
|
className={cn(
|
||||||
|
"fixed inset-y-0 left-0 z-50 w-64 bg-white shadow-lg transform transition-transform duration-300 ease-in-out lg:translate-x-0 lg:static lg:inset-0",
|
||||||
|
sidebarOpen ? "translate-x-0" : "-translate-x-full",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between h-16 px-6 border-b border-gray-200">
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
<CreditCard className="h-8 w-8 text-blue-600" />
|
||||||
|
<h1 className="text-xl font-bold text-gray-900">Leggen</h1>
|
||||||
|
</div>
|
||||||
|
<button
|
||||||
|
onClick={() => setSidebarOpen(false)}
|
||||||
|
className="lg:hidden p-1 rounded-md text-gray-400 hover:text-gray-500"
|
||||||
|
>
|
||||||
|
<X className="h-6 w-6" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<nav className="px-6 py-4">
|
||||||
|
<div className="space-y-1">
|
||||||
|
{navigation.map((item) => (
|
||||||
|
<button
|
||||||
|
key={item.id}
|
||||||
|
onClick={() => {
|
||||||
|
setActiveTab(item.id);
|
||||||
|
setSidebarOpen(false);
|
||||||
|
}}
|
||||||
|
className={cn(
|
||||||
|
"flex items-center w-full px-3 py-2 text-sm font-medium rounded-md transition-colors",
|
||||||
|
activeTab === item.id
|
||||||
|
? "bg-blue-100 text-blue-700"
|
||||||
|
: "text-gray-700 hover:text-gray-900 hover:bg-gray-100",
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<item.icon className="mr-3 h-5 w-5" />
|
||||||
|
{item.name}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
|
||||||
|
{/* Account Summary in Sidebar */}
|
||||||
|
<div className="px-6 py-4 border-t border-gray-200 mt-auto">
|
||||||
|
<div className="bg-gray-50 rounded-lg p-4">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<span className="text-sm font-medium text-gray-600">
|
||||||
|
Total Balance
|
||||||
|
</span>
|
||||||
|
<TrendingUp className="h-4 w-4 text-green-500" />
|
||||||
|
</div>
|
||||||
|
<p className="text-2xl font-bold text-gray-900 mt-1">
|
||||||
|
{new Intl.NumberFormat("en-US", {
|
||||||
|
style: "currency",
|
||||||
|
currency: "EUR",
|
||||||
|
}).format(totalBalance)}
|
||||||
|
</p>
|
||||||
|
<p className="text-sm text-gray-500 mt-1">
|
||||||
|
{accounts?.length || 0} accounts
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Overlay for mobile */}
|
||||||
|
{sidebarOpen && (
|
||||||
|
<div
|
||||||
|
className="fixed inset-0 z-40 bg-gray-600 bg-opacity-75 lg:hidden"
|
||||||
|
onClick={() => setSidebarOpen(false)}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Main content */}
|
||||||
|
<div className="flex flex-col flex-1 overflow-hidden">
|
||||||
|
{/* Header */}
|
||||||
|
<header className="bg-white shadow-sm border-b border-gray-200">
|
||||||
|
<div className="flex items-center justify-between h-16 px-6">
|
||||||
|
<div className="flex items-center">
|
||||||
|
<button
|
||||||
|
onClick={() => setSidebarOpen(true)}
|
||||||
|
className="lg:hidden p-1 rounded-md text-gray-400 hover:text-gray-500"
|
||||||
|
>
|
||||||
|
<Menu className="h-6 w-6" />
|
||||||
|
</button>
|
||||||
|
<h2 className="text-lg font-semibold text-gray-900 lg:ml-0 ml-4">
|
||||||
|
{navigation.find((item) => item.id === activeTab)?.name}
|
||||||
|
</h2>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
<div className="flex items-center space-x-1">
|
||||||
|
{healthLoading ? (
|
||||||
|
<>
|
||||||
|
<Activity className="h-4 w-4 text-yellow-500 animate-pulse" />
|
||||||
|
<span className="text-sm text-gray-600">Checking...</span>
|
||||||
|
</>
|
||||||
|
) : healthError || healthStatus?.status !== "healthy" ? (
|
||||||
|
<>
|
||||||
|
<WifiOff className="h-4 w-4 text-red-500" />
|
||||||
|
<span className="text-sm text-red-500">Disconnected</span>
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<Wifi className="h-4 w-4 text-green-500" />
|
||||||
|
<span className="text-sm text-gray-600">Connected</span>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
{/* Main content area */}
|
||||||
|
<main className="flex-1 overflow-y-auto p-6">
|
||||||
|
<ErrorBoundary>
|
||||||
|
{activeTab === "overview" && <AccountsOverview />}
|
||||||
|
{activeTab === "transactions" && <TransactionsList />}
|
||||||
|
{activeTab === "analytics" && (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-4">
|
||||||
|
Analytics
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600">
|
||||||
|
Analytics dashboard coming soon...
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
{activeTab === "notifications" && <Notifications />}
|
||||||
|
</ErrorBoundary>
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
89
frontend/src/components/ErrorBoundary.tsx
Normal file
89
frontend/src/components/ErrorBoundary.tsx
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import { Component } from "react";
|
||||||
|
import type { ErrorInfo, ReactNode } from "react";
|
||||||
|
import { AlertTriangle, RefreshCw } from "lucide-react";
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
children: ReactNode;
|
||||||
|
fallback?: ReactNode;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface State {
|
||||||
|
hasError: boolean;
|
||||||
|
error?: Error;
|
||||||
|
errorInfo?: ErrorInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
class ErrorBoundary extends Component<Props, State> {
|
||||||
|
constructor(props: Props) {
|
||||||
|
super(props);
|
||||||
|
this.state = { hasError: false };
|
||||||
|
}
|
||||||
|
|
||||||
|
static getDerivedStateFromError(error: Error): State {
|
||||||
|
return { hasError: true, error };
|
||||||
|
}
|
||||||
|
|
||||||
|
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
|
||||||
|
console.error("ErrorBoundary caught an error:", error, errorInfo);
|
||||||
|
this.setState({ error, errorInfo });
|
||||||
|
}
|
||||||
|
|
||||||
|
handleReset = () => {
|
||||||
|
this.setState({ hasError: false, error: undefined, errorInfo: undefined });
|
||||||
|
};
|
||||||
|
|
||||||
|
render() {
|
||||||
|
if (this.state.hasError) {
|
||||||
|
if (this.props.fallback) {
|
||||||
|
return this.props.fallback;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-center text-center">
|
||||||
|
<div>
|
||||||
|
<AlertTriangle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
Something went wrong
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600 mb-4">
|
||||||
|
An error occurred while rendering this component. Please try
|
||||||
|
refreshing or check the console for more details.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
{this.state.error && (
|
||||||
|
<div className="bg-red-50 border border-red-200 rounded-md p-3 mb-4 text-left">
|
||||||
|
<p className="text-sm font-mono text-red-800">
|
||||||
|
<strong>Error:</strong> {this.state.error.message}
|
||||||
|
</p>
|
||||||
|
{this.state.error.stack && (
|
||||||
|
<details className="mt-2">
|
||||||
|
<summary className="text-sm text-red-600 cursor-pointer">
|
||||||
|
Stack trace
|
||||||
|
</summary>
|
||||||
|
<pre className="text-xs text-red-700 mt-1 whitespace-pre-wrap">
|
||||||
|
{this.state.error.stack}
|
||||||
|
</pre>
|
||||||
|
</details>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
<button
|
||||||
|
onClick={this.handleReset}
|
||||||
|
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4 mr-2" />
|
||||||
|
Try Again
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.props.children;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default ErrorBoundary;
|
||||||
18
frontend/src/components/LoadingSpinner.tsx
Normal file
18
frontend/src/components/LoadingSpinner.tsx
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import { RefreshCw } from "lucide-react";
|
||||||
|
|
||||||
|
interface LoadingSpinnerProps {
|
||||||
|
message?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function LoadingSpinner({
|
||||||
|
message = "Loading...",
|
||||||
|
}: LoadingSpinnerProps) {
|
||||||
|
return (
|
||||||
|
<div className="flex items-center justify-center p-8">
|
||||||
|
<div className="text-center">
|
||||||
|
<RefreshCw className="h-8 w-8 animate-spin text-blue-600 mx-auto mb-2" />
|
||||||
|
<p className="text-gray-600 text-sm">{message}</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
320
frontend/src/components/Notifications.tsx
Normal file
320
frontend/src/components/Notifications.tsx
Normal file
@@ -0,0 +1,320 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
Bell,
|
||||||
|
MessageSquare,
|
||||||
|
Send,
|
||||||
|
Trash2,
|
||||||
|
RefreshCw,
|
||||||
|
AlertCircle,
|
||||||
|
CheckCircle,
|
||||||
|
Settings,
|
||||||
|
TestTube,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { apiClient } from "../lib/api";
|
||||||
|
import LoadingSpinner from "./LoadingSpinner";
|
||||||
|
import type { NotificationSettings, NotificationService } from "../types/api";
|
||||||
|
|
||||||
|
export default function Notifications() {
|
||||||
|
const [testService, setTestService] = useState("");
|
||||||
|
const [testMessage, setTestMessage] = useState(
|
||||||
|
"Test notification from Leggen",
|
||||||
|
);
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
|
||||||
|
const {
|
||||||
|
data: settings,
|
||||||
|
isLoading: settingsLoading,
|
||||||
|
error: settingsError,
|
||||||
|
refetch: refetchSettings,
|
||||||
|
} = useQuery<NotificationSettings>({
|
||||||
|
queryKey: ["notificationSettings"],
|
||||||
|
queryFn: apiClient.getNotificationSettings,
|
||||||
|
});
|
||||||
|
|
||||||
|
const {
|
||||||
|
data: services,
|
||||||
|
isLoading: servicesLoading,
|
||||||
|
error: servicesError,
|
||||||
|
refetch: refetchServices,
|
||||||
|
} = useQuery<NotificationService[]>({
|
||||||
|
queryKey: ["notificationServices"],
|
||||||
|
queryFn: apiClient.getNotificationServices,
|
||||||
|
});
|
||||||
|
|
||||||
|
const testMutation = useMutation({
|
||||||
|
mutationFn: apiClient.testNotification,
|
||||||
|
onSuccess: () => {
|
||||||
|
// Could show a success toast here
|
||||||
|
console.log("Test notification sent successfully");
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.error("Failed to send test notification:", error);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const deleteServiceMutation = useMutation({
|
||||||
|
mutationFn: apiClient.deleteNotificationService,
|
||||||
|
onSuccess: () => {
|
||||||
|
queryClient.invalidateQueries({ queryKey: ["notificationSettings"] });
|
||||||
|
queryClient.invalidateQueries({ queryKey: ["notificationServices"] });
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (settingsLoading || servicesLoading) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<LoadingSpinner message="Loading notifications..." />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (settingsError || servicesError) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-center text-center">
|
||||||
|
<div>
|
||||||
|
<AlertCircle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
Failed to load notifications
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600 mb-4">
|
||||||
|
Unable to connect to the Leggen API. Please check your
|
||||||
|
configuration and ensure the API server is running.
|
||||||
|
</p>
|
||||||
|
<button
|
||||||
|
onClick={() => {
|
||||||
|
refetchSettings();
|
||||||
|
refetchServices();
|
||||||
|
}}
|
||||||
|
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4 mr-2" />
|
||||||
|
Retry
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleTestNotification = () => {
|
||||||
|
if (!testService) return;
|
||||||
|
|
||||||
|
testMutation.mutate({
|
||||||
|
service: testService,
|
||||||
|
message: testMessage,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const handleDeleteService = (serviceName: string) => {
|
||||||
|
if (
|
||||||
|
confirm(
|
||||||
|
`Are you sure you want to delete the ${serviceName} notification service?`,
|
||||||
|
)
|
||||||
|
) {
|
||||||
|
deleteServiceMutation.mutate(serviceName);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Test Notification Section */}
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center space-x-2 mb-4">
|
||||||
|
<TestTube className="h-5 w-5 text-blue-600" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900">
|
||||||
|
Test Notifications
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-2">
|
||||||
|
Service
|
||||||
|
</label>
|
||||||
|
<select
|
||||||
|
value={testService}
|
||||||
|
onChange={(e) => setTestService(e.target.value)}
|
||||||
|
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||||
|
>
|
||||||
|
<option value="">Select a service...</option>
|
||||||
|
{services?.map((service) => (
|
||||||
|
<option key={service.name} value={service.name}>
|
||||||
|
{service.name} {service.enabled ? "(Enabled)" : "(Disabled)"}
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-2">
|
||||||
|
Message
|
||||||
|
</label>
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={testMessage}
|
||||||
|
onChange={(e) => setTestMessage(e.target.value)}
|
||||||
|
className="w-full px-3 py-2 border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||||
|
placeholder="Test message..."
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="mt-4">
|
||||||
|
<button
|
||||||
|
onClick={handleTestNotification}
|
||||||
|
disabled={!testService || testMutation.isPending}
|
||||||
|
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 disabled:opacity-50 disabled:cursor-not-allowed transition-colors"
|
||||||
|
>
|
||||||
|
<Send className="h-4 w-4 mr-2" />
|
||||||
|
{testMutation.isPending ? "Sending..." : "Send Test Notification"}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Notification Services */}
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<div className="px-6 py-4 border-b border-gray-200">
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
<Bell className="h-5 w-5 text-blue-600" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900">
|
||||||
|
Notification Services
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
<p className="text-sm text-gray-600 mt-1">
|
||||||
|
Manage your notification services
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{!services || services.length === 0 ? (
|
||||||
|
<div className="p-6 text-center">
|
||||||
|
<Bell className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
No notification services configured
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600">
|
||||||
|
Configure notification services in your backend to receive alerts.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="divide-y divide-gray-200">
|
||||||
|
{services.map((service) => (
|
||||||
|
<div
|
||||||
|
key={service.name}
|
||||||
|
className="p-6 hover:bg-gray-50 transition-colors"
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<div className="flex items-center space-x-4">
|
||||||
|
<div className="p-3 bg-gray-100 rounded-full">
|
||||||
|
{service.name.toLowerCase().includes("discord") ? (
|
||||||
|
<MessageSquare className="h-6 w-6 text-gray-600" />
|
||||||
|
) : service.name.toLowerCase().includes("telegram") ? (
|
||||||
|
<Send className="h-6 w-6 text-gray-600" />
|
||||||
|
) : (
|
||||||
|
<Bell className="h-6 w-6 text-gray-600" />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h4 className="text-lg font-medium text-gray-900 capitalize">
|
||||||
|
{service.name}
|
||||||
|
</h4>
|
||||||
|
<div className="flex items-center space-x-2 mt-1">
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${
|
||||||
|
service.enabled
|
||||||
|
? "bg-green-100 text-green-800"
|
||||||
|
: "bg-red-100 text-red-800"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{service.enabled ? (
|
||||||
|
<CheckCircle className="h-3 w-3 mr-1" />
|
||||||
|
) : (
|
||||||
|
<AlertCircle className="h-3 w-3 mr-1" />
|
||||||
|
)}
|
||||||
|
{service.enabled ? "Enabled" : "Disabled"}
|
||||||
|
</span>
|
||||||
|
<span
|
||||||
|
className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${
|
||||||
|
service.configured
|
||||||
|
? "bg-blue-100 text-blue-800"
|
||||||
|
: "bg-yellow-100 text-yellow-800"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{service.configured ? "Configured" : "Not Configured"}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
<button
|
||||||
|
onClick={() => handleDeleteService(service.name)}
|
||||||
|
disabled={deleteServiceMutation.isPending}
|
||||||
|
className="p-2 text-red-600 hover:text-red-800 hover:bg-red-50 rounded-md transition-colors"
|
||||||
|
title={`Delete ${service.name} service`}
|
||||||
|
>
|
||||||
|
<Trash2 className="h-4 w-4" />
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Notification Settings */}
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center space-x-2 mb-4">
|
||||||
|
<Settings className="h-5 w-5 text-blue-600" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900">
|
||||||
|
Notification Settings
|
||||||
|
</h3>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{settings && (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div>
|
||||||
|
<h4 className="text-sm font-medium text-gray-700 mb-2">
|
||||||
|
Filters
|
||||||
|
</h4>
|
||||||
|
<div className="bg-gray-50 rounded-md p-4">
|
||||||
|
<div className="grid grid-cols-1 sm:grid-cols-2 gap-4">
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-medium text-gray-600 mb-1">
|
||||||
|
Case Insensitive Filters
|
||||||
|
</label>
|
||||||
|
<p className="text-sm text-gray-900">
|
||||||
|
{settings.filters.case_insensitive.length > 0
|
||||||
|
? settings.filters.case_insensitive.join(", ")
|
||||||
|
: "None"}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<label className="block text-xs font-medium text-gray-600 mb-1">
|
||||||
|
Case Sensitive Filters
|
||||||
|
</label>
|
||||||
|
<p className="text-sm text-gray-900">
|
||||||
|
{settings.filters.case_sensitive &&
|
||||||
|
settings.filters.case_sensitive.length > 0
|
||||||
|
? settings.filters.case_sensitive.join(", ")
|
||||||
|
: "None"}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="text-sm text-gray-600">
|
||||||
|
<p>
|
||||||
|
Configure notification settings through your backend API to
|
||||||
|
customize filters and service configurations.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
339
frontend/src/components/TransactionsList.tsx
Normal file
339
frontend/src/components/TransactionsList.tsx
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
import { useState } from "react";
|
||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import {
|
||||||
|
Filter,
|
||||||
|
Search,
|
||||||
|
TrendingUp,
|
||||||
|
TrendingDown,
|
||||||
|
Calendar,
|
||||||
|
RefreshCw,
|
||||||
|
AlertCircle,
|
||||||
|
X,
|
||||||
|
} from "lucide-react";
|
||||||
|
import { apiClient } from "../lib/api";
|
||||||
|
import { formatCurrency, formatDate } from "../lib/utils";
|
||||||
|
import LoadingSpinner from "./LoadingSpinner";
|
||||||
|
import type { Account, Transaction } from "../types/api";
|
||||||
|
|
||||||
|
export default function TransactionsList() {
|
||||||
|
const [searchTerm, setSearchTerm] = useState("");
|
||||||
|
const [selectedAccount, setSelectedAccount] = useState<string>("");
|
||||||
|
const [startDate, setStartDate] = useState("");
|
||||||
|
const [endDate, setEndDate] = useState("");
|
||||||
|
const [showFilters, setShowFilters] = useState(false);
|
||||||
|
|
||||||
|
const { data: accounts } = useQuery<Account[]>({
|
||||||
|
queryKey: ["accounts"],
|
||||||
|
queryFn: apiClient.getAccounts,
|
||||||
|
});
|
||||||
|
|
||||||
|
const {
|
||||||
|
data: transactions,
|
||||||
|
isLoading: transactionsLoading,
|
||||||
|
error: transactionsError,
|
||||||
|
refetch: refetchTransactions,
|
||||||
|
} = useQuery<Transaction[]>({
|
||||||
|
queryKey: ["transactions", selectedAccount, startDate, endDate],
|
||||||
|
queryFn: () =>
|
||||||
|
apiClient.getTransactions({
|
||||||
|
accountId: selectedAccount || undefined,
|
||||||
|
startDate: startDate || undefined,
|
||||||
|
endDate: endDate || undefined,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
const filteredTransactions = (transactions || []).filter((transaction) => {
|
||||||
|
// Additional validation (API client should have already filtered out invalid ones)
|
||||||
|
if (!transaction || !transaction.account_id) {
|
||||||
|
console.warn(
|
||||||
|
"Invalid transaction found after API filtering:",
|
||||||
|
transaction,
|
||||||
|
);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const description = transaction.description || "";
|
||||||
|
const creditorName = transaction.creditor_name || "";
|
||||||
|
const debtorName = transaction.debtor_name || "";
|
||||||
|
const reference = transaction.reference || "";
|
||||||
|
|
||||||
|
const matchesSearch =
|
||||||
|
searchTerm === "" ||
|
||||||
|
description.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||||
|
creditorName.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||||
|
debtorName.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||||
|
reference.toLowerCase().includes(searchTerm.toLowerCase());
|
||||||
|
|
||||||
|
return matchesSearch;
|
||||||
|
});
|
||||||
|
|
||||||
|
const clearFilters = () => {
|
||||||
|
setSearchTerm("");
|
||||||
|
setSelectedAccount("");
|
||||||
|
setStartDate("");
|
||||||
|
setEndDate("");
|
||||||
|
};
|
||||||
|
|
||||||
|
const hasActiveFilters =
|
||||||
|
searchTerm || selectedAccount || startDate || endDate;
|
||||||
|
|
||||||
|
if (transactionsLoading) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<LoadingSpinner message="Loading transactions..." />
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (transactionsError) {
|
||||||
|
return (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6">
|
||||||
|
<div className="flex items-center justify-center text-center">
|
||||||
|
<div>
|
||||||
|
<AlertCircle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
Failed to load transactions
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600 mb-4">
|
||||||
|
Unable to fetch transactions from the Leggen API.
|
||||||
|
</p>
|
||||||
|
<button
|
||||||
|
onClick={() => refetchTransactions()}
|
||||||
|
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||||
|
>
|
||||||
|
<RefreshCw className="h-4 w-4 mr-2" />
|
||||||
|
Retry
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-6">
|
||||||
|
{/* Filters */}
|
||||||
|
<div className="bg-white rounded-lg shadow">
|
||||||
|
<div className="px-6 py-4 border-b border-gray-200">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<h3 className="text-lg font-medium text-gray-900">Transactions</h3>
|
||||||
|
<div className="flex items-center space-x-2">
|
||||||
|
{hasActiveFilters && (
|
||||||
|
<button
|
||||||
|
onClick={clearFilters}
|
||||||
|
className="inline-flex items-center px-3 py-1 text-sm bg-gray-100 text-gray-700 rounded-full hover:bg-gray-200 transition-colors"
|
||||||
|
>
|
||||||
|
<X className="h-3 w-3 mr-1" />
|
||||||
|
Clear filters
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
<button
|
||||||
|
onClick={() => setShowFilters(!showFilters)}
|
||||||
|
className="inline-flex items-center px-3 py-2 bg-blue-100 text-blue-700 rounded-md hover:bg-blue-200 transition-colors"
|
||||||
|
>
|
||||||
|
<Filter className="h-4 w-4 mr-2" />
|
||||||
|
Filters
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{showFilters && (
|
||||||
|
<div className="px-6 py-4 border-b border-gray-200 bg-gray-50">
|
||||||
|
<div className="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||||
|
{/* Search */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||||
|
Search
|
||||||
|
</label>
|
||||||
|
<div className="relative">
|
||||||
|
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
value={searchTerm}
|
||||||
|
onChange={(e) => setSearchTerm(e.target.value)}
|
||||||
|
placeholder="Description, name, reference..."
|
||||||
|
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Account Filter */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||||
|
Account
|
||||||
|
</label>
|
||||||
|
<select
|
||||||
|
value={selectedAccount}
|
||||||
|
onChange={(e) => setSelectedAccount(e.target.value)}
|
||||||
|
className="w-full border border-gray-300 rounded-md px-3 py-2 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||||
|
>
|
||||||
|
<option value="">All accounts</option>
|
||||||
|
{accounts?.map((account) => (
|
||||||
|
<option key={account.id} value={account.id}>
|
||||||
|
{account.name || "Unnamed Account"} (
|
||||||
|
{account.institution_id})
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Start Date */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||||
|
Start Date
|
||||||
|
</label>
|
||||||
|
<div className="relative">
|
||||||
|
<Calendar className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||||
|
<input
|
||||||
|
type="date"
|
||||||
|
value={startDate}
|
||||||
|
onChange={(e) => setStartDate(e.target.value)}
|
||||||
|
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* End Date */}
|
||||||
|
<div>
|
||||||
|
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||||
|
End Date
|
||||||
|
</label>
|
||||||
|
<div className="relative">
|
||||||
|
<Calendar className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||||
|
<input
|
||||||
|
type="date"
|
||||||
|
value={endDate}
|
||||||
|
onChange={(e) => setEndDate(e.target.value)}
|
||||||
|
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Results Summary */}
|
||||||
|
<div className="px-6 py-3 bg-gray-50 border-b border-gray-200">
|
||||||
|
<p className="text-sm text-gray-600">
|
||||||
|
Showing {filteredTransactions.length} transaction
|
||||||
|
{filteredTransactions.length !== 1 ? "s" : ""}
|
||||||
|
{selectedAccount && accounts && (
|
||||||
|
<span className="ml-1">
|
||||||
|
for {accounts.find((acc) => acc.id === selectedAccount)?.name}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Transactions List */}
|
||||||
|
{filteredTransactions.length === 0 ? (
|
||||||
|
<div className="bg-white rounded-lg shadow p-6 text-center">
|
||||||
|
<div className="text-gray-400 mb-4">
|
||||||
|
<TrendingUp className="h-12 w-12 mx-auto" />
|
||||||
|
</div>
|
||||||
|
<h3 className="text-lg font-medium text-gray-900 mb-2">
|
||||||
|
No transactions found
|
||||||
|
</h3>
|
||||||
|
<p className="text-gray-600">
|
||||||
|
{hasActiveFilters
|
||||||
|
? "Try adjusting your filters to see more results."
|
||||||
|
: "No transactions are available for the selected criteria."}
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
<div className="bg-white rounded-lg shadow divide-y divide-gray-200">
|
||||||
|
{filteredTransactions.map((transaction) => {
|
||||||
|
const account = accounts?.find(
|
||||||
|
(acc) => acc.id === transaction.account_id,
|
||||||
|
);
|
||||||
|
const isPositive = transaction.amount > 0;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
key={
|
||||||
|
transaction.internal_transaction_id ||
|
||||||
|
`${transaction.account_id}-${transaction.date}-${transaction.amount}`
|
||||||
|
}
|
||||||
|
className="p-6 hover:bg-gray-50 transition-colors"
|
||||||
|
>
|
||||||
|
<div className="flex items-start justify-between">
|
||||||
|
<div className="flex-1">
|
||||||
|
<div className="flex items-start space-x-3">
|
||||||
|
<div
|
||||||
|
className={`p-2 rounded-full ${
|
||||||
|
isPositive ? "bg-green-100" : "bg-red-100"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{isPositive ? (
|
||||||
|
<TrendingUp className="h-4 w-4 text-green-600" />
|
||||||
|
) : (
|
||||||
|
<TrendingDown className="h-4 w-4 text-red-600" />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex-1">
|
||||||
|
<h4 className="text-sm font-medium text-gray-900 mb-1">
|
||||||
|
{transaction.description}
|
||||||
|
</h4>
|
||||||
|
|
||||||
|
<div className="text-xs text-gray-500 space-y-1">
|
||||||
|
{account && (
|
||||||
|
<p>
|
||||||
|
{account.name || "Unnamed Account"} •{" "}
|
||||||
|
{account.institution_id}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{(transaction.creditor_name ||
|
||||||
|
transaction.debtor_name) && (
|
||||||
|
<p>
|
||||||
|
{isPositive ? "From: " : "To: "}
|
||||||
|
{transaction.creditor_name ||
|
||||||
|
transaction.debtor_name}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{transaction.reference && (
|
||||||
|
<p>Ref: {transaction.reference}</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{transaction.internal_transaction_id && (
|
||||||
|
<p>ID: {transaction.internal_transaction_id}</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="text-right ml-4">
|
||||||
|
<p
|
||||||
|
className={`text-lg font-semibold ${
|
||||||
|
isPositive ? "text-green-600" : "text-red-600"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{isPositive ? "+" : ""}
|
||||||
|
{formatCurrency(transaction.amount, transaction.currency)}
|
||||||
|
</p>
|
||||||
|
<p className="text-sm text-gray-500">
|
||||||
|
{transaction.date
|
||||||
|
? formatDate(transaction.date)
|
||||||
|
: "No date"}
|
||||||
|
</p>
|
||||||
|
{transaction.booking_date &&
|
||||||
|
transaction.booking_date !== transaction.date && (
|
||||||
|
<p className="text-xs text-gray-400">
|
||||||
|
Booked: {formatDate(transaction.booking_date)}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
3
frontend/src/index.css
Normal file
3
frontend/src/index.css
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
@tailwind base;
|
||||||
|
@tailwind components;
|
||||||
|
@tailwind utilities;
|
||||||
130
frontend/src/lib/api.ts
Normal file
130
frontend/src/lib/api.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import axios from "axios";
|
||||||
|
import type {
|
||||||
|
Account,
|
||||||
|
Transaction,
|
||||||
|
Balance,
|
||||||
|
ApiResponse,
|
||||||
|
NotificationSettings,
|
||||||
|
NotificationTest,
|
||||||
|
NotificationService,
|
||||||
|
NotificationServicesResponse,
|
||||||
|
HealthData,
|
||||||
|
} from "../types/api";
|
||||||
|
|
||||||
|
// Use VITE_API_URL for development, relative URLs for production
|
||||||
|
const API_BASE_URL = import.meta.env.VITE_API_URL || "/api/v1";
|
||||||
|
|
||||||
|
const api = axios.create({
|
||||||
|
baseURL: API_BASE_URL,
|
||||||
|
headers: {
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const apiClient = {
|
||||||
|
// Get all accounts
|
||||||
|
getAccounts: async (): Promise<Account[]> => {
|
||||||
|
const response = await api.get<ApiResponse<Account[]>>("/accounts");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get account by ID
|
||||||
|
getAccount: async (id: string): Promise<Account> => {
|
||||||
|
const response = await api.get<ApiResponse<Account>>(`/accounts/${id}`);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get all balances
|
||||||
|
getBalances: async (): Promise<Balance[]> => {
|
||||||
|
const response = await api.get<ApiResponse<Balance[]>>("/balances");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get balances for specific account
|
||||||
|
getAccountBalances: async (accountId: string): Promise<Balance[]> => {
|
||||||
|
const response = await api.get<ApiResponse<Balance[]>>(
|
||||||
|
`/accounts/${accountId}/balances`,
|
||||||
|
);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get transactions with optional filters
|
||||||
|
getTransactions: async (params?: {
|
||||||
|
accountId?: string;
|
||||||
|
startDate?: string;
|
||||||
|
endDate?: string;
|
||||||
|
page?: number;
|
||||||
|
perPage?: number;
|
||||||
|
search?: string;
|
||||||
|
}): Promise<Transaction[]> => {
|
||||||
|
const queryParams = new URLSearchParams();
|
||||||
|
|
||||||
|
if (params?.accountId) queryParams.append("account_id", params.accountId);
|
||||||
|
if (params?.startDate) queryParams.append("start_date", params.startDate);
|
||||||
|
if (params?.endDate) queryParams.append("end_date", params.endDate);
|
||||||
|
if (params?.page) queryParams.append("page", params.page.toString());
|
||||||
|
if (params?.perPage)
|
||||||
|
queryParams.append("per_page", params.perPage.toString());
|
||||||
|
if (params?.search) queryParams.append("search", params.search);
|
||||||
|
|
||||||
|
const response = await api.get<ApiResponse<Transaction[]>>(
|
||||||
|
`/transactions?${queryParams.toString()}`,
|
||||||
|
);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get transaction by ID
|
||||||
|
getTransaction: async (id: string): Promise<Transaction> => {
|
||||||
|
const response = await api.get<ApiResponse<Transaction>>(
|
||||||
|
`/transactions/${id}`,
|
||||||
|
);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get notification settings
|
||||||
|
getNotificationSettings: async (): Promise<NotificationSettings> => {
|
||||||
|
const response = await api.get<ApiResponse<NotificationSettings>>(
|
||||||
|
"/notifications/settings",
|
||||||
|
);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Update notification settings
|
||||||
|
updateNotificationSettings: async (
|
||||||
|
settings: NotificationSettings,
|
||||||
|
): Promise<NotificationSettings> => {
|
||||||
|
const response = await api.put<ApiResponse<NotificationSettings>>(
|
||||||
|
"/notifications/settings",
|
||||||
|
settings,
|
||||||
|
);
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
|
||||||
|
// Test notification
|
||||||
|
testNotification: async (test: NotificationTest): Promise<void> => {
|
||||||
|
await api.post("/notifications/test", test);
|
||||||
|
},
|
||||||
|
|
||||||
|
// Get notification services
|
||||||
|
getNotificationServices: async (): Promise<NotificationService[]> => {
|
||||||
|
const response = await api.get<ApiResponse<NotificationServicesResponse>>(
|
||||||
|
"/notifications/services",
|
||||||
|
);
|
||||||
|
// Convert object to array format
|
||||||
|
const servicesData = response.data.data;
|
||||||
|
return Object.values(servicesData);
|
||||||
|
},
|
||||||
|
|
||||||
|
// Delete notification service
|
||||||
|
deleteNotificationService: async (service: string): Promise<void> => {
|
||||||
|
await api.delete(`/notifications/settings/${service}`);
|
||||||
|
},
|
||||||
|
|
||||||
|
// Health check
|
||||||
|
getHealth: async (): Promise<HealthData> => {
|
||||||
|
const response = await api.get<ApiResponse<HealthData>>("/health");
|
||||||
|
return response.data.data;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default apiClient;
|
||||||
62
frontend/src/lib/utils.ts
Normal file
62
frontend/src/lib/utils.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import { clsx, type ClassValue } from "clsx";
|
||||||
|
|
||||||
|
export function cn(...inputs: ClassValue[]) {
|
||||||
|
return clsx(inputs);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatCurrency(
|
||||||
|
amount: number,
|
||||||
|
currency: string = "EUR",
|
||||||
|
): string {
|
||||||
|
// Validate currency code - must be 3 letters and a valid ISO 4217 code
|
||||||
|
const validCurrency =
|
||||||
|
currency && /^[A-Z]{3}$/.test(currency) ? currency : "EUR";
|
||||||
|
|
||||||
|
try {
|
||||||
|
return new Intl.NumberFormat("en-US", {
|
||||||
|
style: "currency",
|
||||||
|
currency: validCurrency,
|
||||||
|
}).format(amount);
|
||||||
|
} catch {
|
||||||
|
// Fallback if currency is still invalid
|
||||||
|
console.warn(`Invalid currency code: ${currency}, falling back to EUR`);
|
||||||
|
return new Intl.NumberFormat("en-US", {
|
||||||
|
style: "currency",
|
||||||
|
currency: "EUR",
|
||||||
|
}).format(amount);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatDate(date: string): string {
|
||||||
|
if (!date) return "No date";
|
||||||
|
|
||||||
|
const parsedDate = new Date(date);
|
||||||
|
if (isNaN(parsedDate.getTime())) {
|
||||||
|
console.warn("Invalid date string:", date);
|
||||||
|
return "Invalid date";
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Intl.DateTimeFormat("en-US", {
|
||||||
|
year: "numeric",
|
||||||
|
month: "short",
|
||||||
|
day: "numeric",
|
||||||
|
}).format(parsedDate);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatDateTime(date: string): string {
|
||||||
|
if (!date) return "No date";
|
||||||
|
|
||||||
|
const parsedDate = new Date(date);
|
||||||
|
if (isNaN(parsedDate.getTime())) {
|
||||||
|
console.warn("Invalid date string:", date);
|
||||||
|
return "Invalid date";
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Intl.DateTimeFormat("en-US", {
|
||||||
|
year: "numeric",
|
||||||
|
month: "short",
|
||||||
|
day: "numeric",
|
||||||
|
hour: "2-digit",
|
||||||
|
minute: "2-digit",
|
||||||
|
}).format(parsedDate);
|
||||||
|
}
|
||||||
10
frontend/src/main.tsx
Normal file
10
frontend/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { StrictMode } from "react";
|
||||||
|
import { createRoot } from "react-dom/client";
|
||||||
|
import "./index.css";
|
||||||
|
import App from "./App.tsx";
|
||||||
|
|
||||||
|
createRoot(document.getElementById("root")!).render(
|
||||||
|
<StrictMode>
|
||||||
|
<App />
|
||||||
|
</StrictMode>,
|
||||||
|
);
|
||||||
135
frontend/src/types/api.ts
Normal file
135
frontend/src/types/api.ts
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
export interface AccountBalance {
|
||||||
|
amount: number;
|
||||||
|
currency: string;
|
||||||
|
balance_type: string;
|
||||||
|
last_change_date?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Account {
|
||||||
|
id: string;
|
||||||
|
institution_id: string;
|
||||||
|
status: string;
|
||||||
|
iban?: string;
|
||||||
|
name?: string;
|
||||||
|
currency?: string;
|
||||||
|
created: string;
|
||||||
|
last_accessed?: string;
|
||||||
|
balances: AccountBalance[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Transaction {
|
||||||
|
internal_transaction_id: string | null;
|
||||||
|
account_id: string;
|
||||||
|
amount: number;
|
||||||
|
currency: string;
|
||||||
|
description: string;
|
||||||
|
date: string;
|
||||||
|
status: string;
|
||||||
|
// Optional fields that may be present in some transactions
|
||||||
|
booking_date?: string;
|
||||||
|
value_date?: string;
|
||||||
|
creditor_name?: string;
|
||||||
|
debtor_name?: string;
|
||||||
|
reference?: string;
|
||||||
|
category?: string;
|
||||||
|
created_at?: string;
|
||||||
|
updated_at?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Type for raw transaction data from API (before sanitization)
|
||||||
|
export interface RawTransaction {
|
||||||
|
id?: string;
|
||||||
|
internal_id?: string;
|
||||||
|
account_id?: string;
|
||||||
|
amount?: number;
|
||||||
|
currency?: string;
|
||||||
|
description?: string;
|
||||||
|
transaction_date?: string;
|
||||||
|
booking_date?: string;
|
||||||
|
value_date?: string;
|
||||||
|
creditor_name?: string;
|
||||||
|
debtor_name?: string;
|
||||||
|
reference?: string;
|
||||||
|
category?: string;
|
||||||
|
created_at?: string;
|
||||||
|
updated_at?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Balance {
|
||||||
|
id: string;
|
||||||
|
account_id: string;
|
||||||
|
balance_amount: number;
|
||||||
|
balance_type: string;
|
||||||
|
currency: string;
|
||||||
|
reference_date: string;
|
||||||
|
created_at: string;
|
||||||
|
updated_at: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Bank {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
country_code: string;
|
||||||
|
logo_url?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ApiResponse<T> {
|
||||||
|
data: T;
|
||||||
|
message?: string;
|
||||||
|
success: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PaginatedResponse<T> {
|
||||||
|
data: T[];
|
||||||
|
total: number;
|
||||||
|
page: number;
|
||||||
|
per_page: number;
|
||||||
|
total_pages: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notification types
|
||||||
|
export interface DiscordConfig {
|
||||||
|
webhook: string;
|
||||||
|
enabled: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TelegramConfig {
|
||||||
|
token: string;
|
||||||
|
chat_id: number;
|
||||||
|
enabled: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NotificationFilters {
|
||||||
|
case_insensitive: string[];
|
||||||
|
case_sensitive?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NotificationSettings {
|
||||||
|
discord?: DiscordConfig;
|
||||||
|
telegram?: TelegramConfig;
|
||||||
|
filters: NotificationFilters;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NotificationTest {
|
||||||
|
service: string;
|
||||||
|
message?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NotificationService {
|
||||||
|
name: string;
|
||||||
|
enabled: boolean;
|
||||||
|
configured: boolean;
|
||||||
|
active?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NotificationServicesResponse {
|
||||||
|
[serviceName: string]: NotificationService;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Health check response data
|
||||||
|
export interface HealthData {
|
||||||
|
status: string;
|
||||||
|
config_loaded?: boolean;
|
||||||
|
message?: string;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
1
frontend/src/vite-env.d.ts
vendored
Normal file
1
frontend/src/vite-env.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/// <reference types="vite/client" />
|
||||||
8
frontend/tailwind.config.js
Normal file
8
frontend/tailwind.config.js
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/** @type {import('tailwindcss').Config} */
|
||||||
|
export default {
|
||||||
|
content: ["./index.html", "./src/**/*.{js,ts,jsx,tsx}"],
|
||||||
|
theme: {
|
||||||
|
extend: {},
|
||||||
|
},
|
||||||
|
plugins: [require("@tailwindcss/forms")],
|
||||||
|
};
|
||||||
27
frontend/tsconfig.app.json
Normal file
27
frontend/tsconfig.app.json
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||||
|
"target": "ES2022",
|
||||||
|
"useDefineForClassFields": true,
|
||||||
|
"lib": ["ES2022", "DOM", "DOM.Iterable"],
|
||||||
|
"module": "ESNext",
|
||||||
|
"skipLibCheck": true,
|
||||||
|
|
||||||
|
/* Bundler mode */
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"noEmit": true,
|
||||||
|
"jsx": "react-jsx",
|
||||||
|
|
||||||
|
/* Linting */
|
||||||
|
"strict": true,
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"erasableSyntaxOnly": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noUncheckedSideEffectImports": true
|
||||||
|
},
|
||||||
|
"include": ["src"]
|
||||||
|
}
|
||||||
7
frontend/tsconfig.json
Normal file
7
frontend/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"files": [],
|
||||||
|
"references": [
|
||||||
|
{ "path": "./tsconfig.app.json" },
|
||||||
|
{ "path": "./tsconfig.node.json" }
|
||||||
|
]
|
||||||
|
}
|
||||||
25
frontend/tsconfig.node.json
Normal file
25
frontend/tsconfig.node.json
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||||
|
"target": "ES2023",
|
||||||
|
"lib": ["ES2023"],
|
||||||
|
"module": "ESNext",
|
||||||
|
"skipLibCheck": true,
|
||||||
|
|
||||||
|
/* Bundler mode */
|
||||||
|
"moduleResolution": "bundler",
|
||||||
|
"allowImportingTsExtensions": true,
|
||||||
|
"verbatimModuleSyntax": true,
|
||||||
|
"moduleDetection": "force",
|
||||||
|
"noEmit": true,
|
||||||
|
|
||||||
|
/* Linting */
|
||||||
|
"strict": true,
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"erasableSyntaxOnly": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noUncheckedSideEffectImports": true
|
||||||
|
},
|
||||||
|
"include": ["vite.config.ts"]
|
||||||
|
}
|
||||||
7
frontend/vite.config.ts
Normal file
7
frontend/vite.config.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import { defineConfig } from "vite";
|
||||||
|
import react from "@vitejs/plugin-react";
|
||||||
|
|
||||||
|
// https://vite.dev/config/
|
||||||
|
export default defineConfig({
|
||||||
|
plugins: [react()],
|
||||||
|
});
|
||||||
0
leggen/__init__.py
Normal file
0
leggen/__init__.py
Normal file
188
leggen/api_client.py
Normal file
188
leggen/api_client.py
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
import os
|
||||||
|
import requests
|
||||||
|
from typing import Dict, Any, Optional, List, Union
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
|
from leggen.utils.text import error
|
||||||
|
|
||||||
|
|
||||||
|
class LeggendAPIClient:
|
||||||
|
"""Client for communicating with the leggend FastAPI service"""
|
||||||
|
|
||||||
|
base_url: str
|
||||||
|
|
||||||
|
def __init__(self, base_url: Optional[str] = None):
|
||||||
|
self.base_url = (
|
||||||
|
base_url
|
||||||
|
or os.environ.get("LEGGEND_API_URL", "http://localhost:8000")
|
||||||
|
or "http://localhost:8000"
|
||||||
|
)
|
||||||
|
self.session = requests.Session()
|
||||||
|
self.session.headers.update(
|
||||||
|
{"Content-Type": "application/json", "Accept": "application/json"}
|
||||||
|
)
|
||||||
|
|
||||||
|
def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]:
|
||||||
|
"""Make HTTP request to the API"""
|
||||||
|
url = urljoin(self.base_url, endpoint)
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = self.session.request(method, url, **kwargs)
|
||||||
|
response.raise_for_status()
|
||||||
|
return response.json()
|
||||||
|
except requests.exceptions.ConnectionError:
|
||||||
|
error("Could not connect to leggend service. Is it running?")
|
||||||
|
error(f"Trying to connect to: {self.base_url}")
|
||||||
|
raise
|
||||||
|
except requests.exceptions.HTTPError as e:
|
||||||
|
error(f"API request failed: {e}")
|
||||||
|
if response.text:
|
||||||
|
try:
|
||||||
|
error_data = response.json()
|
||||||
|
error(f"Error details: {error_data.get('detail', 'Unknown error')}")
|
||||||
|
except Exception:
|
||||||
|
error(f"Response: {response.text}")
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
error(f"Unexpected error: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def health_check(self) -> bool:
|
||||||
|
"""Check if the leggend service is healthy"""
|
||||||
|
try:
|
||||||
|
response = self._make_request("GET", "/health")
|
||||||
|
return response.get("status") == "healthy"
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Bank endpoints
|
||||||
|
def get_institutions(self, country: str = "PT") -> List[Dict[str, Any]]:
|
||||||
|
"""Get bank institutions for a country"""
|
||||||
|
response = self._make_request(
|
||||||
|
"GET", "/api/v1/banks/institutions", params={"country": country}
|
||||||
|
)
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
def connect_to_bank(
|
||||||
|
self, institution_id: str, redirect_url: str = "http://localhost:8000/"
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Connect to a bank"""
|
||||||
|
response = self._make_request(
|
||||||
|
"POST",
|
||||||
|
"/api/v1/banks/connect",
|
||||||
|
json={"institution_id": institution_id, "redirect_url": redirect_url},
|
||||||
|
)
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def get_bank_status(self) -> List[Dict[str, Any]]:
|
||||||
|
"""Get bank connection status"""
|
||||||
|
response = self._make_request("GET", "/api/v1/banks/status")
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
def get_supported_countries(self) -> List[Dict[str, Any]]:
|
||||||
|
"""Get supported countries"""
|
||||||
|
response = self._make_request("GET", "/api/v1/banks/countries")
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
# Account endpoints
|
||||||
|
def get_accounts(self) -> List[Dict[str, Any]]:
|
||||||
|
"""Get all accounts"""
|
||||||
|
response = self._make_request("GET", "/api/v1/accounts")
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
def get_account_details(self, account_id: str) -> Dict[str, Any]:
|
||||||
|
"""Get account details"""
|
||||||
|
response = self._make_request("GET", f"/api/v1/accounts/{account_id}")
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def get_account_balances(self, account_id: str) -> List[Dict[str, Any]]:
|
||||||
|
"""Get account balances"""
|
||||||
|
response = self._make_request("GET", f"/api/v1/accounts/{account_id}/balances")
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
def get_account_transactions(
|
||||||
|
self, account_id: str, limit: int = 100, summary_only: bool = False
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""Get account transactions"""
|
||||||
|
response = self._make_request(
|
||||||
|
"GET",
|
||||||
|
f"/api/v1/accounts/{account_id}/transactions",
|
||||||
|
params={"limit": limit, "summary_only": summary_only},
|
||||||
|
)
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
# Transaction endpoints
|
||||||
|
def get_all_transactions(
|
||||||
|
self, limit: int = 100, summary_only: bool = True, **filters
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""Get all transactions with optional filters"""
|
||||||
|
params = {"limit": limit, "summary_only": summary_only}
|
||||||
|
params.update(filters)
|
||||||
|
|
||||||
|
response = self._make_request("GET", "/api/v1/transactions", params=params)
|
||||||
|
return response.get("data", [])
|
||||||
|
|
||||||
|
def get_transaction_stats(
|
||||||
|
self, days: int = 30, account_id: Optional[str] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get transaction statistics"""
|
||||||
|
params: Dict[str, Union[int, str]] = {"days": days}
|
||||||
|
if account_id:
|
||||||
|
params["account_id"] = account_id
|
||||||
|
|
||||||
|
response = self._make_request(
|
||||||
|
"GET", "/api/v1/transactions/stats", params=params
|
||||||
|
)
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
# Sync endpoints
|
||||||
|
def get_sync_status(self) -> Dict[str, Any]:
|
||||||
|
"""Get sync status"""
|
||||||
|
response = self._make_request("GET", "/api/v1/sync/status")
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def trigger_sync(
|
||||||
|
self, account_ids: Optional[List[str]] = None, force: bool = False
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Trigger a sync"""
|
||||||
|
data: Dict[str, Union[bool, List[str]]] = {"force": force}
|
||||||
|
if account_ids:
|
||||||
|
data["account_ids"] = account_ids
|
||||||
|
|
||||||
|
response = self._make_request("POST", "/api/v1/sync", json=data)
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def sync_now(
|
||||||
|
self, account_ids: Optional[List[str]] = None, force: bool = False
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Run sync synchronously"""
|
||||||
|
data: Dict[str, Union[bool, List[str]]] = {"force": force}
|
||||||
|
if account_ids:
|
||||||
|
data["account_ids"] = account_ids
|
||||||
|
|
||||||
|
response = self._make_request("POST", "/api/v1/sync/now", json=data)
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def get_scheduler_config(self) -> Dict[str, Any]:
|
||||||
|
"""Get scheduler configuration"""
|
||||||
|
response = self._make_request("GET", "/api/v1/sync/scheduler")
|
||||||
|
return response.get("data", {})
|
||||||
|
|
||||||
|
def update_scheduler_config(
|
||||||
|
self,
|
||||||
|
enabled: bool = True,
|
||||||
|
hour: int = 3,
|
||||||
|
minute: int = 0,
|
||||||
|
cron: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Update scheduler configuration"""
|
||||||
|
data: Dict[str, Union[bool, int, str]] = {
|
||||||
|
"enabled": enabled,
|
||||||
|
"hour": hour,
|
||||||
|
"minute": minute,
|
||||||
|
}
|
||||||
|
if cron:
|
||||||
|
data["cron"] = cron
|
||||||
|
|
||||||
|
response = self._make_request("PUT", "/api/v1/sync/scheduler", json=data)
|
||||||
|
return response.get("data", {})
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.main import cli
|
from leggen.main import cli
|
||||||
from leggen.utils.network import get
|
from leggen.api_client import LeggendAPIClient
|
||||||
from leggen.utils.text import datefmt, print_table
|
from leggen.utils.text import datefmt, print_table
|
||||||
|
|
||||||
|
|
||||||
@@ -11,36 +11,33 @@ def balances(ctx: click.Context):
|
|||||||
"""
|
"""
|
||||||
List balances of all connected accounts
|
List balances of all connected accounts
|
||||||
"""
|
"""
|
||||||
|
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||||
|
|
||||||
res = get(ctx, "/requisitions/")
|
# Check if leggend service is available
|
||||||
accounts = []
|
if not api_client.health_check():
|
||||||
for r in res.get("results", []):
|
click.echo(
|
||||||
accounts += r.get("accounts", [])
|
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
accounts = api_client.get_accounts()
|
||||||
|
|
||||||
all_balances = []
|
all_balances = []
|
||||||
for account in accounts:
|
for account in accounts:
|
||||||
account_ballances = get(ctx, f"/accounts/{account}/balances/").get(
|
for balance in account.get("balances", []):
|
||||||
"balances", []
|
amount = round(float(balance["amount"]), 2)
|
||||||
)
|
symbol = "€" if balance["currency"] == "EUR" else f" {balance['currency']}"
|
||||||
for balance in account_ballances:
|
|
||||||
balance_amount = balance["balanceAmount"]
|
|
||||||
amount = round(float(balance_amount["amount"]), 2)
|
|
||||||
symbol = (
|
|
||||||
"€"
|
|
||||||
if balance_amount["currency"] == "EUR"
|
|
||||||
else f" {balance_amount['currency']}"
|
|
||||||
)
|
|
||||||
amount_str = f"{amount}{symbol}"
|
amount_str = f"{amount}{symbol}"
|
||||||
date = (
|
date = (
|
||||||
datefmt(balance.get("lastChangeDateTime"))
|
datefmt(balance.get("last_change_date"))
|
||||||
if balance.get("lastChangeDateTime")
|
if balance.get("last_change_date")
|
||||||
else ""
|
else ""
|
||||||
)
|
)
|
||||||
all_balances.append(
|
all_balances.append(
|
||||||
{
|
{
|
||||||
"Account": account,
|
"Account": account["id"],
|
||||||
"Amount": amount_str,
|
"Amount": amount_str,
|
||||||
"Type": balance["balanceType"],
|
"Type": balance["balance_type"],
|
||||||
"Last change at": date,
|
"Last change at": date,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,36 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
import click
|
|
||||||
|
|
||||||
from leggen.main import cli
|
|
||||||
|
|
||||||
cmd_folder = os.path.abspath(os.path.dirname(__file__))
|
|
||||||
|
|
||||||
|
|
||||||
class BankGroup(click.Group):
|
|
||||||
def list_commands(self, ctx):
|
|
||||||
rv = []
|
|
||||||
for filename in os.listdir(cmd_folder):
|
|
||||||
if filename.endswith(".py") and not filename.startswith("__init__"):
|
|
||||||
if filename == "list_banks.py":
|
|
||||||
rv.append("list")
|
|
||||||
else:
|
|
||||||
rv.append(filename[:-3])
|
|
||||||
rv.sort()
|
|
||||||
return rv
|
|
||||||
|
|
||||||
def get_command(self, ctx, name):
|
|
||||||
try:
|
|
||||||
if name == "list":
|
|
||||||
name = "list_banks"
|
|
||||||
mod = __import__(f"leggen.commands.bank.{name}", None, None, [name])
|
|
||||||
except ImportError:
|
|
||||||
return
|
|
||||||
return getattr(mod, name)
|
|
||||||
|
|
||||||
|
|
||||||
@cli.group(cls=BankGroup)
|
|
||||||
@click.pass_context
|
|
||||||
def bank(ctx):
|
|
||||||
"""Manage banks connections"""
|
|
||||||
return
|
|
||||||
@@ -1,9 +1,9 @@
|
|||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.main import cli
|
from leggen.main import cli
|
||||||
|
from leggen.api_client import LeggendAPIClient
|
||||||
from leggen.utils.disk import save_file
|
from leggen.utils.disk import save_file
|
||||||
from leggen.utils.network import get, post
|
from leggen.utils.text import info, print_table, warning, success
|
||||||
from leggen.utils.text import info, print_table, warning
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
@@ -12,34 +12,70 @@ def add(ctx):
|
|||||||
"""
|
"""
|
||||||
Connect to a bank
|
Connect to a bank
|
||||||
"""
|
"""
|
||||||
country = click.prompt(
|
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||||
"Bank Country",
|
|
||||||
type=click.Choice(["PT", "GB"], case_sensitive=True),
|
|
||||||
default="PT",
|
|
||||||
)
|
|
||||||
info(f"Getting bank list for country: {country}")
|
|
||||||
banks = get(ctx, "/institutions/", {"country": country})
|
|
||||||
filtered_banks = [
|
|
||||||
{
|
|
||||||
"id": bank["id"],
|
|
||||||
"name": bank["name"],
|
|
||||||
"max_transaction_days": bank["transaction_total_days"],
|
|
||||||
}
|
|
||||||
for bank in banks
|
|
||||||
]
|
|
||||||
print_table(filtered_banks)
|
|
||||||
allowed_ids = [str(bank["id"]) for bank in banks]
|
|
||||||
bank_id = click.prompt("Bank ID", type=click.Choice(allowed_ids))
|
|
||||||
click.confirm("Do you agree to connect to this bank?", abort=True)
|
|
||||||
|
|
||||||
info(f"Connecting to bank with ID: {bank_id}")
|
# Check if leggend service is available
|
||||||
|
if not api_client.health_check():
|
||||||
|
click.echo(
|
||||||
|
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
res = post(
|
try:
|
||||||
ctx,
|
# Get supported countries
|
||||||
"/requisitions/",
|
countries = api_client.get_supported_countries()
|
||||||
{"institution_id": bank_id, "redirect": "http://localhost:8000/"},
|
country_codes = [c["code"] for c in countries]
|
||||||
)
|
|
||||||
|
|
||||||
save_file(f"req_{res['id']}.json", res)
|
country = click.prompt(
|
||||||
|
"Bank Country",
|
||||||
|
type=click.Choice(country_codes, case_sensitive=True),
|
||||||
|
default="PT",
|
||||||
|
)
|
||||||
|
|
||||||
warning(f"Please open the following URL in your browser to accept: {res['link']}")
|
info(f"Getting bank list for country: {country}")
|
||||||
|
banks = api_client.get_institutions(country)
|
||||||
|
|
||||||
|
if not banks:
|
||||||
|
warning(f"No banks available for country {country}")
|
||||||
|
return
|
||||||
|
|
||||||
|
filtered_banks = [
|
||||||
|
{
|
||||||
|
"id": bank["id"],
|
||||||
|
"name": bank["name"],
|
||||||
|
"max_transaction_days": bank["transaction_total_days"],
|
||||||
|
}
|
||||||
|
for bank in banks
|
||||||
|
]
|
||||||
|
print_table(filtered_banks)
|
||||||
|
|
||||||
|
allowed_ids = [str(bank["id"]) for bank in banks]
|
||||||
|
bank_id = click.prompt("Bank ID", type=click.Choice(allowed_ids))
|
||||||
|
|
||||||
|
# Show bank details
|
||||||
|
selected_bank = next(bank for bank in banks if bank["id"] == bank_id)
|
||||||
|
info(f"Selected bank: {selected_bank['name']}")
|
||||||
|
|
||||||
|
click.confirm("Do you agree to connect to this bank?", abort=True)
|
||||||
|
|
||||||
|
info(f"Connecting to bank with ID: {bank_id}")
|
||||||
|
|
||||||
|
# Connect to bank via API
|
||||||
|
result = api_client.connect_to_bank(bank_id, "http://localhost:8000/")
|
||||||
|
|
||||||
|
# Save requisition details
|
||||||
|
save_file(f"req_{result['id']}.json", result)
|
||||||
|
|
||||||
|
success("Bank connection request created successfully!")
|
||||||
|
warning(
|
||||||
|
"Please open the following URL in your browser to complete the authorization:"
|
||||||
|
)
|
||||||
|
click.echo(f"\n{result['link']}\n")
|
||||||
|
|
||||||
|
info(f"Requisition ID: {result['id']}")
|
||||||
|
info(
|
||||||
|
"After completing the authorization, you can check the connection status with 'leggen status'"
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"Error: Failed to connect to bank: {str(e)}")
|
||||||
|
|||||||
26
leggen/commands/bank/delete.py
Normal file
26
leggen/commands/bank/delete.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
from leggen.main import cli
|
||||||
|
from leggen.utils.text import info, success
|
||||||
|
|
||||||
|
|
||||||
|
@cli.command()
|
||||||
|
@click.argument("requisition_id", type=str, required=True, metavar="REQUISITION_ID")
|
||||||
|
@click.pass_context
|
||||||
|
def delete(ctx, requisition_id: str):
|
||||||
|
"""
|
||||||
|
Delete bank connection
|
||||||
|
|
||||||
|
REQUISITION_ID: The ID of the Bank Requisition to delete
|
||||||
|
|
||||||
|
Check `leggen status` to get the REQUISITION_ID
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
|
||||||
|
info(f"Deleting Bank Requisition: {requisition_id}")
|
||||||
|
|
||||||
|
api_url = ctx.obj.get("api_url", "http://localhost:8000")
|
||||||
|
res = requests.delete(f"{api_url}/requisitions/{requisition_id}")
|
||||||
|
res.raise_for_status()
|
||||||
|
|
||||||
|
success(f"Bank Requisition {requisition_id} deleted")
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
import click
|
|
||||||
|
|
||||||
from leggen.main import cli
|
|
||||||
from leggen.utils.auth import get_token
|
|
||||||
from leggen.utils.config import save_config
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
|
||||||
@click.option(
|
|
||||||
"--api-key", prompt=True, help="GoCardless API Key", envvar="LEGGEN_GC_API_KEY"
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--api-secret",
|
|
||||||
prompt=True,
|
|
||||||
help="GoCardless API Secret",
|
|
||||||
hide_input=True,
|
|
||||||
envvar="LEGGEN_GC_API_SECRET",
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--api-url",
|
|
||||||
default="https://bankaccountdata.gocardless.com/api/v2",
|
|
||||||
help="GoCardless API URL",
|
|
||||||
show_default=True,
|
|
||||||
envvar="LEGGEN_GC_API_URL",
|
|
||||||
)
|
|
||||||
@click.option("--mongo-uri", prompt=True, help="MongoDB URI", envvar="LEGGEN_MONGO_URI")
|
|
||||||
@click.pass_context
|
|
||||||
def init(ctx: click.Context, api_key, api_secret, api_url, mongo_uri):
|
|
||||||
"""
|
|
||||||
Create configuration file
|
|
||||||
"""
|
|
||||||
config = {
|
|
||||||
"api_key": api_key,
|
|
||||||
"api_secret": api_secret,
|
|
||||||
"api_url": api_url,
|
|
||||||
"mongo_uri": mongo_uri,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Just make sure this API credentials are valid
|
|
||||||
# if so, it will save the token in the auth file
|
|
||||||
_ = get_token(config)
|
|
||||||
|
|
||||||
# Save the configuration
|
|
||||||
save_config(config)
|
|
||||||
@@ -1,8 +1,7 @@
|
|||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.main import cli
|
from leggen.main import cli
|
||||||
from leggen.utils.gocardless import REQUISITION_STATUS
|
from leggen.api_client import LeggendAPIClient
|
||||||
from leggen.utils.network import get
|
|
||||||
from leggen.utils.text import datefmt, echo, info, print_table
|
from leggen.utils.text import datefmt, echo, info, print_table
|
||||||
|
|
||||||
|
|
||||||
@@ -12,33 +11,46 @@ def status(ctx: click.Context):
|
|||||||
"""
|
"""
|
||||||
List all connected banks and their status
|
List all connected banks and their status
|
||||||
"""
|
"""
|
||||||
|
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||||
|
|
||||||
res = get(ctx, "/requisitions/")
|
# Check if leggend service is available
|
||||||
|
if not api_client.health_check():
|
||||||
|
click.echo(
|
||||||
|
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Get bank connection status
|
||||||
|
bank_connections = api_client.get_bank_status()
|
||||||
requisitions = []
|
requisitions = []
|
||||||
accounts = []
|
for conn in bank_connections:
|
||||||
for r in res["results"]:
|
|
||||||
requisitions.append(
|
requisitions.append(
|
||||||
{
|
{
|
||||||
"Bank": r["institution_id"],
|
"Bank": conn["bank_id"],
|
||||||
"Status": REQUISITION_STATUS.get(r["status"], "UNKNOWN"),
|
"Status": conn["status_display"],
|
||||||
"Created at": datefmt(r["created"]),
|
"Created at": datefmt(conn["created_at"]),
|
||||||
|
"Requisition ID": conn["requisition_id"],
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
accounts += r.get("accounts", [])
|
|
||||||
info("Banks")
|
info("Banks")
|
||||||
print_table(requisitions)
|
print_table(requisitions)
|
||||||
|
|
||||||
|
# Get account details
|
||||||
|
accounts = api_client.get_accounts()
|
||||||
account_details = []
|
account_details = []
|
||||||
for account in accounts:
|
for account in accounts:
|
||||||
details = get(ctx, f"/accounts/{account}")
|
|
||||||
account_details.append(
|
account_details.append(
|
||||||
{
|
{
|
||||||
"ID": details["id"],
|
"ID": account["id"],
|
||||||
"Bank": details["institution_id"],
|
"Bank": account["institution_id"],
|
||||||
"Status": details["status"],
|
"Status": account["status"],
|
||||||
"IBAN": details.get("iban", "N/A"),
|
"IBAN": account.get("iban", "N/A"),
|
||||||
"Created at": datefmt(details["created"]),
|
"Created at": datefmt(account["created"]),
|
||||||
"Last accessed at": datefmt(details["last_accessed"]),
|
"Last accessed at": (
|
||||||
|
datefmt(account["last_accessed"])
|
||||||
|
if account.get("last_accessed")
|
||||||
|
else "N/A"
|
||||||
|
),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
echo()
|
echo()
|
||||||
|
|||||||
@@ -1,74 +1,61 @@
|
|||||||
import click
|
import click
|
||||||
from pymongo import MongoClient
|
|
||||||
from pymongo.errors import DuplicateKeyError
|
|
||||||
|
|
||||||
from leggen.main import cli
|
from leggen.main import cli
|
||||||
from leggen.utils.network import get
|
from leggen.api_client import LeggendAPIClient
|
||||||
from leggen.utils.text import error, info, success, warning
|
from leggen.utils.text import error, info, success
|
||||||
|
|
||||||
|
|
||||||
def save_transactions(ctx: click.Context, account: str):
|
|
||||||
info(f"[{account}] Getting transactions")
|
|
||||||
all_transactions = []
|
|
||||||
account_transactions = get(ctx, f"/accounts/{account}/transactions/").get(
|
|
||||||
"transactions", []
|
|
||||||
)
|
|
||||||
|
|
||||||
for transaction in account_transactions.get("booked", []):
|
|
||||||
transaction["accountId"] = account
|
|
||||||
transaction["transactionStatus"] = "booked"
|
|
||||||
all_transactions.append(transaction)
|
|
||||||
|
|
||||||
for transaction in account_transactions.get("pending", []):
|
|
||||||
transaction["accountId"] = account
|
|
||||||
transaction["transactionStatus"] = "pending"
|
|
||||||
all_transactions.append(transaction)
|
|
||||||
|
|
||||||
info(f"[{account}] Fetched {len(all_transactions)} transactions, saving to MongoDB")
|
|
||||||
|
|
||||||
# Connect to MongoDB
|
|
||||||
mongo_uri = ctx.obj["mongo_uri"]
|
|
||||||
client = MongoClient(mongo_uri)
|
|
||||||
db = client["leggen"]
|
|
||||||
transactions_collection = db["transactions"]
|
|
||||||
|
|
||||||
# Create a unique index on transactionId
|
|
||||||
transactions_collection.create_index("transactionId", unique=True)
|
|
||||||
|
|
||||||
# Insert transactions into MongoDB
|
|
||||||
new_transactions_count = 0
|
|
||||||
duplicates_count = 0
|
|
||||||
|
|
||||||
for transaction in all_transactions:
|
|
||||||
try:
|
|
||||||
transactions_collection.insert_one(transaction)
|
|
||||||
new_transactions_count += 1
|
|
||||||
except DuplicateKeyError:
|
|
||||||
# A transaction with the same ID already exists, skip insertion
|
|
||||||
duplicates_count += 1
|
|
||||||
|
|
||||||
success(f"[{account}] Inserted {new_transactions_count} new transactions")
|
|
||||||
if duplicates_count:
|
|
||||||
warning(f"[{account}] Skipped {duplicates_count} duplicate transactions")
|
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
|
@click.option("--wait", is_flag=True, help="Wait for sync to complete (synchronous)")
|
||||||
|
@click.option("--force", is_flag=True, help="Force sync even if already running")
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def sync(ctx: click.Context):
|
def sync(ctx: click.Context, wait: bool, force: bool):
|
||||||
"""
|
"""
|
||||||
Sync all transactions with database
|
Sync all transactions with database
|
||||||
"""
|
"""
|
||||||
info("Getting accounts details")
|
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||||
res = get(ctx, "/requisitions/")
|
|
||||||
accounts = []
|
|
||||||
for r in res.get("results", []):
|
|
||||||
accounts += r.get("accounts", [])
|
|
||||||
accounts = list(set(accounts))
|
|
||||||
|
|
||||||
info(f"Syncing transactions for {len(accounts)} accounts")
|
# Check if leggend service is available
|
||||||
|
if not api_client.health_check():
|
||||||
|
error("Cannot connect to leggend service. Please ensure it's running.")
|
||||||
|
return
|
||||||
|
|
||||||
for account in accounts:
|
try:
|
||||||
try:
|
if wait:
|
||||||
save_transactions(ctx, account)
|
# Run sync synchronously and wait for completion
|
||||||
except Exception as e:
|
info("Starting synchronous sync...")
|
||||||
error(f"[{account}] Error: Sync failed, skipping account. Exception: {e}")
|
result = api_client.sync_now(force=force)
|
||||||
|
|
||||||
|
if result.get("success"):
|
||||||
|
success("Sync completed successfully!")
|
||||||
|
info(f"Accounts processed: {result.get('accounts_processed', 0)}")
|
||||||
|
info(f"Transactions added: {result.get('transactions_added', 0)}")
|
||||||
|
info(f"Balances updated: {result.get('balances_updated', 0)}")
|
||||||
|
if result.get("duration_seconds"):
|
||||||
|
info(f"Duration: {result['duration_seconds']:.2f} seconds")
|
||||||
|
|
||||||
|
if result.get("errors"):
|
||||||
|
error(f"Errors encountered: {len(result['errors'])}")
|
||||||
|
for err in result["errors"]:
|
||||||
|
error(f" - {err}")
|
||||||
|
else:
|
||||||
|
error("Sync failed")
|
||||||
|
if result.get("errors"):
|
||||||
|
for err in result["errors"]:
|
||||||
|
error(f" - {err}")
|
||||||
|
else:
|
||||||
|
# Trigger async sync
|
||||||
|
info("Starting background sync...")
|
||||||
|
result = api_client.trigger_sync(force=force)
|
||||||
|
|
||||||
|
if result.get("sync_started"):
|
||||||
|
success("Sync started successfully in the background")
|
||||||
|
info(
|
||||||
|
"Use 'leggen sync --wait' to run synchronously or check status with API"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
error("Failed to start sync")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error(f"Sync failed: {str(e)}")
|
||||||
|
return
|
||||||
|
|||||||
@@ -1,30 +1,95 @@
|
|||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.main import cli
|
from leggen.main import cli
|
||||||
from leggen.utils.network import get
|
from leggen.api_client import LeggendAPIClient
|
||||||
from leggen.utils.text import print_table
|
from leggen.utils.text import datefmt, info, print_table
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
@click.argument("account", type=str)
|
@click.option("-a", "--account", type=str, help="Account ID")
|
||||||
|
@click.option(
|
||||||
|
"-l", "--limit", type=int, default=50, help="Number of transactions to show"
|
||||||
|
)
|
||||||
|
@click.option("--full", is_flag=True, help="Show full transaction details")
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def transactions(ctx: click.Context, account: str):
|
def transactions(ctx: click.Context, account: str, limit: int, full: bool):
|
||||||
"""
|
"""
|
||||||
List transactions for an account
|
List transactions
|
||||||
|
|
||||||
ACCOUNT is the account id, see 'leggen status' for the account ids
|
By default, this command lists all transactions for all accounts.
|
||||||
|
|
||||||
|
If the --account option is used, it will only list transactions for that account.
|
||||||
"""
|
"""
|
||||||
all_transactions = []
|
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||||
account_transactions = get(ctx, f"/accounts/{account}/transactions/").get(
|
|
||||||
"transactions", []
|
|
||||||
)
|
|
||||||
|
|
||||||
for transaction in account_transactions.get("booked", []):
|
# Check if leggend service is available
|
||||||
transaction["TYPE"] = "booked"
|
if not api_client.health_check():
|
||||||
all_transactions.append(transaction)
|
click.echo(
|
||||||
|
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
for transaction in account_transactions.get("pending", []):
|
try:
|
||||||
transaction["TYPE"] = "pending"
|
if account:
|
||||||
all_transactions.append(transaction)
|
# Get transactions for specific account
|
||||||
|
account_details = api_client.get_account_details(account)
|
||||||
|
transactions_data = api_client.get_account_transactions(
|
||||||
|
account, limit=limit, summary_only=not full
|
||||||
|
)
|
||||||
|
|
||||||
print_table(all_transactions)
|
info(f"Bank: {account_details['institution_id']}")
|
||||||
|
info(f"IBAN: {account_details.get('iban', 'N/A')}")
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Get all transactions
|
||||||
|
transactions_data = api_client.get_all_transactions(
|
||||||
|
limit=limit, summary_only=not full, account_id=account
|
||||||
|
)
|
||||||
|
|
||||||
|
# Format transactions for display
|
||||||
|
if full:
|
||||||
|
# Full transaction details
|
||||||
|
formatted_transactions = []
|
||||||
|
for txn in transactions_data:
|
||||||
|
# Handle optional internal_transaction_id
|
||||||
|
txn_id = txn.get("internal_transaction_id")
|
||||||
|
txn_id_display = txn_id[:12] + "..." if txn_id else "N/A"
|
||||||
|
|
||||||
|
formatted_transactions.append(
|
||||||
|
{
|
||||||
|
"ID": txn_id_display,
|
||||||
|
"Date": datefmt(txn["transaction_date"]),
|
||||||
|
"Description": txn["description"][:50] + "..."
|
||||||
|
if len(txn["description"]) > 50
|
||||||
|
else txn["description"],
|
||||||
|
"Amount": f"{txn['transaction_value']:.2f} {txn['transaction_currency']}",
|
||||||
|
"Status": txn["transaction_status"].upper(),
|
||||||
|
"Account": txn["account_id"][:8] + "...",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Summary view
|
||||||
|
formatted_transactions = []
|
||||||
|
for txn in transactions_data:
|
||||||
|
# Handle optional internal_transaction_id
|
||||||
|
txn_id = txn.get("internal_transaction_id")
|
||||||
|
|
||||||
|
formatted_transactions.append(
|
||||||
|
{
|
||||||
|
"Date": datefmt(txn["date"]),
|
||||||
|
"Description": txn["description"][:60] + "..."
|
||||||
|
if len(txn["description"]) > 60
|
||||||
|
else txn["description"],
|
||||||
|
"Amount": f"{txn['amount']:.2f} {txn['currency']}",
|
||||||
|
"Status": txn["status"].upper(),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if formatted_transactions:
|
||||||
|
print_table(formatted_transactions)
|
||||||
|
info(f"Showing {len(formatted_transactions)} transactions")
|
||||||
|
else:
|
||||||
|
info("No transactions found")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
click.echo(f"Error: Failed to get transactions: {str(e)}")
|
||||||
|
|||||||
549
leggen/database/sqlite.py
Normal file
549
leggen/database/sqlite.py
Normal file
@@ -0,0 +1,549 @@
|
|||||||
|
import json
|
||||||
|
import sqlite3
|
||||||
|
from sqlite3 import IntegrityError
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
from leggen.utils.text import success, warning
|
||||||
|
|
||||||
|
|
||||||
|
def persist_balances(ctx: click.Context, balance: dict):
|
||||||
|
# Connect to SQLite database
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Create the accounts table if it doesn't exist
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS accounts (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
institution_id TEXT,
|
||||||
|
status TEXT,
|
||||||
|
iban TEXT,
|
||||||
|
name TEXT,
|
||||||
|
currency TEXT,
|
||||||
|
created DATETIME,
|
||||||
|
last_accessed DATETIME,
|
||||||
|
last_updated DATETIME
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for accounts table
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_accounts_institution_id
|
||||||
|
ON accounts(institution_id)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_accounts_status
|
||||||
|
ON accounts(status)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create the balances table if it doesn't exist
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS balances (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
account_id TEXT,
|
||||||
|
bank TEXT,
|
||||||
|
status TEXT,
|
||||||
|
iban TEXT,
|
||||||
|
amount REAL,
|
||||||
|
currency TEXT,
|
||||||
|
type TEXT,
|
||||||
|
timestamp DATETIME
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for better performance
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_balances_account_id
|
||||||
|
ON balances(account_id)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_balances_timestamp
|
||||||
|
ON balances(timestamp)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp
|
||||||
|
ON balances(account_id, type, timestamp)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Insert balance into SQLite database
|
||||||
|
try:
|
||||||
|
cursor.execute(
|
||||||
|
"""INSERT INTO balances (
|
||||||
|
account_id,
|
||||||
|
bank,
|
||||||
|
status,
|
||||||
|
iban,
|
||||||
|
amount,
|
||||||
|
currency,
|
||||||
|
type,
|
||||||
|
timestamp
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||||
|
(
|
||||||
|
balance["account_id"],
|
||||||
|
balance["bank"],
|
||||||
|
balance["status"],
|
||||||
|
balance["iban"],
|
||||||
|
balance["amount"],
|
||||||
|
balance["currency"],
|
||||||
|
balance["type"],
|
||||||
|
balance["timestamp"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except IntegrityError:
|
||||||
|
warning(f"[{balance['account_id']}] Skipped duplicate balance")
|
||||||
|
|
||||||
|
# Commit changes and close the connection
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
success(f"[{balance['account_id']}] Inserted balance of type {balance['type']}")
|
||||||
|
|
||||||
|
return balance
|
||||||
|
|
||||||
|
|
||||||
|
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||||
|
# Connect to SQLite database
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Create the transactions table if it doesn't exist
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS transactions (
|
||||||
|
internalTransactionId TEXT PRIMARY KEY,
|
||||||
|
institutionId TEXT,
|
||||||
|
iban TEXT,
|
||||||
|
transactionDate DATETIME,
|
||||||
|
description TEXT,
|
||||||
|
transactionValue REAL,
|
||||||
|
transactionCurrency TEXT,
|
||||||
|
transactionStatus TEXT,
|
||||||
|
accountId TEXT,
|
||||||
|
rawTransaction JSON
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for better performance
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_id
|
||||||
|
ON transactions(accountId)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_transactions_date
|
||||||
|
ON transactions(transactionDate)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_date
|
||||||
|
ON transactions(accountId, transactionDate)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_transactions_amount
|
||||||
|
ON transactions(transactionValue)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Insert transactions into SQLite database
|
||||||
|
duplicates_count = 0
|
||||||
|
|
||||||
|
# Prepare an SQL statement for inserting data
|
||||||
|
insert_sql = """INSERT INTO transactions (
|
||||||
|
internalTransactionId,
|
||||||
|
institutionId,
|
||||||
|
iban,
|
||||||
|
transactionDate,
|
||||||
|
description,
|
||||||
|
transactionValue,
|
||||||
|
transactionCurrency,
|
||||||
|
transactionStatus,
|
||||||
|
accountId,
|
||||||
|
rawTransaction
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
|
||||||
|
|
||||||
|
new_transactions = []
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
try:
|
||||||
|
cursor.execute(
|
||||||
|
insert_sql,
|
||||||
|
(
|
||||||
|
transaction["internalTransactionId"],
|
||||||
|
transaction["institutionId"],
|
||||||
|
transaction["iban"],
|
||||||
|
transaction["transactionDate"],
|
||||||
|
transaction["description"],
|
||||||
|
transaction["transactionValue"],
|
||||||
|
transaction["transactionCurrency"],
|
||||||
|
transaction["transactionStatus"],
|
||||||
|
transaction["accountId"],
|
||||||
|
json.dumps(transaction["rawTransaction"]),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
new_transactions.append(transaction)
|
||||||
|
except IntegrityError:
|
||||||
|
# A transaction with the same ID already exists, indicating a duplicate
|
||||||
|
duplicates_count += 1
|
||||||
|
|
||||||
|
# Commit changes and close the connection
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
success(f"[{account}] Inserted {len(new_transactions)} new transactions")
|
||||||
|
if duplicates_count:
|
||||||
|
warning(f"[{account}] Skipped {duplicates_count} duplicate transactions")
|
||||||
|
|
||||||
|
return new_transactions
|
||||||
|
|
||||||
|
|
||||||
|
def get_transactions(
|
||||||
|
account_id=None,
|
||||||
|
limit=100,
|
||||||
|
offset=0,
|
||||||
|
date_from=None,
|
||||||
|
date_to=None,
|
||||||
|
min_amount=None,
|
||||||
|
max_amount=None,
|
||||||
|
search=None,
|
||||||
|
hide_missing_ids=True,
|
||||||
|
):
|
||||||
|
"""Get transactions from SQLite database with optional filtering"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return []
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.row_factory = sqlite3.Row # Enable dict-like access
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Build query with filters
|
||||||
|
query = "SELECT * FROM transactions WHERE 1=1"
|
||||||
|
params = []
|
||||||
|
|
||||||
|
if account_id:
|
||||||
|
query += " AND accountId = ?"
|
||||||
|
params.append(account_id)
|
||||||
|
|
||||||
|
if date_from:
|
||||||
|
query += " AND transactionDate >= ?"
|
||||||
|
params.append(date_from)
|
||||||
|
|
||||||
|
if date_to:
|
||||||
|
query += " AND transactionDate <= ?"
|
||||||
|
params.append(date_to)
|
||||||
|
|
||||||
|
if min_amount is not None:
|
||||||
|
query += " AND transactionValue >= ?"
|
||||||
|
params.append(min_amount)
|
||||||
|
|
||||||
|
if max_amount is not None:
|
||||||
|
query += " AND transactionValue <= ?"
|
||||||
|
params.append(max_amount)
|
||||||
|
|
||||||
|
if search:
|
||||||
|
query += " AND description LIKE ?"
|
||||||
|
params.append(f"%{search}%")
|
||||||
|
|
||||||
|
if hide_missing_ids:
|
||||||
|
query += (
|
||||||
|
" AND internalTransactionId IS NOT NULL AND internalTransactionId != ''"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add ordering and pagination
|
||||||
|
query += " ORDER BY transactionDate DESC"
|
||||||
|
|
||||||
|
if limit:
|
||||||
|
query += " LIMIT ?"
|
||||||
|
params.append(limit)
|
||||||
|
|
||||||
|
if offset:
|
||||||
|
query += " OFFSET ?"
|
||||||
|
params.append(offset)
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute(query, params)
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
|
||||||
|
# Convert to list of dicts and parse JSON fields
|
||||||
|
transactions = []
|
||||||
|
for row in rows:
|
||||||
|
transaction = dict(row)
|
||||||
|
if transaction["rawTransaction"]:
|
||||||
|
transaction["rawTransaction"] = json.loads(
|
||||||
|
transaction["rawTransaction"]
|
||||||
|
)
|
||||||
|
transactions.append(transaction)
|
||||||
|
|
||||||
|
conn.close()
|
||||||
|
return transactions
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def get_balances(account_id=None):
|
||||||
|
"""Get latest balances from SQLite database"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return []
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Get latest balance for each account_id and type combination
|
||||||
|
query = """
|
||||||
|
SELECT * FROM balances b1
|
||||||
|
WHERE b1.timestamp = (
|
||||||
|
SELECT MAX(b2.timestamp)
|
||||||
|
FROM balances b2
|
||||||
|
WHERE b2.account_id = b1.account_id AND b2.type = b1.type
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
params = []
|
||||||
|
|
||||||
|
if account_id:
|
||||||
|
query += " AND b1.account_id = ?"
|
||||||
|
params.append(account_id)
|
||||||
|
|
||||||
|
query += " ORDER BY b1.account_id, b1.type"
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute(query, params)
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
|
||||||
|
balances = [dict(row) for row in rows]
|
||||||
|
conn.close()
|
||||||
|
return balances
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def get_account_summary(account_id):
|
||||||
|
"""Get basic account info from transactions table (avoids GoCardless API call)"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return None
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get account info from most recent transaction
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT DISTINCT accountId, institutionId, iban
|
||||||
|
FROM transactions
|
||||||
|
WHERE accountId = ?
|
||||||
|
ORDER BY transactionDate DESC
|
||||||
|
LIMIT 1
|
||||||
|
""",
|
||||||
|
(account_id,),
|
||||||
|
)
|
||||||
|
|
||||||
|
row = cursor.fetchone()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
if row:
|
||||||
|
return dict(row)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def get_transaction_count(account_id=None, **filters):
|
||||||
|
"""Get total count of transactions matching filters"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return 0
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
query = "SELECT COUNT(*) FROM transactions WHERE 1=1"
|
||||||
|
params = []
|
||||||
|
|
||||||
|
if account_id:
|
||||||
|
query += " AND accountId = ?"
|
||||||
|
params.append(account_id)
|
||||||
|
|
||||||
|
# Add same filters as get_transactions
|
||||||
|
if filters.get("date_from"):
|
||||||
|
query += " AND transactionDate >= ?"
|
||||||
|
params.append(filters["date_from"])
|
||||||
|
|
||||||
|
if filters.get("date_to"):
|
||||||
|
query += " AND transactionDate <= ?"
|
||||||
|
params.append(filters["date_to"])
|
||||||
|
|
||||||
|
if filters.get("min_amount") is not None:
|
||||||
|
query += " AND transactionValue >= ?"
|
||||||
|
params.append(filters["min_amount"])
|
||||||
|
|
||||||
|
if filters.get("max_amount") is not None:
|
||||||
|
query += " AND transactionValue <= ?"
|
||||||
|
params.append(filters["max_amount"])
|
||||||
|
|
||||||
|
if filters.get("search"):
|
||||||
|
query += " AND description LIKE ?"
|
||||||
|
params.append(f"%{filters['search']}%")
|
||||||
|
|
||||||
|
if filters.get("hide_missing_ids", True):
|
||||||
|
query += (
|
||||||
|
" AND internalTransactionId IS NOT NULL AND internalTransactionId != ''"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute(query, params)
|
||||||
|
count = cursor.fetchone()[0]
|
||||||
|
conn.close()
|
||||||
|
return count
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def persist_account(account_data: dict):
|
||||||
|
"""Persist account details to SQLite database"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Create the accounts table if it doesn't exist
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS accounts (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
institution_id TEXT,
|
||||||
|
status TEXT,
|
||||||
|
iban TEXT,
|
||||||
|
name TEXT,
|
||||||
|
currency TEXT,
|
||||||
|
created DATETIME,
|
||||||
|
last_accessed DATETIME,
|
||||||
|
last_updated DATETIME
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create indexes for accounts table
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_accounts_institution_id
|
||||||
|
ON accounts(institution_id)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE INDEX IF NOT EXISTS idx_accounts_status
|
||||||
|
ON accounts(status)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Insert or replace account data
|
||||||
|
cursor.execute(
|
||||||
|
"""INSERT OR REPLACE INTO accounts (
|
||||||
|
id,
|
||||||
|
institution_id,
|
||||||
|
status,
|
||||||
|
iban,
|
||||||
|
name,
|
||||||
|
currency,
|
||||||
|
created,
|
||||||
|
last_accessed,
|
||||||
|
last_updated
|
||||||
|
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||||
|
(
|
||||||
|
account_data["id"],
|
||||||
|
account_data["institution_id"],
|
||||||
|
account_data["status"],
|
||||||
|
account_data.get("iban"),
|
||||||
|
account_data.get("name"),
|
||||||
|
account_data.get("currency"),
|
||||||
|
account_data["created"],
|
||||||
|
account_data.get("last_accessed"),
|
||||||
|
account_data.get("last_updated", account_data["created"]),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
success(f"[{account_data['id']}] Account details persisted to database")
|
||||||
|
return account_data
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def get_accounts(account_ids=None):
|
||||||
|
"""Get account details from SQLite database"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return []
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
query = "SELECT * FROM accounts"
|
||||||
|
params = []
|
||||||
|
|
||||||
|
if account_ids:
|
||||||
|
placeholders = ",".join("?" * len(account_ids))
|
||||||
|
query += f" WHERE id IN ({placeholders})"
|
||||||
|
params.extend(account_ids)
|
||||||
|
|
||||||
|
query += " ORDER BY created DESC"
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute(query, params)
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
|
||||||
|
accounts = [dict(row) for row in rows]
|
||||||
|
conn.close()
|
||||||
|
return accounts
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
def get_account(account_id: str):
|
||||||
|
"""Get specific account details from SQLite database"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||||
|
if not db_path.exists():
|
||||||
|
return None
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
conn.row_factory = sqlite3.Row
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
cursor.execute("SELECT * FROM accounts WHERE id = ?", (account_id,))
|
||||||
|
row = cursor.fetchone()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
if row:
|
||||||
|
return dict(row)
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
conn.close()
|
||||||
|
raise e
|
||||||
@@ -5,7 +5,6 @@ from pathlib import Path
|
|||||||
|
|
||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.utils.auth import get_token
|
|
||||||
from leggen.utils.config import load_config
|
from leggen.utils.config import load_config
|
||||||
from leggen.utils.text import error
|
from leggen.utils.text import error
|
||||||
|
|
||||||
@@ -74,29 +73,41 @@ class Group(click.Group):
|
|||||||
return getattr(mod, name)
|
return getattr(mod, name)
|
||||||
|
|
||||||
|
|
||||||
@click.group(cls=Group, context_settings={"help_option_names": ["-h", "--help"]})
|
@click.option(
|
||||||
|
"-c",
|
||||||
|
"--config",
|
||||||
|
type=click.Path(dir_okay=False),
|
||||||
|
default=Path.home() / ".config" / "leggen" / "config.toml",
|
||||||
|
show_default=True,
|
||||||
|
callback=load_config,
|
||||||
|
is_eager=True,
|
||||||
|
expose_value=False,
|
||||||
|
envvar="LEGGEN_CONFIG_FILE",
|
||||||
|
show_envvar=True,
|
||||||
|
help="Path to TOML configuration file",
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--api-url",
|
||||||
|
type=str,
|
||||||
|
default="http://localhost:8000",
|
||||||
|
envvar="LEGGEND_API_URL",
|
||||||
|
show_envvar=True,
|
||||||
|
help="URL of the leggend API service",
|
||||||
|
)
|
||||||
|
@click.group(
|
||||||
|
cls=Group,
|
||||||
|
context_settings={"help_option_names": ["-h", "--help"]},
|
||||||
|
)
|
||||||
@click.version_option(package_name="leggen")
|
@click.version_option(package_name="leggen")
|
||||||
@click.pass_context
|
@click.pass_context
|
||||||
def cli(ctx: click.Context):
|
def cli(ctx: click.Context, api_url: str):
|
||||||
"""
|
"""
|
||||||
Leggen: An Open Banking CLI
|
Leggen: An Open Banking CLI
|
||||||
"""
|
"""
|
||||||
ctx.ensure_object(dict)
|
|
||||||
|
|
||||||
# Do not require authentication when printing help messages
|
# Do not require authentication when printing help messages
|
||||||
if "--help" in sys.argv[1:] or "-h" in sys.argv[1:]:
|
if "--help" in sys.argv[1:] or "-h" in sys.argv[1:]:
|
||||||
return
|
return
|
||||||
|
|
||||||
# or when running the init command
|
# Store API URL in context for commands to use
|
||||||
if ctx.invoked_subcommand == "init":
|
ctx.obj["api_url"] = api_url
|
||||||
if (click.get_app_dir("leggen") / Path("config.json")).is_file():
|
|
||||||
click.confirm(
|
|
||||||
"Configuration file already exists. Do you want to overwrite it?",
|
|
||||||
abort=True,
|
|
||||||
)
|
|
||||||
return
|
|
||||||
config = load_config()
|
|
||||||
token = get_token(config)
|
|
||||||
ctx.obj["api_url"] = config["api_url"]
|
|
||||||
ctx.obj["mongo_uri"] = config["mongo_uri"]
|
|
||||||
ctx.obj["headers"] = {"Authorization": f"Bearer {token}"}
|
|
||||||
|
|||||||
57
leggen/notifications/discord.py
Normal file
57
leggen/notifications/discord.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import click
|
||||||
|
from discord_webhook import DiscordEmbed, DiscordWebhook
|
||||||
|
|
||||||
|
from leggen.utils.text import info
|
||||||
|
|
||||||
|
|
||||||
|
def send_expire_notification(ctx: click.Context, notification: dict):
|
||||||
|
info("Sending expiration notification to Discord")
|
||||||
|
webhook = DiscordWebhook(url=ctx.obj["notifications"]["discord"]["webhook"])
|
||||||
|
|
||||||
|
embed = DiscordEmbed(
|
||||||
|
title="",
|
||||||
|
description=f"Your account {notification['bank']} ({notification['requisition_id']}) is in {notification['status']} status. Days left: {notification['days_left']}",
|
||||||
|
color="03b2f8",
|
||||||
|
)
|
||||||
|
embed.set_author(
|
||||||
|
name="Leggen",
|
||||||
|
url="https://github.com/elisiariocouto/leggen",
|
||||||
|
)
|
||||||
|
embed.set_footer(text="Expiration notice")
|
||||||
|
embed.set_timestamp()
|
||||||
|
|
||||||
|
webhook.add_embed(embed)
|
||||||
|
response = webhook.execute()
|
||||||
|
try:
|
||||||
|
response.raise_for_status()
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Discord notification failed: {e}\n{response.text}") from e
|
||||||
|
|
||||||
|
|
||||||
|
def send_transactions_message(ctx: click.Context, transactions: list):
|
||||||
|
info(f"Got {len(transactions)} new transactions, sending message to Discord")
|
||||||
|
webhook = DiscordWebhook(url=ctx.obj["notifications"]["discord"]["webhook"])
|
||||||
|
|
||||||
|
embed = DiscordEmbed(
|
||||||
|
title="",
|
||||||
|
description=f"{len(transactions)} new transaction matches",
|
||||||
|
color="03b2f8",
|
||||||
|
)
|
||||||
|
embed.set_author(
|
||||||
|
name="Leggen",
|
||||||
|
url="https://github.com/elisiariocouto/leggen",
|
||||||
|
)
|
||||||
|
embed.set_footer(text="Case-insensitive filters")
|
||||||
|
embed.set_timestamp()
|
||||||
|
for transaction in transactions:
|
||||||
|
embed.add_embed_field(
|
||||||
|
name=transaction["name"],
|
||||||
|
value=f"{transaction['value']}{transaction['currency']} ({transaction['date']})",
|
||||||
|
)
|
||||||
|
|
||||||
|
webhook.add_embed(embed)
|
||||||
|
response = webhook.execute()
|
||||||
|
try:
|
||||||
|
response.raise_for_status()
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Discord notification failed: {e}\n{response.text}") from e
|
||||||
81
leggen/notifications/telegram.py
Normal file
81
leggen/notifications/telegram.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import click
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from leggen.utils.text import info
|
||||||
|
|
||||||
|
|
||||||
|
def escape_markdown(text: str) -> str:
|
||||||
|
return (
|
||||||
|
str(text)
|
||||||
|
.replace("_", "\\_")
|
||||||
|
.replace("*", "\\*")
|
||||||
|
.replace("[", "\\[")
|
||||||
|
.replace("]", "\\]")
|
||||||
|
.replace("(", "\\(")
|
||||||
|
.replace(")", "\\)")
|
||||||
|
.replace("~", "\\~")
|
||||||
|
.replace("`", "\\`")
|
||||||
|
.replace(">", "\\>")
|
||||||
|
.replace("#", "\\#")
|
||||||
|
.replace("+", "\\+")
|
||||||
|
.replace("-", "\\-")
|
||||||
|
.replace("=", "\\=")
|
||||||
|
.replace("|", "\\|")
|
||||||
|
.replace("{", "\\{")
|
||||||
|
.replace("}", "\\}")
|
||||||
|
.replace(".", "\\.")
|
||||||
|
.replace("!", "\\!")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def send_expire_notification(ctx: click.Context, notification: dict):
|
||||||
|
token = ctx.obj["notifications"]["telegram"]["api-key"]
|
||||||
|
chat_id = ctx.obj["notifications"]["telegram"]["chat-id"]
|
||||||
|
bot_url = f"https://api.telegram.org/bot{token}/sendMessage"
|
||||||
|
info("Sending expiration notification to Telegram")
|
||||||
|
message = "*💲 [Leggen](https://github.com/elisiariocouto/leggen)*\n"
|
||||||
|
message += escape_markdown(
|
||||||
|
f"Your account {notification['bank']} ({notification['requisition_id']}) is in {notification['status']} status. Days left: {notification['days_left']}\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
res = requests.post(
|
||||||
|
bot_url,
|
||||||
|
json={
|
||||||
|
"chat_id": chat_id,
|
||||||
|
"text": message,
|
||||||
|
"parse_mode": "MarkdownV2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
res.raise_for_status()
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Telegram notification failed: {e}\n{res.text}") from e
|
||||||
|
|
||||||
|
|
||||||
|
def send_transaction_message(ctx: click.Context, transactions: list):
|
||||||
|
token = ctx.obj["notifications"]["telegram"]["api-key"]
|
||||||
|
chat_id = ctx.obj["notifications"]["telegram"]["chat-id"]
|
||||||
|
bot_url = f"https://api.telegram.org/bot{token}/sendMessage"
|
||||||
|
info(f"Got {len(transactions)} new transactions, sending message to Telegram")
|
||||||
|
message = "*💲 [Leggen](https://github.com/elisiariocouto/leggen)*\n"
|
||||||
|
message += f"{len(transactions)} new transaction matches\n\n"
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
message += f"*Name*: {escape_markdown(transaction['name'])}\n"
|
||||||
|
message += f"*Value*: {escape_markdown(transaction['value'])}{escape_markdown(transaction['currency'])}\n"
|
||||||
|
message += f"*Date*: {escape_markdown(transaction['date'])}\n\n"
|
||||||
|
|
||||||
|
res = requests.post(
|
||||||
|
bot_url,
|
||||||
|
json={
|
||||||
|
"chat_id": chat_id,
|
||||||
|
"text": message,
|
||||||
|
"parse_mode": "MarkdownV2",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
res.raise_for_status()
|
||||||
|
except Exception as e:
|
||||||
|
raise Exception(f"Telegram notification failed: {e}\n{res.text}") from e
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
import json
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import click
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from leggen.utils.text import warning
|
|
||||||
|
|
||||||
|
|
||||||
def create_token(config: dict) -> str:
|
|
||||||
"""
|
|
||||||
Create a new token
|
|
||||||
"""
|
|
||||||
res = requests.post(
|
|
||||||
f"{config['api_url']}/token/new/",
|
|
||||||
json={"secret_id": config["api_key"], "secret_key": config["api_secret"]},
|
|
||||||
)
|
|
||||||
res.raise_for_status()
|
|
||||||
auth = res.json()
|
|
||||||
save_auth(auth)
|
|
||||||
return auth["access"]
|
|
||||||
|
|
||||||
|
|
||||||
def get_token(config: dict) -> str:
|
|
||||||
"""
|
|
||||||
Get the token from the auth file or request a new one
|
|
||||||
"""
|
|
||||||
auth_file = click.get_app_dir("leggen") / Path("auth.json")
|
|
||||||
if auth_file.exists():
|
|
||||||
with click.open_file(str(auth_file), "r") as f:
|
|
||||||
auth = json.load(f)
|
|
||||||
if not auth.get("access"):
|
|
||||||
return create_token(config)
|
|
||||||
|
|
||||||
res = requests.post(
|
|
||||||
f"{config['api_url']}/token/refresh/", json={"refresh": auth["refresh"]}
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
res.raise_for_status()
|
|
||||||
auth.update(res.json())
|
|
||||||
save_auth(auth)
|
|
||||||
return auth["access"]
|
|
||||||
except requests.exceptions.HTTPError:
|
|
||||||
warning(
|
|
||||||
f"Token probably expired, requesting a new one.\nResponse: {res.status_code}\n{res.text}"
|
|
||||||
)
|
|
||||||
return create_token(config)
|
|
||||||
else:
|
|
||||||
return create_token(config)
|
|
||||||
|
|
||||||
|
|
||||||
def save_auth(d: dict):
|
|
||||||
Path.mkdir(Path(click.get_app_dir("leggen")), exist_ok=True)
|
|
||||||
auth_file = click.get_app_dir("leggen") / Path("auth.json")
|
|
||||||
|
|
||||||
with click.open_file(str(auth_file), "w") as f:
|
|
||||||
json.dump(d, f)
|
|
||||||
@@ -1,29 +1,18 @@
|
|||||||
import json
|
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
import tomllib
|
||||||
|
|
||||||
import click
|
import click
|
||||||
|
|
||||||
from leggen.utils.text import error, info
|
from leggen.utils.text import error
|
||||||
|
|
||||||
|
|
||||||
def save_config(d: dict):
|
def load_config(ctx: click.Context, _, filename):
|
||||||
Path.mkdir(Path(click.get_app_dir("leggen")), exist_ok=True)
|
|
||||||
config_file = click.get_app_dir("leggen") / Path("config.json")
|
|
||||||
|
|
||||||
with click.open_file(str(config_file), "w") as f:
|
|
||||||
json.dump(d, f)
|
|
||||||
info(f"Wrote configuration file at '{config_file}'")
|
|
||||||
|
|
||||||
|
|
||||||
def load_config() -> dict:
|
|
||||||
config_file = click.get_app_dir("leggen") / Path("config.json")
|
|
||||||
try:
|
try:
|
||||||
with click.open_file(str(config_file), "r") as f:
|
with click.open_file(str(filename), "rb") as f:
|
||||||
config = json.load(f)
|
# TODO: Implement configuration file validation (use pydantic?)
|
||||||
return config
|
ctx.obj = tomllib.load(f)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
error(
|
error(
|
||||||
"Configuration file not found. Run `leggen init` to configure your account."
|
"Configuration file not found. Provide a valid configuration file path with leggen --config <path> or LEGGEN_CONFIG=<path> environment variable."
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|||||||
122
leggen/utils/database.py
Normal file
122
leggen/utils/database.py
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import click
|
||||||
|
|
||||||
|
import leggen.database.sqlite as sqlite_engine
|
||||||
|
from leggen.utils.text import info, warning
|
||||||
|
|
||||||
|
|
||||||
|
def persist_balance(ctx: click.Context, account: str, balance: dict) -> None:
|
||||||
|
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||||
|
|
||||||
|
if not sqlite:
|
||||||
|
warning("SQLite database is disabled, skipping balance saving")
|
||||||
|
return
|
||||||
|
|
||||||
|
info(f"[{account}] Fetched balances, saving to SQLite")
|
||||||
|
sqlite_engine.persist_balances(ctx, balance)
|
||||||
|
|
||||||
|
|
||||||
|
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||||
|
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||||
|
|
||||||
|
if not sqlite:
|
||||||
|
warning("SQLite database is disabled, skipping transaction saving")
|
||||||
|
# WARNING: This will return the transactions list as is, without saving it to any database
|
||||||
|
# Possible duplicate notifications will be sent if the filters are enabled
|
||||||
|
return transactions
|
||||||
|
|
||||||
|
info(f"[{account}] Fetched {len(transactions)} transactions, saving to SQLite")
|
||||||
|
return sqlite_engine.persist_transactions(ctx, account, transactions)
|
||||||
|
|
||||||
|
|
||||||
|
def save_transactions(ctx: click.Context, account: str) -> list:
|
||||||
|
import requests
|
||||||
|
|
||||||
|
api_url = ctx.obj.get("api_url", "http://localhost:8000")
|
||||||
|
|
||||||
|
info(f"[{account}] Getting account details")
|
||||||
|
res = requests.get(f"{api_url}/accounts/{account}")
|
||||||
|
res.raise_for_status()
|
||||||
|
account_info = res.json()
|
||||||
|
|
||||||
|
info(f"[{account}] Getting transactions")
|
||||||
|
transactions = []
|
||||||
|
|
||||||
|
res = requests.get(f"{api_url}/accounts/{account}/transactions/")
|
||||||
|
res.raise_for_status()
|
||||||
|
account_transactions = res.json().get("transactions", [])
|
||||||
|
|
||||||
|
for transaction in account_transactions.get("booked", []):
|
||||||
|
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||||
|
"bookingDate"
|
||||||
|
)
|
||||||
|
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||||
|
if booked_date and value_date:
|
||||||
|
min_date = min(
|
||||||
|
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
min_date = datetime.fromisoformat(booked_date or value_date)
|
||||||
|
|
||||||
|
transactionValue = float(
|
||||||
|
transaction.get("transactionAmount", {}).get("amount", 0)
|
||||||
|
)
|
||||||
|
currency = transaction.get("transactionAmount", {}).get("currency", "")
|
||||||
|
|
||||||
|
description = transaction.get(
|
||||||
|
"remittanceInformationUnstructured",
|
||||||
|
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||||
|
)
|
||||||
|
|
||||||
|
t = {
|
||||||
|
"internalTransactionId": transaction.get("internalTransactionId"),
|
||||||
|
"institutionId": account_info["institution_id"],
|
||||||
|
"iban": account_info.get("iban", "N/A"),
|
||||||
|
"transactionDate": min_date,
|
||||||
|
"description": description,
|
||||||
|
"transactionValue": transactionValue,
|
||||||
|
"transactionCurrency": currency,
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": account,
|
||||||
|
"rawTransaction": transaction,
|
||||||
|
}
|
||||||
|
transactions.append(t)
|
||||||
|
|
||||||
|
for transaction in account_transactions.get("pending", []):
|
||||||
|
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||||
|
"bookingDate"
|
||||||
|
)
|
||||||
|
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||||
|
if booked_date and value_date:
|
||||||
|
min_date = min(
|
||||||
|
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
min_date = datetime.fromisoformat(booked_date or value_date)
|
||||||
|
|
||||||
|
transactionValue = float(
|
||||||
|
transaction.get("transactionAmount", {}).get("amount", 0)
|
||||||
|
)
|
||||||
|
currency = transaction.get("transactionAmount", {}).get("currency", "")
|
||||||
|
|
||||||
|
description = transaction.get(
|
||||||
|
"remittanceInformationUnstructured",
|
||||||
|
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||||
|
)
|
||||||
|
|
||||||
|
t = {
|
||||||
|
"internalTransactionId": transaction.get("internalTransactionId"),
|
||||||
|
"institutionId": account_info["institution_id"],
|
||||||
|
"iban": account_info.get("iban", "N/A"),
|
||||||
|
"transactionDate": min_date,
|
||||||
|
"description": description,
|
||||||
|
"transactionValue": transactionValue,
|
||||||
|
"transactionCurrency": currency,
|
||||||
|
"transactionStatus": "pending",
|
||||||
|
"accountId": account,
|
||||||
|
"rawTransaction": transaction,
|
||||||
|
}
|
||||||
|
transactions.append(t)
|
||||||
|
|
||||||
|
return persist_transactions(ctx, account, transactions)
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import click
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from leggen.utils.text import error
|
|
||||||
|
|
||||||
|
|
||||||
def get(ctx: click.Context, path: str, params: dict = {}):
|
|
||||||
"""
|
|
||||||
GET request to the GoCardless API
|
|
||||||
"""
|
|
||||||
|
|
||||||
url = f"{ctx.obj['api_url']}{path}"
|
|
||||||
res = requests.get(url, headers=ctx.obj["headers"], params=params)
|
|
||||||
try:
|
|
||||||
res.raise_for_status()
|
|
||||||
except Exception as e:
|
|
||||||
error(f"Error: {e}")
|
|
||||||
ctx.abort()
|
|
||||||
return res.json()
|
|
||||||
|
|
||||||
|
|
||||||
def post(ctx: click.Context, path: str, data: dict = {}):
|
|
||||||
"""
|
|
||||||
POST request to the GoCardless API
|
|
||||||
"""
|
|
||||||
|
|
||||||
url = f"{ctx.obj['api_url']}{path}"
|
|
||||||
res = requests.post(url, headers=ctx.obj["headers"], json=data)
|
|
||||||
try:
|
|
||||||
res.raise_for_status()
|
|
||||||
except Exception as e:
|
|
||||||
error(f"Error: {e}")
|
|
||||||
ctx.abort()
|
|
||||||
return res.json()
|
|
||||||
|
|
||||||
|
|
||||||
def put(ctx: click.Context, path: str, data: dict = {}):
|
|
||||||
"""
|
|
||||||
PUT request to the GoCardless API
|
|
||||||
"""
|
|
||||||
|
|
||||||
url = f"{ctx.obj['api_url']}{path}"
|
|
||||||
res = requests.put(url, headers=ctx.obj["headers"], json=data)
|
|
||||||
try:
|
|
||||||
res.raise_for_status()
|
|
||||||
except Exception as e:
|
|
||||||
error(f"Error: {e}")
|
|
||||||
error(res.text)
|
|
||||||
ctx.abort()
|
|
||||||
return res.json()
|
|
||||||
65
leggen/utils/notifications.py
Normal file
65
leggen/utils/notifications.py
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import click
|
||||||
|
|
||||||
|
import leggen.notifications.discord as discord
|
||||||
|
import leggen.notifications.telegram as telegram
|
||||||
|
from leggen.utils.text import error, info, warning
|
||||||
|
|
||||||
|
|
||||||
|
def send_expire_notification(ctx: click.Context, notification: dict):
|
||||||
|
discord_enabled = ctx.obj.get("notifications", {}).get("discord", False)
|
||||||
|
telegram_enabled = ctx.obj.get("notifications", {}).get("telegram", False)
|
||||||
|
|
||||||
|
if not discord_enabled and not telegram_enabled:
|
||||||
|
warning("No notification engine is enabled, skipping notifications")
|
||||||
|
error(
|
||||||
|
f"Your account {notification['bank']} ({notification['requisition_id']}) is in {notification['status']} status. Days left: {notification['days_left']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if discord_enabled:
|
||||||
|
info("Sending expiration notification to Discord")
|
||||||
|
discord.send_expire_notification(ctx, notification)
|
||||||
|
|
||||||
|
if telegram_enabled:
|
||||||
|
info("Sending expiration notification to Telegram")
|
||||||
|
telegram.send_expire_notification(ctx, notification)
|
||||||
|
|
||||||
|
|
||||||
|
def send_notification(ctx: click.Context, transactions: list):
|
||||||
|
if ctx.obj.get("filters") is None:
|
||||||
|
warning("No filters are enabled, skipping notifications")
|
||||||
|
return
|
||||||
|
|
||||||
|
filters_case_insensitive = ctx.obj.get("filters", {}).get("case-insensitive", {})
|
||||||
|
|
||||||
|
# Add transaction to the list of transactions to be sent as a notification
|
||||||
|
notification_transactions = []
|
||||||
|
for transaction in transactions:
|
||||||
|
for _, v in filters_case_insensitive.items():
|
||||||
|
if v.lower() in transaction["description"].lower():
|
||||||
|
notification_transactions.append(
|
||||||
|
{
|
||||||
|
"name": transaction["description"],
|
||||||
|
"value": transaction["transactionValue"],
|
||||||
|
"currency": transaction["transactionCurrency"],
|
||||||
|
"date": transaction["transactionDate"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(notification_transactions) == 0:
|
||||||
|
warning("No transactions matched the filters, skipping notifications")
|
||||||
|
return
|
||||||
|
|
||||||
|
discord_enabled = ctx.obj.get("notifications", {}).get("discord", False)
|
||||||
|
telegram_enabled = ctx.obj.get("notifications", {}).get("telegram", False)
|
||||||
|
|
||||||
|
if not discord_enabled and not telegram_enabled:
|
||||||
|
warning("No notification engine is enabled, skipping notifications")
|
||||||
|
return
|
||||||
|
|
||||||
|
if discord_enabled:
|
||||||
|
info(f"Sending {len(notification_transactions)} transactions to Discord")
|
||||||
|
discord.send_transactions_message(ctx, notification_transactions)
|
||||||
|
|
||||||
|
if telegram_enabled:
|
||||||
|
info(f"Sending {len(notification_transactions)} transactions to Telegram")
|
||||||
|
telegram.send_transaction_message(ctx, notification_transactions)
|
||||||
0
leggend/__init__.py
Normal file
0
leggend/__init__.py
Normal file
66
leggend/api/models/accounts.py
Normal file
66
leggend/api/models/accounts.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class AccountBalance(BaseModel):
|
||||||
|
"""Account balance model"""
|
||||||
|
|
||||||
|
amount: float
|
||||||
|
currency: str
|
||||||
|
balance_type: str
|
||||||
|
last_change_date: Optional[datetime] = None
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||||
|
|
||||||
|
|
||||||
|
class AccountDetails(BaseModel):
|
||||||
|
"""Account details model"""
|
||||||
|
|
||||||
|
id: str
|
||||||
|
institution_id: str
|
||||||
|
status: str
|
||||||
|
iban: Optional[str] = None
|
||||||
|
name: Optional[str] = None
|
||||||
|
currency: Optional[str] = None
|
||||||
|
created: datetime
|
||||||
|
last_accessed: Optional[datetime] = None
|
||||||
|
balances: List[AccountBalance] = []
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||||
|
|
||||||
|
|
||||||
|
class Transaction(BaseModel):
|
||||||
|
"""Transaction model"""
|
||||||
|
|
||||||
|
internal_transaction_id: Optional[str] = None
|
||||||
|
institution_id: str
|
||||||
|
iban: Optional[str] = None
|
||||||
|
account_id: str
|
||||||
|
transaction_date: datetime
|
||||||
|
description: str
|
||||||
|
transaction_value: float
|
||||||
|
transaction_currency: str
|
||||||
|
transaction_status: str # "booked" or "pending"
|
||||||
|
raw_transaction: Dict[str, Any]
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||||
|
|
||||||
|
|
||||||
|
class TransactionSummary(BaseModel):
|
||||||
|
"""Transaction summary for lists"""
|
||||||
|
|
||||||
|
internal_transaction_id: Optional[str] = None
|
||||||
|
date: datetime
|
||||||
|
description: str
|
||||||
|
amount: float
|
||||||
|
currency: str
|
||||||
|
status: str
|
||||||
|
account_id: str
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||||
52
leggend/api/models/banks.py
Normal file
52
leggend/api/models/banks.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class BankInstitution(BaseModel):
|
||||||
|
"""Bank institution model"""
|
||||||
|
|
||||||
|
id: str
|
||||||
|
name: str
|
||||||
|
bic: Optional[str] = None
|
||||||
|
transaction_total_days: int
|
||||||
|
countries: List[str]
|
||||||
|
logo: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class BankConnectionRequest(BaseModel):
|
||||||
|
"""Request to connect to a bank"""
|
||||||
|
|
||||||
|
institution_id: str
|
||||||
|
redirect_url: Optional[str] = "http://localhost:8000/"
|
||||||
|
|
||||||
|
|
||||||
|
class BankRequisition(BaseModel):
|
||||||
|
"""Bank requisition/connection model"""
|
||||||
|
|
||||||
|
id: str
|
||||||
|
institution_id: str
|
||||||
|
status: str
|
||||||
|
status_display: Optional[str] = None
|
||||||
|
created: datetime
|
||||||
|
link: str
|
||||||
|
accounts: List[str] = []
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||||
|
|
||||||
|
|
||||||
|
class BankConnectionStatus(BaseModel):
|
||||||
|
"""Bank connection status response"""
|
||||||
|
|
||||||
|
bank_id: str
|
||||||
|
bank_name: str
|
||||||
|
status: str
|
||||||
|
status_display: str
|
||||||
|
created_at: datetime
|
||||||
|
requisition_id: str
|
||||||
|
accounts_count: int
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||||
29
leggend/api/models/common.py
Normal file
29
leggend/api/models/common.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class APIResponse(BaseModel):
|
||||||
|
"""Base API response model"""
|
||||||
|
|
||||||
|
success: bool = True
|
||||||
|
message: Optional[str] = None
|
||||||
|
data: Optional[Any] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorResponse(BaseModel):
|
||||||
|
"""Error response model"""
|
||||||
|
|
||||||
|
success: bool = False
|
||||||
|
message: str
|
||||||
|
error_code: Optional[str] = None
|
||||||
|
details: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class PaginatedResponse(BaseModel):
|
||||||
|
"""Paginated response model"""
|
||||||
|
|
||||||
|
success: bool = True
|
||||||
|
data: list
|
||||||
|
pagination: Dict[str, Any]
|
||||||
|
message: Optional[str] = None
|
||||||
51
leggend/api/models/notifications.py
Normal file
51
leggend/api/models/notifications.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
from typing import Optional, List
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class DiscordConfig(BaseModel):
|
||||||
|
"""Discord notification configuration"""
|
||||||
|
|
||||||
|
webhook: str
|
||||||
|
enabled: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class TelegramConfig(BaseModel):
|
||||||
|
"""Telegram notification configuration"""
|
||||||
|
|
||||||
|
token: str
|
||||||
|
chat_id: int
|
||||||
|
enabled: bool = True
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationFilters(BaseModel):
|
||||||
|
"""Notification filters configuration"""
|
||||||
|
|
||||||
|
case_insensitive: List[str] = []
|
||||||
|
case_sensitive: Optional[List[str]] = None
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationSettings(BaseModel):
|
||||||
|
"""Complete notification settings"""
|
||||||
|
|
||||||
|
discord: Optional[DiscordConfig] = None
|
||||||
|
telegram: Optional[TelegramConfig] = None
|
||||||
|
filters: NotificationFilters = NotificationFilters()
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationTest(BaseModel):
|
||||||
|
"""Test notification request"""
|
||||||
|
|
||||||
|
service: str # "discord" or "telegram"
|
||||||
|
message: str = "Test notification from Leggen"
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationHistory(BaseModel):
|
||||||
|
"""Notification history entry"""
|
||||||
|
|
||||||
|
id: str
|
||||||
|
service: str
|
||||||
|
message: str
|
||||||
|
status: str # "sent", "failed"
|
||||||
|
sent_at: str
|
||||||
|
error: Optional[str] = None
|
||||||
55
leggend/api/models/sync.py
Normal file
55
leggend/api/models/sync.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class SyncRequest(BaseModel):
|
||||||
|
"""Request to trigger a sync"""
|
||||||
|
|
||||||
|
account_ids: Optional[list[str]] = None # If None, sync all accounts
|
||||||
|
force: bool = False # Force sync even if recently synced
|
||||||
|
|
||||||
|
|
||||||
|
class SyncStatus(BaseModel):
|
||||||
|
"""Sync operation status"""
|
||||||
|
|
||||||
|
is_running: bool
|
||||||
|
last_sync: Optional[datetime] = None
|
||||||
|
next_sync: Optional[datetime] = None
|
||||||
|
accounts_synced: int = 0
|
||||||
|
total_accounts: int = 0
|
||||||
|
transactions_added: int = 0
|
||||||
|
errors: list[str] = []
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||||
|
|
||||||
|
|
||||||
|
class SyncResult(BaseModel):
|
||||||
|
"""Result of a sync operation"""
|
||||||
|
|
||||||
|
success: bool
|
||||||
|
accounts_processed: int
|
||||||
|
transactions_added: int
|
||||||
|
transactions_updated: int
|
||||||
|
balances_updated: int
|
||||||
|
duration_seconds: float
|
||||||
|
errors: list[str] = []
|
||||||
|
started_at: datetime
|
||||||
|
completed_at: datetime
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||||
|
|
||||||
|
|
||||||
|
class SchedulerConfig(BaseModel):
|
||||||
|
"""Scheduler configuration model"""
|
||||||
|
|
||||||
|
enabled: bool = True
|
||||||
|
hour: Optional[int] = 3
|
||||||
|
minute: Optional[int] = 0
|
||||||
|
cron: Optional[str] = None # Custom cron expression
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
extra = "forbid"
|
||||||
287
leggend/api/routes/accounts.py
Normal file
287
leggend/api/routes/accounts.py
Normal file
@@ -0,0 +1,287 @@
|
|||||||
|
from typing import Optional, List, Union
|
||||||
|
from fastapi import APIRouter, HTTPException, Query
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.common import APIResponse
|
||||||
|
from leggend.api.models.accounts import (
|
||||||
|
AccountDetails,
|
||||||
|
AccountBalance,
|
||||||
|
Transaction,
|
||||||
|
TransactionSummary,
|
||||||
|
)
|
||||||
|
from leggend.services.database_service import DatabaseService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
database_service = DatabaseService()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/accounts", response_model=APIResponse)
|
||||||
|
async def get_all_accounts() -> APIResponse:
|
||||||
|
"""Get all connected accounts from database"""
|
||||||
|
try:
|
||||||
|
accounts = []
|
||||||
|
|
||||||
|
# Get all account details from database
|
||||||
|
db_accounts = await database_service.get_accounts_from_db()
|
||||||
|
|
||||||
|
# Process accounts found in database
|
||||||
|
for db_account in db_accounts:
|
||||||
|
try:
|
||||||
|
# Get latest balances from database for this account
|
||||||
|
balances_data = await database_service.get_balances_from_db(
|
||||||
|
db_account["id"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Process balances
|
||||||
|
balances = []
|
||||||
|
for balance in balances_data:
|
||||||
|
balances.append(
|
||||||
|
AccountBalance(
|
||||||
|
amount=balance["amount"],
|
||||||
|
currency=balance["currency"],
|
||||||
|
balance_type=balance["type"],
|
||||||
|
last_change_date=balance.get("timestamp"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
accounts.append(
|
||||||
|
AccountDetails(
|
||||||
|
id=db_account["id"],
|
||||||
|
institution_id=db_account["institution_id"],
|
||||||
|
status=db_account["status"],
|
||||||
|
iban=db_account.get("iban"),
|
||||||
|
name=db_account.get("name"),
|
||||||
|
currency=db_account.get("currency"),
|
||||||
|
created=db_account["created"],
|
||||||
|
last_accessed=db_account.get("last_accessed"),
|
||||||
|
balances=balances,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to process database account {db_account['id']}: {e}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=accounts,
|
||||||
|
message=f"Retrieved {len(accounts)} accounts from database",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get accounts: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get accounts: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/accounts/{account_id}", response_model=APIResponse)
|
||||||
|
async def get_account_details(account_id: str) -> APIResponse:
|
||||||
|
"""Get details for a specific account from database"""
|
||||||
|
try:
|
||||||
|
# Get account details from database
|
||||||
|
db_account = await database_service.get_account_details_from_db(account_id)
|
||||||
|
|
||||||
|
if not db_account:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404, detail=f"Account {account_id} not found in database"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get latest balances from database for this account
|
||||||
|
balances_data = await database_service.get_balances_from_db(account_id)
|
||||||
|
|
||||||
|
# Process balances
|
||||||
|
balances = []
|
||||||
|
for balance in balances_data:
|
||||||
|
balances.append(
|
||||||
|
AccountBalance(
|
||||||
|
amount=balance["amount"],
|
||||||
|
currency=balance["currency"],
|
||||||
|
balance_type=balance["type"],
|
||||||
|
last_change_date=balance.get("timestamp"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
account = AccountDetails(
|
||||||
|
id=db_account["id"],
|
||||||
|
institution_id=db_account["institution_id"],
|
||||||
|
status=db_account["status"],
|
||||||
|
iban=db_account.get("iban"),
|
||||||
|
name=db_account.get("name"),
|
||||||
|
currency=db_account.get("currency"),
|
||||||
|
created=db_account["created"],
|
||||||
|
last_accessed=db_account.get("last_accessed"),
|
||||||
|
balances=balances,
|
||||||
|
)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=account,
|
||||||
|
message=f"Account details retrieved from database for {account_id}",
|
||||||
|
)
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get account details for {account_id}: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get account details: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/accounts/{account_id}/balances", response_model=APIResponse)
|
||||||
|
async def get_account_balances(account_id: str) -> APIResponse:
|
||||||
|
"""Get balances for a specific account from database"""
|
||||||
|
try:
|
||||||
|
# Get balances from database instead of GoCardless API
|
||||||
|
db_balances = await database_service.get_balances_from_db(account_id=account_id)
|
||||||
|
|
||||||
|
balances = []
|
||||||
|
for balance in db_balances:
|
||||||
|
balances.append(
|
||||||
|
AccountBalance(
|
||||||
|
amount=balance["amount"],
|
||||||
|
currency=balance["currency"],
|
||||||
|
balance_type=balance["type"],
|
||||||
|
last_change_date=balance.get("timestamp"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=balances,
|
||||||
|
message=f"Retrieved {len(balances)} balances for account {account_id}",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to get balances from database for account {account_id}: {e}"
|
||||||
|
)
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404, detail=f"Failed to get balances: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/balances", response_model=APIResponse)
|
||||||
|
async def get_all_balances() -> APIResponse:
|
||||||
|
"""Get all balances from all accounts in database"""
|
||||||
|
try:
|
||||||
|
# Get all accounts first to iterate through them
|
||||||
|
db_accounts = await database_service.get_accounts_from_db()
|
||||||
|
|
||||||
|
all_balances = []
|
||||||
|
for db_account in db_accounts:
|
||||||
|
try:
|
||||||
|
# Get balances for this account
|
||||||
|
db_balances = await database_service.get_balances_from_db(
|
||||||
|
account_id=db_account["id"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# Process balances and add account info
|
||||||
|
for balance in db_balances:
|
||||||
|
balance_data = {
|
||||||
|
"id": f"{db_account['id']}_{balance['type']}", # Create unique ID
|
||||||
|
"account_id": db_account["id"],
|
||||||
|
"balance_amount": balance["amount"],
|
||||||
|
"balance_type": balance["type"],
|
||||||
|
"currency": balance["currency"],
|
||||||
|
"reference_date": balance.get(
|
||||||
|
"timestamp", db_account.get("last_accessed")
|
||||||
|
),
|
||||||
|
"created_at": db_account.get("created"),
|
||||||
|
"updated_at": db_account.get("last_accessed"),
|
||||||
|
}
|
||||||
|
all_balances.append(balance_data)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to get balances for account {db_account['id']}: {e}"
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=all_balances,
|
||||||
|
message=f"Retrieved {len(all_balances)} balances from {len(db_accounts)} accounts",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get all balances: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get balances: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/accounts/{account_id}/transactions", response_model=APIResponse)
|
||||||
|
async def get_account_transactions(
|
||||||
|
account_id: str,
|
||||||
|
limit: Optional[int] = Query(default=100, le=500),
|
||||||
|
offset: Optional[int] = Query(default=0, ge=0),
|
||||||
|
summary_only: bool = Query(
|
||||||
|
default=False, description="Return transaction summaries only"
|
||||||
|
),
|
||||||
|
) -> APIResponse:
|
||||||
|
"""Get transactions for a specific account from database"""
|
||||||
|
try:
|
||||||
|
# Get transactions from database instead of GoCardless API
|
||||||
|
db_transactions = await database_service.get_transactions_from_db(
|
||||||
|
account_id=account_id,
|
||||||
|
limit=limit,
|
||||||
|
offset=offset,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get total count for pagination info
|
||||||
|
total_transactions = await database_service.get_transaction_count_from_db(
|
||||||
|
account_id=account_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
data: Union[List[TransactionSummary], List[Transaction]]
|
||||||
|
|
||||||
|
if summary_only:
|
||||||
|
# Return simplified transaction summaries
|
||||||
|
data = [
|
||||||
|
TransactionSummary(
|
||||||
|
internal_transaction_id=txn["internalTransactionId"],
|
||||||
|
date=txn["transactionDate"],
|
||||||
|
description=txn["description"],
|
||||||
|
amount=txn["transactionValue"],
|
||||||
|
currency=txn["transactionCurrency"],
|
||||||
|
status=txn["transactionStatus"],
|
||||||
|
account_id=txn["accountId"],
|
||||||
|
)
|
||||||
|
for txn in db_transactions
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
# Return full transaction details
|
||||||
|
data = [
|
||||||
|
Transaction(
|
||||||
|
internal_transaction_id=txn["internalTransactionId"],
|
||||||
|
institution_id=txn["institutionId"],
|
||||||
|
iban=txn["iban"],
|
||||||
|
account_id=txn["accountId"],
|
||||||
|
transaction_date=txn["transactionDate"],
|
||||||
|
description=txn["description"],
|
||||||
|
transaction_value=txn["transactionValue"],
|
||||||
|
transaction_currency=txn["transactionCurrency"],
|
||||||
|
transaction_status=txn["transactionStatus"],
|
||||||
|
raw_transaction=txn["rawTransaction"],
|
||||||
|
)
|
||||||
|
for txn in db_transactions
|
||||||
|
]
|
||||||
|
|
||||||
|
actual_offset = offset or 0
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=data,
|
||||||
|
message=f"Retrieved {len(data)} transactions (showing {actual_offset + 1}-{actual_offset + len(data)} of {total_transactions})",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to get transactions from database for account {account_id}: {e}"
|
||||||
|
)
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=404, detail=f"Failed to get transactions: {str(e)}"
|
||||||
|
) from e
|
||||||
179
leggend/api/routes/banks.py
Normal file
179
leggend/api/routes/banks.py
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
from fastapi import APIRouter, HTTPException, Query
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.common import APIResponse
|
||||||
|
from leggend.api.models.banks import (
|
||||||
|
BankInstitution,
|
||||||
|
BankConnectionRequest,
|
||||||
|
BankRequisition,
|
||||||
|
BankConnectionStatus,
|
||||||
|
)
|
||||||
|
from leggend.services.gocardless_service import GoCardlessService
|
||||||
|
from leggend.utils.gocardless import REQUISITION_STATUS
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
gocardless_service = GoCardlessService()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/banks/institutions", response_model=APIResponse)
|
||||||
|
async def get_bank_institutions(
|
||||||
|
country: str = Query(default="PT", description="Country code (e.g., PT, ES, FR)"),
|
||||||
|
) -> APIResponse:
|
||||||
|
"""Get available bank institutions for a country"""
|
||||||
|
try:
|
||||||
|
institutions_data = await gocardless_service.get_institutions(country)
|
||||||
|
|
||||||
|
institutions = [
|
||||||
|
BankInstitution(
|
||||||
|
id=inst["id"],
|
||||||
|
name=inst["name"],
|
||||||
|
bic=inst.get("bic"),
|
||||||
|
transaction_total_days=inst["transaction_total_days"],
|
||||||
|
countries=inst["countries"],
|
||||||
|
logo=inst.get("logo"),
|
||||||
|
)
|
||||||
|
for inst in institutions_data
|
||||||
|
]
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=institutions,
|
||||||
|
message=f"Found {len(institutions)} institutions for {country}",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get institutions for {country}: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get institutions: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/banks/connect", response_model=APIResponse)
|
||||||
|
async def connect_to_bank(request: BankConnectionRequest) -> APIResponse:
|
||||||
|
"""Create a connection to a bank (requisition)"""
|
||||||
|
try:
|
||||||
|
redirect_url = request.redirect_url or "http://localhost:8000/"
|
||||||
|
requisition_data = await gocardless_service.create_requisition(
|
||||||
|
request.institution_id, redirect_url
|
||||||
|
)
|
||||||
|
|
||||||
|
requisition = BankRequisition(
|
||||||
|
id=requisition_data["id"],
|
||||||
|
institution_id=requisition_data["institution_id"],
|
||||||
|
status=requisition_data["status"],
|
||||||
|
created=requisition_data["created"],
|
||||||
|
link=requisition_data["link"],
|
||||||
|
accounts=requisition_data.get("accounts", []),
|
||||||
|
)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=requisition,
|
||||||
|
message="Bank connection created. Please visit the link to authorize.",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to connect to bank {request.institution_id}: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to connect to bank: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/banks/status", response_model=APIResponse)
|
||||||
|
async def get_bank_connections_status() -> APIResponse:
|
||||||
|
"""Get status of all bank connections"""
|
||||||
|
try:
|
||||||
|
requisitions_data = await gocardless_service.get_requisitions()
|
||||||
|
|
||||||
|
connections = []
|
||||||
|
for req in requisitions_data.get("results", []):
|
||||||
|
status = req["status"]
|
||||||
|
status_display = REQUISITION_STATUS.get(status, "UNKNOWN")
|
||||||
|
|
||||||
|
connections.append(
|
||||||
|
BankConnectionStatus(
|
||||||
|
bank_id=req["institution_id"],
|
||||||
|
bank_name=req[
|
||||||
|
"institution_id"
|
||||||
|
], # Could be enhanced with actual bank names
|
||||||
|
status=status,
|
||||||
|
status_display=status_display,
|
||||||
|
created_at=req["created"],
|
||||||
|
requisition_id=req["id"],
|
||||||
|
accounts_count=len(req.get("accounts", [])),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=connections,
|
||||||
|
message=f"Found {len(connections)} bank connections",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get bank connection status: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get bank status: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/banks/connections/{requisition_id}", response_model=APIResponse)
|
||||||
|
async def delete_bank_connection(requisition_id: str) -> APIResponse:
|
||||||
|
"""Delete a bank connection"""
|
||||||
|
try:
|
||||||
|
# This would need to be implemented in GoCardlessService
|
||||||
|
# For now, return success
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
message=f"Bank connection {requisition_id} deleted successfully",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to delete bank connection {requisition_id}: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to delete connection: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/banks/countries", response_model=APIResponse)
|
||||||
|
async def get_supported_countries() -> APIResponse:
|
||||||
|
"""Get list of supported countries"""
|
||||||
|
countries = [
|
||||||
|
{"code": "AT", "name": "Austria"},
|
||||||
|
{"code": "BE", "name": "Belgium"},
|
||||||
|
{"code": "BG", "name": "Bulgaria"},
|
||||||
|
{"code": "HR", "name": "Croatia"},
|
||||||
|
{"code": "CY", "name": "Cyprus"},
|
||||||
|
{"code": "CZ", "name": "Czech Republic"},
|
||||||
|
{"code": "DK", "name": "Denmark"},
|
||||||
|
{"code": "EE", "name": "Estonia"},
|
||||||
|
{"code": "FI", "name": "Finland"},
|
||||||
|
{"code": "FR", "name": "France"},
|
||||||
|
{"code": "DE", "name": "Germany"},
|
||||||
|
{"code": "GR", "name": "Greece"},
|
||||||
|
{"code": "HU", "name": "Hungary"},
|
||||||
|
{"code": "IS", "name": "Iceland"},
|
||||||
|
{"code": "IE", "name": "Ireland"},
|
||||||
|
{"code": "IT", "name": "Italy"},
|
||||||
|
{"code": "LV", "name": "Latvia"},
|
||||||
|
{"code": "LI", "name": "Liechtenstein"},
|
||||||
|
{"code": "LT", "name": "Lithuania"},
|
||||||
|
{"code": "LU", "name": "Luxembourg"},
|
||||||
|
{"code": "MT", "name": "Malta"},
|
||||||
|
{"code": "NL", "name": "Netherlands"},
|
||||||
|
{"code": "NO", "name": "Norway"},
|
||||||
|
{"code": "PL", "name": "Poland"},
|
||||||
|
{"code": "PT", "name": "Portugal"},
|
||||||
|
{"code": "RO", "name": "Romania"},
|
||||||
|
{"code": "SK", "name": "Slovakia"},
|
||||||
|
{"code": "SI", "name": "Slovenia"},
|
||||||
|
{"code": "ES", "name": "Spain"},
|
||||||
|
{"code": "SE", "name": "Sweden"},
|
||||||
|
{"code": "GB", "name": "United Kingdom"},
|
||||||
|
]
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=countries,
|
||||||
|
message="Supported countries retrieved successfully",
|
||||||
|
)
|
||||||
218
leggend/api/routes/notifications.py
Normal file
218
leggend/api/routes/notifications.py
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
from typing import Dict, Any
|
||||||
|
from fastapi import APIRouter, HTTPException
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.common import APIResponse
|
||||||
|
from leggend.api.models.notifications import (
|
||||||
|
NotificationSettings,
|
||||||
|
NotificationTest,
|
||||||
|
DiscordConfig,
|
||||||
|
TelegramConfig,
|
||||||
|
NotificationFilters,
|
||||||
|
)
|
||||||
|
from leggend.services.notification_service import NotificationService
|
||||||
|
from leggend.config import config
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
notification_service = NotificationService()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/notifications/settings", response_model=APIResponse)
|
||||||
|
async def get_notification_settings() -> APIResponse:
|
||||||
|
"""Get current notification settings"""
|
||||||
|
try:
|
||||||
|
notifications_config = config.notifications_config
|
||||||
|
filters_config = config.filters_config
|
||||||
|
|
||||||
|
# Build response safely without exposing secrets
|
||||||
|
discord_config = notifications_config.get("discord", {})
|
||||||
|
telegram_config = notifications_config.get("telegram", {})
|
||||||
|
|
||||||
|
settings = NotificationSettings(
|
||||||
|
discord=DiscordConfig(
|
||||||
|
webhook="***" if discord_config.get("webhook") else "",
|
||||||
|
enabled=discord_config.get("enabled", True),
|
||||||
|
)
|
||||||
|
if discord_config.get("webhook")
|
||||||
|
else None,
|
||||||
|
telegram=TelegramConfig(
|
||||||
|
token="***"
|
||||||
|
if (telegram_config.get("token") or telegram_config.get("api-key"))
|
||||||
|
else "",
|
||||||
|
chat_id=telegram_config.get("chat_id")
|
||||||
|
or telegram_config.get("chat-id", 0),
|
||||||
|
enabled=telegram_config.get("enabled", True),
|
||||||
|
)
|
||||||
|
if (telegram_config.get("token") or telegram_config.get("api-key"))
|
||||||
|
else None,
|
||||||
|
filters=NotificationFilters(
|
||||||
|
case_insensitive=filters_config.get("case-insensitive", []),
|
||||||
|
case_sensitive=filters_config.get("case-sensitive"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data=settings,
|
||||||
|
message="Notification settings retrieved successfully",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get notification settings: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to get notification settings: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/notifications/settings", response_model=APIResponse)
|
||||||
|
async def update_notification_settings(settings: NotificationSettings) -> APIResponse:
|
||||||
|
"""Update notification settings"""
|
||||||
|
try:
|
||||||
|
# Update notifications config
|
||||||
|
notifications_config = {}
|
||||||
|
|
||||||
|
if settings.discord:
|
||||||
|
notifications_config["discord"] = {
|
||||||
|
"webhook": settings.discord.webhook,
|
||||||
|
"enabled": settings.discord.enabled,
|
||||||
|
}
|
||||||
|
|
||||||
|
if settings.telegram:
|
||||||
|
notifications_config["telegram"] = {
|
||||||
|
"token": settings.telegram.token,
|
||||||
|
"chat_id": settings.telegram.chat_id,
|
||||||
|
"enabled": settings.telegram.enabled,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Update filters config
|
||||||
|
filters_config: Dict[str, Any] = {}
|
||||||
|
if settings.filters.case_insensitive:
|
||||||
|
filters_config["case-insensitive"] = settings.filters.case_insensitive
|
||||||
|
if settings.filters.case_sensitive:
|
||||||
|
filters_config["case-sensitive"] = settings.filters.case_sensitive
|
||||||
|
|
||||||
|
# Save to config
|
||||||
|
if notifications_config:
|
||||||
|
config.update_section("notifications", notifications_config)
|
||||||
|
if filters_config:
|
||||||
|
config.update_section("filters", filters_config)
|
||||||
|
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data={"updated": True},
|
||||||
|
message="Notification settings updated successfully",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to update notification settings: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to update notification settings: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/notifications/test", response_model=APIResponse)
|
||||||
|
async def test_notification(test_request: NotificationTest) -> APIResponse:
|
||||||
|
"""Send a test notification"""
|
||||||
|
try:
|
||||||
|
success = await notification_service.send_test_notification(
|
||||||
|
test_request.service, test_request.message
|
||||||
|
)
|
||||||
|
|
||||||
|
if success:
|
||||||
|
return APIResponse(
|
||||||
|
success=True,
|
||||||
|
data={"sent": True},
|
||||||
|
message=f"Test notification sent to {test_request.service} successfully",
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return APIResponse(
|
||||||
|
success=False,
|
||||||
|
message=f"Failed to send test notification to {test_request.service}",
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send test notification: {e}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=500, detail=f"Failed to send test notification: {str(e)}"
|
||||||
|
) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/notifications/services", response_model=APIResponse)
async def get_notification_services() -> APIResponse:
    """Get available notification services and their status.

    Reports, per service, whether it is configured (credentials present)
    and whether it is active (not explicitly disabled in config).
    """
    try:
        notifications_config = config.notifications_config

        discord_cfg = notifications_config.get("discord", {})
        telegram_cfg = notifications_config.get("telegram", {})

        # Discord only needs a webhook URL.
        discord_ready = bool(discord_cfg.get("webhook"))

        # Telegram needs a token (legacy key: "api-key") AND a chat id
        # (legacy key: "chat-id").
        telegram_ready = bool(
            (telegram_cfg.get("token") or telegram_cfg.get("api-key"))
            and (telegram_cfg.get("chat_id") or telegram_cfg.get("chat-id"))
        )

        services = {
            "discord": {
                "name": "Discord",
                "enabled": discord_ready,
                "configured": discord_ready,
                "active": discord_cfg.get("enabled", True),
            },
            "telegram": {
                "name": "Telegram",
                "enabled": telegram_ready,
                "configured": telegram_ready,
                "active": telegram_cfg.get("enabled", True),
            },
        }

        return APIResponse(
            success=True,
            data=services,
            message="Notification services status retrieved successfully",
        )

    except Exception as e:
        logger.error(f"Failed to get notification services: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get notification services: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/notifications/settings/{service}", response_model=APIResponse)
async def delete_notification_service(service: str) -> APIResponse:
    """Delete/disable a notification service.

    Removes the service's section from the notifications configuration
    and persists the change. Deleting a service that is not configured
    is a no-op that still reports success.

    Raises:
        HTTPException: 400 for an unknown service name, 500 on any other
            failure.
    """
    try:
        if service not in ["discord", "telegram"]:
            raise HTTPException(
                status_code=400, detail="Service must be 'discord' or 'telegram'"
            )

        notifications_config = config.notifications_config.copy()
        if service in notifications_config:
            del notifications_config[service]
            config.update_section("notifications", notifications_config)

        return APIResponse(
            success=True,
            data={"deleted": service},
            message=f"{service.capitalize()} notification service deleted successfully",
        )

    except HTTPException:
        # Re-raise unchanged: without this, the 400 validation error above
        # would be swallowed by the generic handler and returned as a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to delete notification service {service}: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to delete notification service: {str(e)}"
        ) from e
|
||||||
212
leggend/api/routes/sync.py
Normal file
212
leggend/api/routes/sync.py
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
from typing import Optional
|
||||||
|
from fastapi import APIRouter, HTTPException, BackgroundTasks
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.common import APIResponse
|
||||||
|
from leggend.api.models.sync import SyncRequest, SchedulerConfig
|
||||||
|
from leggend.services.sync_service import SyncService
|
||||||
|
from leggend.background.scheduler import scheduler
|
||||||
|
from leggend.config import config
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
sync_service = SyncService()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/sync/status", response_model=APIResponse)
async def get_sync_status() -> APIResponse:
    """Get current sync status.

    Augments the sync service's status with the next scheduled sync time
    from the background scheduler, when one is available.
    """
    try:
        current_status = await sync_service.get_sync_status()

        # Attach scheduler information when a sync job is scheduled.
        upcoming = scheduler.get_next_sync_time()
        if upcoming:
            current_status.next_sync = upcoming

        return APIResponse(
            success=True, data=current_status, message="Sync status retrieved successfully"
        )

    except Exception as e:
        logger.error(f"Failed to get sync status: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get sync status: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sync", response_model=APIResponse)
async def trigger_sync(
    background_tasks: BackgroundTasks, sync_request: Optional[SyncRequest] = None
) -> APIResponse:
    """Trigger a manual sync operation.

    Schedules the sync as a FastAPI background task and returns
    immediately. Refuses to start while a sync is already running unless
    the request sets `force: true`.
    """
    try:
        force = sync_request.force if sync_request else False
        requested_accounts = sync_request.account_ids if sync_request else None

        # Guard against overlapping syncs unless explicitly forced.
        current_status = await sync_service.get_sync_status()
        if current_status.is_running and not force:
            return APIResponse(
                success=False,
                message="Sync is already running. Use 'force: true' to override.",
            )

        if requested_accounts:
            # Sync only the requested accounts, in the background.
            background_tasks.add_task(
                sync_service.sync_specific_accounts, requested_accounts, force
            )
            message = f"Started sync for {len(requested_accounts)} specific accounts"
        else:
            # No specific accounts requested: sync everything.
            background_tasks.add_task(sync_service.sync_all_accounts, force)
            message = "Started sync for all accounts"

        return APIResponse(
            success=True,
            data={"sync_started": True, "force": force},
            message=message,
        )

    except Exception as e:
        logger.error(f"Failed to trigger sync: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to trigger sync: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sync/now", response_model=APIResponse)
async def sync_now(sync_request: Optional[SyncRequest] = None) -> APIResponse:
    """Run sync synchronously and return results (slower, for testing)."""
    try:
        if sync_request and sync_request.account_ids:
            # Targeted sync of the requested accounts.
            result = await sync_service.sync_specific_accounts(
                sync_request.account_ids, sync_request.force
            )
        else:
            # Full sync; honour the force flag when a request body exists.
            force = sync_request.force if sync_request else False
            result = await sync_service.sync_all_accounts(force)

        if result.success:
            outcome = "Sync completed"
        else:
            outcome = f"Sync failed with {len(result.errors)} errors"

        return APIResponse(success=result.success, data=result, message=outcome)

    except Exception as e:
        logger.error(f"Failed to run sync: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to run sync: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/sync/scheduler", response_model=APIResponse)
async def get_scheduler_config() -> APIResponse:
    """Get current scheduler configuration.

    Returns the configured schedule plus runtime details: the next
    scheduled sync time (ISO 8601 or null) and whether the scheduler is
    currently running.
    """
    try:
        scheduler_config = config.scheduler_config

        next_sync_time = scheduler.get_next_sync_time()
        next_sync_iso = next_sync_time.isoformat() if next_sync_time else None

        # Defensive: the module-level scheduler object should always have
        # a `scheduler` attribute, but fall back to False if not.
        is_running = False
        if hasattr(scheduler, "scheduler"):
            is_running = scheduler.scheduler.running

        response_data = dict(scheduler_config)
        response_data["next_scheduled_sync"] = next_sync_iso
        response_data["is_running"] = is_running

        return APIResponse(
            success=True,
            data=response_data,
            message="Scheduler configuration retrieved successfully",
        )

    except Exception as e:
        logger.error(f"Failed to get scheduler config: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get scheduler config: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.put("/sync/scheduler", response_model=APIResponse)
async def update_scheduler_config(scheduler_config: SchedulerConfig) -> APIResponse:
    """Update scheduler configuration.

    Validates an optional cron expression, persists the schedule to the
    configuration file, and reschedules the background sync job.

    Raises:
        HTTPException: 400 for an invalid cron expression, 500 on any
            other failure.
    """
    try:
        # Validate cron expression if provided: five space-separated fields.
        if scheduler_config.cron:
            cron_parts = scheduler_config.cron.split()
            if len(cron_parts) != 5:
                raise HTTPException(
                    status_code=400,
                    detail="Invalid cron expression: Cron expression must have 5 parts: minute hour day month day_of_week",
                )

        # Persist configuration (drop unset fields).
        schedule_data = scheduler_config.dict(exclude_none=True)
        config.update_section("scheduler", {"sync": schedule_data})

        # Reschedule the job with the new settings.
        scheduler.reschedule_sync(schedule_data)

        return APIResponse(
            success=True,
            data=schedule_data,
            message="Scheduler configuration updated successfully",
        )

    except HTTPException:
        # Re-raise unchanged: without this, the 400 validation error above
        # would be caught by the generic handler and returned as a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to update scheduler config: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to update scheduler config: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sync/scheduler/start", response_model=APIResponse)
async def start_scheduler() -> APIResponse:
    """Start the background scheduler (idempotent)."""
    try:
        # Already running: report success without restarting.
        if scheduler.scheduler.running:
            return APIResponse(success=True, message="Scheduler is already running")

        scheduler.start()
        return APIResponse(success=True, message="Scheduler started successfully")

    except Exception as e:
        logger.error(f"Failed to start scheduler: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to start scheduler: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/sync/scheduler/stop", response_model=APIResponse)
async def stop_scheduler() -> APIResponse:
    """Stop the background scheduler (idempotent)."""
    try:
        # Already stopped: report success without touching the scheduler.
        if not scheduler.scheduler.running:
            return APIResponse(success=True, message="Scheduler is already stopped")

        scheduler.shutdown()
        return APIResponse(success=True, message="Scheduler stopped successfully")

    except Exception as e:
        logger.error(f"Failed to stop scheduler: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to stop scheduler: {str(e)}"
        ) from e
|
||||||
209
leggend/api/routes/transactions.py
Normal file
209
leggend/api/routes/transactions.py
Normal file
@@ -0,0 +1,209 @@
|
|||||||
|
from typing import Optional, List, Union
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from fastapi import APIRouter, HTTPException, Query
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.common import APIResponse
|
||||||
|
from leggend.api.models.accounts import Transaction, TransactionSummary
|
||||||
|
from leggend.services.database_service import DatabaseService
|
||||||
|
|
||||||
|
router = APIRouter()
|
||||||
|
database_service = DatabaseService()
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/transactions", response_model=APIResponse)
async def get_all_transactions(
    limit: Optional[int] = Query(default=100, le=500),
    offset: Optional[int] = Query(default=0, ge=0),
    summary_only: bool = Query(
        default=True, description="Return transaction summaries only"
    ),
    hide_missing_ids: bool = Query(
        default=True, description="Hide transactions without internalTransactionId"
    ),
    date_from: Optional[str] = Query(
        default=None, description="Filter from date (YYYY-MM-DD)"
    ),
    date_to: Optional[str] = Query(
        default=None, description="Filter to date (YYYY-MM-DD)"
    ),
    min_amount: Optional[float] = Query(
        default=None, description="Minimum transaction amount"
    ),
    max_amount: Optional[float] = Query(
        default=None, description="Maximum transaction amount"
    ),
    search: Optional[str] = Query(
        default=None, description="Search in transaction descriptions"
    ),
    account_id: Optional[str] = Query(default=None, description="Filter by account ID"),
) -> APIResponse:
    """Get all transactions from database with filtering options.

    Reads from the local database (not the GoCardless API), applies the
    requested filters, and returns either compact summaries or full
    transaction records, with pagination info in the message.
    """
    try:
        # Get transactions from database instead of GoCardless API
        db_transactions = await database_service.get_transactions_from_db(
            account_id=account_id,
            limit=limit,
            offset=offset,
            date_from=date_from,
            date_to=date_to,
            min_amount=min_amount,
            max_amount=max_amount,
            search=search,
            hide_missing_ids=hide_missing_ids,
        )

        # Total count for pagination, applying exactly the same filters
        # (including hide_missing_ids) so the reported total matches the
        # filtered result set. Issue this query exactly once.
        total_transactions = await database_service.get_transaction_count_from_db(
            account_id=account_id,
            date_from=date_from,
            date_to=date_to,
            min_amount=min_amount,
            max_amount=max_amount,
            search=search,
            hide_missing_ids=hide_missing_ids,
        )

        data: Union[List[TransactionSummary], List[Transaction]]

        if summary_only:
            # Return simplified transaction summaries
            data = [
                TransactionSummary(
                    internal_transaction_id=txn["internalTransactionId"],
                    date=txn["transactionDate"],
                    description=txn["description"],
                    amount=txn["transactionValue"],
                    currency=txn["transactionCurrency"],
                    status=txn["transactionStatus"],
                    account_id=txn["accountId"],
                )
                for txn in db_transactions
            ]
        else:
            # Return full transaction details
            data = [
                Transaction(
                    internal_transaction_id=txn["internalTransactionId"],
                    institution_id=txn["institutionId"],
                    iban=txn["iban"],
                    account_id=txn["accountId"],
                    transaction_date=txn["transactionDate"],
                    description=txn["description"],
                    transaction_value=txn["transactionValue"],
                    transaction_currency=txn["transactionCurrency"],
                    transaction_status=txn["transactionStatus"],
                    raw_transaction=txn["rawTransaction"],
                )
                for txn in db_transactions
            ]

        actual_offset = offset or 0
        return APIResponse(
            success=True,
            data=data,
            message=f"Retrieved {len(data)} transactions (showing {actual_offset + 1}-{actual_offset + len(data)} of {total_transactions})",
        )

    except Exception as e:
        logger.error(f"Failed to get transactions from database: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get transactions: {str(e)}"
        ) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/transactions/stats", response_model=APIResponse)
async def get_transaction_stats(
    days: int = Query(default=30, description="Number of days to include in stats"),
    account_id: Optional[str] = Query(default=None, description="Filter by account ID"),
    hide_missing_ids: bool = Query(
        default=True, description="Hide transactions without internalTransactionId"
    ),
) -> APIResponse:
    """Get transaction statistics for the last N days from database.

    Computes income/expense totals, status counts, the mean transaction
    value and the number of distinct accounts over the window.
    """
    try:
        # Window: the last `days` days, ending now.
        end_date = datetime.now()
        start_date = end_date - timedelta(days=days)

        # Fetch every matching transaction in the window (no limit).
        recent_transactions = await database_service.get_transactions_from_db(
            account_id=account_id,
            date_from=start_date.isoformat(),
            date_to=end_date.isoformat(),
            limit=None,  # Get all matching transactions for stats
            hide_missing_ids=hide_missing_ids,
        )

        # Single pass: running totals, status counts, distinct accounts.
        total_value = 0.0
        total_income = 0.0
        total_expenses = 0.0
        booked_count = 0
        pending_count = 0
        seen_accounts = set()
        for txn in recent_transactions:
            value = txn["transactionValue"]
            total_value += value
            if value > 0:
                total_income += value
            elif value < 0:
                total_expenses += abs(value)
            txn_status = txn["transactionStatus"]
            if txn_status == "booked":
                booked_count += 1
            elif txn_status == "pending":
                pending_count += 1
            seen_accounts.add(txn["accountId"])

        total_transactions = len(recent_transactions)
        net_change = total_income - total_expenses

        stats = {
            "period_days": days,
            "total_transactions": total_transactions,
            "booked_transactions": booked_count,
            "pending_transactions": pending_count,
            "total_income": round(total_income, 2),
            "total_expenses": round(total_expenses, 2),
            "net_change": round(net_change, 2),
            "average_transaction": round(total_value / total_transactions, 2)
            if total_transactions > 0
            else 0,
            "accounts_included": len(seen_accounts),
        }

        return APIResponse(
            success=True,
            data=stats,
            message=f"Transaction statistics for last {days} days",
        )

    except Exception as e:
        logger.error(f"Failed to get transaction stats from database: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get transaction stats: {str(e)}"
        ) from e
|
||||||
168
leggend/background/scheduler.py
Normal file
168
leggend/background/scheduler.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.config import config
|
||||||
|
from leggend.services.sync_service import SyncService
|
||||||
|
from leggend.services.notification_service import NotificationService
|
||||||
|
|
||||||
|
|
||||||
|
class BackgroundScheduler:
    """Owns the APScheduler instance that runs periodic transaction syncs.

    Wraps an AsyncIOScheduler with a single "daily_sync" job whose
    schedule comes from the application configuration, plus retry
    handling and best-effort failure notifications.
    """

    def __init__(self):
        self.scheduler = AsyncIOScheduler()
        self.sync_service = SyncService()
        self.notification_service = NotificationService()
        # Retry policy for failed scheduled syncs.
        self.max_retries = 3
        self.retry_delay = 300  # 5 minutes

    def start(self):
        """Start the scheduler and configure sync jobs based on configuration."""
        schedule_config = config.scheduler_config.get("sync", {})

        if not schedule_config.get("enabled", True):
            # Scheduler still starts (so one-off jobs work), but no sync job.
            logger.info("Sync scheduling is disabled in configuration")
            self.scheduler.start()
            return

        trigger = self._parse_cron_config(schedule_config)
        if not trigger:
            # Invalid schedule: keep behavior of not starting the scheduler.
            return

        self._add_sync_job(trigger)
        self.scheduler.start()
        logger.info(f"Background scheduler started with sync job: {trigger}")

    def shutdown(self):
        """Stop the scheduler if it is running."""
        if self.scheduler.running:
            self.scheduler.shutdown()
            logger.info("Background scheduler shutdown")

    def reschedule_sync(self, schedule_config: dict):
        """Reschedule the sync job with new configuration."""
        if self.scheduler.running:
            try:
                self.scheduler.remove_job("daily_sync")
                logger.info("Removed existing sync job")
            except Exception:
                pass  # Job might not exist

        if not schedule_config.get("enabled", True):
            logger.info("Sync scheduling disabled")
            return

        trigger = self._parse_cron_config(schedule_config)
        if not trigger:
            return

        self._add_sync_job(trigger)
        logger.info(f"Rescheduled sync job with: {trigger}")

    def _add_sync_job(self, trigger):
        """Register the recurring "daily_sync" job (single instance at a time)."""
        self.scheduler.add_job(
            self._run_sync,
            trigger,
            id="daily_sync",
            name="Scheduled sync of all transactions",
            max_instances=1,
        )

    def _parse_cron_config(self, schedule_config: dict):
        """Build a CronTrigger from config; return None if the cron is invalid.

        Accepts either a 5-field "cron" expression (minute hour day month
        day_of_week) or "hour"/"minute" keys (default: daily at 03:00).
        Note: returns Optional[CronTrigger] — callers must handle None.
        """
        if schedule_config.get("cron"):
            try:
                cron_parts = schedule_config["cron"].split()
                if len(cron_parts) == 5:
                    minute, hour, day, month, day_of_week = cron_parts
                    # "*" fields are passed as None so CronTrigger uses its
                    # own wildcard defaults.
                    return CronTrigger(
                        minute=minute,
                        hour=hour,
                        day=day if day != "*" else None,
                        month=month if month != "*" else None,
                        day_of_week=day_of_week if day_of_week != "*" else None,
                    )
                logger.error(f"Invalid cron expression: {schedule_config['cron']}")
                return None
            except Exception as e:
                logger.error(f"Error parsing cron expression: {e}")
                return None
        # Use hour/minute configuration (default: 3:00 AM daily)
        hour = schedule_config.get("hour", 3)
        minute = schedule_config.get("minute", 0)
        return CronTrigger(hour=hour, minute=minute)

    async def _notify_failure(self, payload: dict, label: str):
        """Best-effort failure notification; errors are logged, never raised."""
        try:
            await self.notification_service.send_expiry_notification(payload)
        except Exception as notification_error:
            logger.error(f"Failed to send {label}: {notification_error}")

    async def _run_sync(self, retry_count: int = 0):
        """Run sync with enhanced error handling and retry logic.

        On failure, sends a notification and schedules a one-off retry up
        to `max_retries` attempts, `retry_delay` seconds apart.
        """
        try:
            logger.info("Starting scheduled sync job")
            await self.sync_service.sync_all_accounts()
            logger.info("Scheduled sync job completed successfully")
        except Exception as e:
            logger.error(
                f"Scheduled sync job failed (attempt {retry_count + 1}/{self.max_retries}): {e}"
            )

            await self._notify_failure(
                {
                    "type": "sync_failure",
                    "error": str(e),
                    "retry_count": retry_count + 1,
                    "max_retries": self.max_retries,
                },
                "failure notification",
            )

            if retry_count < self.max_retries - 1:
                import datetime

                logger.info(f"Retrying sync job in {self.retry_delay} seconds...")
                retry_time = datetime.datetime.now() + datetime.timedelta(
                    seconds=self.retry_delay
                )
                # replace_existing avoids ConflictingIdError if a stale
                # retry job with the same id is still registered.
                self.scheduler.add_job(
                    self._run_sync,
                    "date",
                    args=[retry_count + 1],
                    id=f"sync_retry_{retry_count + 1}",
                    run_date=retry_time,
                    replace_existing=True,
                )
            else:
                logger.error("Maximum retries exceeded for sync job")
                await self._notify_failure(
                    {
                        "type": "sync_final_failure",
                        "error": str(e),
                        "retry_count": retry_count + 1,
                    },
                    "final failure notification",
                )

    def get_next_sync_time(self):
        """Return the next scheduled run of "daily_sync", or None if absent."""
        job = self.scheduler.get_job("daily_sync")
        if job:
            return job.next_run_time
        return None


scheduler = BackgroundScheduler()
|
||||||
143
leggend/config.py
Normal file
143
leggend/config.py
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
import os
|
||||||
|
import tomllib
|
||||||
|
import tomli_w
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Any, Optional
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
|
||||||
|
class Config:
    """Process-wide configuration singleton backed by a TOML file.

    The configuration is read from the path given by the
    LEGGEN_CONFIG_FILE environment variable (default:
    ~/.config/leggen/config.toml), cached in memory, and written back to
    disk on every update.
    """

    _instance = None
    _config = None  # cached parsed configuration mapping
    _config_path = None  # path the configuration was last loaded from / saved to

    def __new__(cls):
        # Classic singleton: every instantiation returns the same object.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def _default_config_path(self) -> str:
        """Resolve the configuration file path from the environment."""
        return os.environ.get(
            "LEGGEN_CONFIG_FILE",
            str(Path.home() / ".config" / "leggen" / "config.toml"),
        )

    def _ensure_loaded(self) -> Dict[str, Any]:
        """Return the cached config, loading it on first use.

        Raises:
            RuntimeError: if loading did not produce a configuration.
        """
        if self._config is None:
            self.load_config()
        if self._config is None:
            raise RuntimeError("Failed to load config")
        return self._config

    def load_config(self, config_path: Optional[str] = None) -> Dict[str, Any]:
        """Load and cache the TOML configuration.

        Returns the cached mapping if one is already loaded; note that in
        that case a different `config_path` argument is ignored.

        Raises:
            FileNotFoundError: if the configuration file does not exist.
        """
        if self._config is not None:
            return self._config

        if config_path is None:
            config_path = self._default_config_path()

        self._config_path = config_path

        try:
            with open(config_path, "rb") as f:
                self._config = tomllib.load(f)
            logger.info(f"Configuration loaded from {config_path}")
        except FileNotFoundError:
            logger.error(f"Configuration file not found: {config_path}")
            raise
        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            raise

        return self._config

    def save_config(
        self,
        config_data: Optional[Dict[str, Any]] = None,
        config_path: Optional[str] = None,
    ) -> None:
        """Save configuration to TOML file.

        Defaults to the in-memory config and the last-used path; on
        success the in-memory cache and path are updated to match.

        Raises:
            ValueError: if no path or no data can be resolved.
        """
        if config_data is None:
            config_data = self._config

        if config_path is None:
            config_path = self._config_path or self._default_config_path()

        if config_path is None:
            raise ValueError("No config path specified")
        if config_data is None:
            raise ValueError("No config data to save")

        # Ensure directory exists
        Path(config_path).parent.mkdir(parents=True, exist_ok=True)

        try:
            with open(config_path, "wb") as f:
                tomli_w.dump(config_data, f)

            # Update in-memory config
            self._config = config_data
            self._config_path = config_path
            logger.info(f"Configuration saved to {config_path}")
        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            raise

    def update_config(self, section: str, key: str, value: Any) -> None:
        """Update a specific configuration value and persist to disk."""
        cfg = self._ensure_loaded()
        if section not in cfg:
            cfg[section] = {}
        cfg[section][key] = value
        self.save_config()

    def update_section(self, section: str, data: Dict[str, Any]) -> None:
        """Replace an entire configuration section and persist to disk."""
        cfg = self._ensure_loaded()
        cfg[section] = data
        self.save_config()

    @property
    def config(self) -> Dict[str, Any]:
        # Lazily loaded on first access.
        return self._ensure_loaded()

    @property
    def gocardless_config(self) -> Dict[str, str]:
        return self.config.get("gocardless", {})

    @property
    def database_config(self) -> Dict[str, Any]:
        return self.config.get("database", {})

    @property
    def notifications_config(self) -> Dict[str, Any]:
        return self.config.get("notifications", {})

    @property
    def filters_config(self) -> Dict[str, Any]:
        return self.config.get("filters", {})

    @property
    def scheduler_config(self) -> Dict[str, Any]:
        """Get scheduler configuration with defaults."""
        default_schedule = {
            "sync": {
                "enabled": True,
                "hour": 3,
                "minute": 0,
                "cron": None,  # Optional custom cron expression
            }
        }
        return self.config.get("scheduler", default_schedule)


config = Config()
|
||||||
161
leggend/main.py
Normal file
161
leggend/main.py
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from importlib import metadata
|
||||||
|
|
||||||
|
import uvicorn
|
||||||
|
from fastapi import FastAPI
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.routes import banks, accounts, sync, notifications, transactions
|
||||||
|
from leggend.background.scheduler import scheduler
|
||||||
|
from leggend.config import config
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: bring services up before serving, tear down after.

    Startup order: configuration, database migrations, background
    scheduler. Any startup failure is logged and re-raised so the server
    refuses to start.
    """
    logger.info("Starting leggend service...")

    # Load configuration
    try:
        config.load_config()
        logger.info("Configuration loaded successfully")
    except Exception as exc:
        logger.error(f"Failed to load configuration: {exc}")
        raise

    # Run database migrations
    try:
        # Local import, mirroring the module's lazy-import placement.
        from leggend.services.database_service import DatabaseService

        db_service = DatabaseService()
        await db_service.run_migrations_if_needed()
        logger.info("Database migrations completed")
    except Exception as exc:
        logger.error(f"Database migration failed: {exc}")
        raise

    # Start background scheduler
    scheduler.start()
    logger.info("Background scheduler started")

    yield

    # Shutdown
    logger.info("Shutting down leggend service...")
    scheduler.shutdown()
|
||||||
|
|
||||||
|
|
||||||
|
def _get_package_version() -> str:
    """Resolve the installed leggen package version, or "unknown" if the
    package metadata is unavailable (e.g. running from a source checkout)."""
    try:
        return metadata.version("leggen")
    except metadata.PackageNotFoundError:
        return "unknown"


def create_app() -> FastAPI:
    """Build and configure the FastAPI application.

    Wires the lifespan handler, CORS for the frontend/dev servers, all
    ``/api/v1`` routers, and the root/health endpoints.

    Returns:
        The fully configured FastAPI instance.
    """
    # Previously this version lookup was duplicated verbatim here and in the
    # root() route; it is now centralized in _get_package_version().
    version = _get_package_version()

    app = FastAPI(
        title="Leggend API",
        description="Open Banking API for Leggen",
        version=version,
        lifespan=lifespan,
    )

    # Add CORS middleware
    app.add_middleware(
        CORSMiddleware,
        allow_origins=[
            "http://localhost:3000",
            "http://localhost:5173",
            "http://frontend:80",
        ],  # Frontend container and dev servers
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Include API routes
    app.include_router(banks.router, prefix="/api/v1", tags=["banks"])
    app.include_router(accounts.router, prefix="/api/v1", tags=["accounts"])
    app.include_router(transactions.router, prefix="/api/v1", tags=["transactions"])
    app.include_router(sync.router, prefix="/api/v1", tags=["sync"])
    app.include_router(notifications.router, prefix="/api/v1", tags=["notifications"])

    @app.get("/")
    async def root():
        """Service banner reporting the running version."""
        return {"message": "Leggend API is running", "version": _get_package_version()}

    @app.get("/api/v1/health")
    async def health():
        """Health check endpoint for API connectivity"""
        try:
            from leggend.api.models.common import APIResponse

            # NOTE(review): reaches into the private attribute of the config
            # singleton; a public "is_loaded" accessor would be cleaner.
            config_loaded = config._config is not None

            return APIResponse(
                success=True,
                data={
                    "status": "healthy",
                    "config_loaded": config_loaded,
                    "message": "API is running and responsive",
                },
                message="Health check successful",
            )
        except Exception as e:
            logger.error(f"Health check failed: {e}")
            from leggend.api.models.common import APIResponse

            return APIResponse(
                success=False,
                data={"status": "unhealthy", "error": str(e)},
                message="Health check failed",
            )

    return app
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """CLI entry point: parse arguments and launch the uvicorn server."""
    import argparse

    parser = argparse.ArgumentParser(description="Start the Leggend API service")
    parser.add_argument(
        "--reload", action="store_true", help="Enable auto-reload for development"
    )
    parser.add_argument(
        "--host", default="0.0.0.0", help="Host to bind to (default: 0.0.0.0)"
    )
    parser.add_argument(
        "--port", type=int, default=8000, help="Port to bind to (default: 8000)"
    )
    args = parser.parse_args()

    # Options shared by both launch modes.
    server_opts = dict(
        host=args.host,
        port=args.port,
        log_level="info",
        access_log=True,
    )

    if args.reload:
        # Auto-reload requires an import-string target plus factory mode.
        uvicorn.run(
            "leggend.main:create_app",
            factory=True,
            reload=True,
            reload_dirs=["leggend", "leggen"],  # Watch both directories
            **server_opts,
        )
    else:
        uvicorn.run(create_app(), **server_opts)


if __name__ == "__main__":
    main()
|
||||||
584
leggend/services/database_service.py
Normal file
584
leggend/services/database_service.py
Normal file
@@ -0,0 +1,584 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Dict, Any, Optional
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.config import config
|
||||||
|
import leggen.database.sqlite as sqlite_db
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseService:
    """Persistence layer for leggen's SQLite database.

    Wraps balance/transaction/account storage and retrieval plus one-off
    schema migrations. Every operation is a no-op (with a warning or an
    empty result) when SQLite is disabled via the ``database.sqlite``
    configuration flag. The database lives at ~/.config/leggen/leggen.db.
    """

    def __init__(self):
        self.db_config = config.database_config
        # SQLite persistence defaults to enabled when the key is absent.
        self.sqlite_enabled = self.db_config.get("sqlite", True)

    async def persist_balance(
        self, account_id: str, balance_data: Dict[str, Any]
    ) -> None:
        """Persist account balance data"""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, skipping balance persistence")
            return

        await self._persist_balance_sqlite(account_id, balance_data)

    async def persist_transactions(
        self, account_id: str, transactions: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Persist transactions and return new transactions"""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, skipping transaction persistence")
            return transactions

        return await self._persist_transactions_sqlite(account_id, transactions)

    def process_transactions(
        self,
        account_id: str,
        account_info: Dict[str, Any],
        transaction_data: Dict[str, Any],
    ) -> List[Dict[str, Any]]:
        """Process raw transaction data into standardized format.

        Flattens the GoCardless ``booked`` and ``pending`` buckets (in that
        order) into one list of normalized transaction dicts.
        """
        transactions = []
        buckets = transaction_data.get("transactions", {})

        # Process booked transactions first, then pending (order preserved).
        for status in ("booked", "pending"):
            for transaction in buckets.get(status, []):
                processed = self._process_single_transaction(
                    account_id, account_info, transaction, status
                )
                transactions.append(processed)

        return transactions

    def _process_single_transaction(
        self,
        account_id: str,
        account_info: Dict[str, Any],
        transaction: Dict[str, Any],
        status: str,
    ) -> Dict[str, Any]:
        """Process a single transaction into standardized format.

        Raises:
            ValueError: if the transaction carries neither a booking nor a
                value date.
        """
        # Extract dates: prefer the *DateTime fields, fall back to date-only.
        booked_date = transaction.get("bookingDateTime") or transaction.get(
            "bookingDate"
        )
        value_date = transaction.get("valueDateTime") or transaction.get("valueDate")

        if booked_date and value_date:
            # Use the earliest of the two as the canonical transaction date.
            min_date = min(
                datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
            )
        else:
            date_str = booked_date or value_date
            if not date_str:
                raise ValueError("No valid date found in transaction")
            min_date = datetime.fromisoformat(date_str)

        # Extract amount and currency
        transaction_amount = transaction.get("transactionAmount", {})
        amount = float(transaction_amount.get("amount", 0))
        currency = transaction_amount.get("currency", "")

        # Extract description, falling back to the array variant joined by ','.
        description = transaction.get(
            "remittanceInformationUnstructured",
            ",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
        )

        return {
            "internalTransactionId": transaction.get("internalTransactionId"),
            "institutionId": account_info["institution_id"],
            "iban": account_info.get("iban", "N/A"),
            "transactionDate": min_date,
            "description": description,
            "transactionValue": amount,
            "transactionCurrency": currency,
            "transactionStatus": status,
            "accountId": account_id,
            "rawTransaction": transaction,
        }

    async def get_transactions_from_db(
        self,
        account_id: Optional[str] = None,
        limit: Optional[int] = 100,
        offset: Optional[int] = 0,
        date_from: Optional[str] = None,
        date_to: Optional[str] = None,
        min_amount: Optional[float] = None,
        max_amount: Optional[float] = None,
        search: Optional[str] = None,
        hide_missing_ids: bool = True,
    ) -> List[Dict[str, Any]]:
        """Get transactions from SQLite database.

        Returns an empty list when SQLite is disabled or the query fails.
        """
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, cannot read transactions")
            return []

        try:
            transactions = sqlite_db.get_transactions(
                account_id=account_id,
                limit=limit or 100,
                offset=offset or 0,
                date_from=date_from,
                date_to=date_to,
                min_amount=min_amount,
                max_amount=max_amount,
                search=search,
                hide_missing_ids=hide_missing_ids,
            )
            logger.debug(f"Retrieved {len(transactions)} transactions from database")
            return transactions
        except Exception as e:
            logger.error(f"Failed to get transactions from database: {e}")
            return []

    async def get_transaction_count_from_db(
        self,
        account_id: Optional[str] = None,
        date_from: Optional[str] = None,
        date_to: Optional[str] = None,
        min_amount: Optional[float] = None,
        max_amount: Optional[float] = None,
        search: Optional[str] = None,
        hide_missing_ids: bool = True,
    ) -> int:
        """Get total count of transactions from SQLite database.

        Returns 0 when SQLite is disabled or the query fails.
        """
        if not self.sqlite_enabled:
            return 0

        try:
            filters = {
                "date_from": date_from,
                "date_to": date_to,
                "min_amount": min_amount,
                "max_amount": max_amount,
                "search": search,
            }
            # Remove None values so only active filters are forwarded.
            filters = {k: v for k, v in filters.items() if v is not None}

            count = sqlite_db.get_transaction_count(
                account_id=account_id, hide_missing_ids=hide_missing_ids, **filters
            )
            logger.debug(f"Total transaction count: {count}")
            return count
        except Exception as e:
            logger.error(f"Failed to get transaction count from database: {e}")
            return 0

    async def get_balances_from_db(
        self, account_id: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Get balances from SQLite database (empty list on failure/disabled)."""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, cannot read balances")
            return []

        try:
            balances = sqlite_db.get_balances(account_id=account_id)
            logger.debug(f"Retrieved {len(balances)} balances from database")
            return balances
        except Exception as e:
            logger.error(f"Failed to get balances from database: {e}")
            return []

    async def get_account_summary_from_db(
        self, account_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get basic account info from SQLite database (avoids GoCardless call)"""
        if not self.sqlite_enabled:
            return None

        try:
            summary = sqlite_db.get_account_summary(account_id)
            if summary:
                logger.debug(
                    f"Retrieved account summary from database for {account_id}"
                )
            return summary
        except Exception as e:
            logger.error(f"Failed to get account summary from database: {e}")
            return None

    async def persist_account_details(self, account_data: Dict[str, Any]) -> None:
        """Persist account details to database"""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, skipping account persistence")
            return

        await self._persist_account_details_sqlite(account_data)

    async def get_accounts_from_db(
        self, account_ids: Optional[List[str]] = None
    ) -> List[Dict[str, Any]]:
        """Get account details from database (empty list on failure/disabled)."""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, cannot read accounts")
            return []

        try:
            accounts = sqlite_db.get_accounts(account_ids=account_ids)
            logger.debug(f"Retrieved {len(accounts)} accounts from database")
            return accounts
        except Exception as e:
            logger.error(f"Failed to get accounts from database: {e}")
            return []

    async def get_account_details_from_db(
        self, account_id: str
    ) -> Optional[Dict[str, Any]]:
        """Get specific account details from database (None on failure/disabled)."""
        if not self.sqlite_enabled:
            logger.warning("SQLite database disabled, cannot read account")
            return None

        try:
            account = sqlite_db.get_account(account_id)
            if account:
                logger.debug(
                    f"Retrieved account details from database for {account_id}"
                )
            return account
        except Exception as e:
            logger.error(f"Failed to get account details from database: {e}")
            return None

    async def run_migrations_if_needed(self):
        """Run all necessary database migrations"""
        if not self.sqlite_enabled:
            logger.info("SQLite database disabled, skipping migrations")
            return

        await self._migrate_balance_timestamps_if_needed()

    async def _migrate_balance_timestamps_if_needed(self):
        """Check and migrate balance timestamps if needed"""
        try:
            if await self._check_balance_timestamp_migration_needed():
                logger.info("Balance timestamp migration needed, starting...")
                await self._migrate_balance_timestamps()
                logger.info("Balance timestamp migration completed")
            else:
                logger.info("Balance timestamps are already consistent")
        except Exception as e:
            logger.error(f"Balance timestamp migration failed: {e}")
            raise

    async def _check_balance_timestamp_migration_needed(self) -> bool:
        """Check if balance timestamps need migration.

        Migration is needed when the balances table holds a mix of REAL
        (Unix) and TEXT (ISO datetime) timestamps.
        """
        from pathlib import Path

        db_path = Path.home() / ".config" / "leggen" / "leggen.db"
        if not db_path.exists():
            return False

        try:
            conn = sqlite3.connect(str(db_path))
            try:
                cursor = conn.cursor()

                # Check for mixed timestamp types
                cursor.execute("""
                    SELECT typeof(timestamp) as type, COUNT(*) as count
                    FROM balances
                    GROUP BY typeof(timestamp)
                """)
                types = cursor.fetchall()
            finally:
                # FIX: previously the connection leaked when the query raised.
                conn.close()

            # If we have both 'real' and 'text' types, migration is needed
            type_names = [row[0] for row in types]
            return "real" in type_names and "text" in type_names

        except Exception as e:
            logger.error(f"Failed to check migration status: {e}")
            return False

    async def _migrate_balance_timestamps(self):
        """Convert all Unix timestamps to datetime strings"""
        from pathlib import Path

        db_path = Path.home() / ".config" / "leggen" / "leggen.db"
        if not db_path.exists():
            logger.warning("Database file not found, skipping migration")
            return

        conn = None
        try:
            conn = sqlite3.connect(str(db_path))
            cursor = conn.cursor()

            # Get all balances with REAL timestamps
            cursor.execute("""
                SELECT id, timestamp
                FROM balances
                WHERE typeof(timestamp) = 'real'
                ORDER BY id
            """)

            unix_records = cursor.fetchall()
            total_records = len(unix_records)

            if total_records == 0:
                logger.info("No Unix timestamps found to migrate")
                return

            logger.info(
                f"Migrating {total_records} balance records from Unix to datetime format"
            )

            # Convert and update in batches
            batch_size = 100
            migrated_count = 0

            for i in range(0, total_records, batch_size):
                batch = unix_records[i : i + batch_size]

                for record_id, unix_timestamp in batch:
                    try:
                        # Convert Unix timestamp to datetime string
                        dt_string = self._unix_to_datetime_string(float(unix_timestamp))

                        # Update the record
                        cursor.execute(
                            """
                            UPDATE balances
                            SET timestamp = ?
                            WHERE id = ?
                        """,
                            (dt_string, record_id),
                        )

                        migrated_count += 1

                        if migrated_count % 100 == 0:
                            logger.info(
                                f"Migrated {migrated_count}/{total_records} balance records"
                            )

                    except Exception as e:
                        # Best-effort: a single bad record must not abort the run.
                        logger.error(f"Failed to migrate record {record_id}: {e}")
                        continue

                # Commit batch
                conn.commit()

            logger.info(f"Successfully migrated {migrated_count} balance records")

        except Exception as e:
            logger.error(f"Balance timestamp migration failed: {e}")
            raise
        finally:
            # FIX: close on every exit path (previously leaked on exception).
            if conn is not None:
                conn.close()

    def _unix_to_datetime_string(self, unix_timestamp: float) -> str:
        """Convert Unix timestamp to an ISO-8601 datetime string (local time)."""
        dt = datetime.fromtimestamp(unix_timestamp)
        return dt.isoformat()

    async def _persist_balance_sqlite(
        self, account_id: str, balance_data: Dict[str, Any]
    ) -> None:
        """Persist balance to SQLite, creating the table/indexes on demand.

        Raises:
            Exception: re-raised after logging on any persistence failure.
        """
        try:
            from pathlib import Path

            db_path = Path.home() / ".config" / "leggen" / "leggen.db"
            db_path.parent.mkdir(parents=True, exist_ok=True)
            conn = sqlite3.connect(str(db_path))
            try:
                cursor = conn.cursor()

                # Create the balances table if it doesn't exist
                cursor.execute(
                    """CREATE TABLE IF NOT EXISTS balances (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        account_id TEXT,
                        bank TEXT,
                        status TEXT,
                        iban TEXT,
                        amount REAL,
                        currency TEXT,
                        type TEXT,
                        timestamp DATETIME
                    )"""
                )

                # Create indexes for better performance
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_balances_account_id
                       ON balances(account_id)"""
                )
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_balances_timestamp
                       ON balances(timestamp)"""
                )
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp
                       ON balances(account_id, type, timestamp)"""
                )

                # Convert GoCardless balance format to our format and persist
                for balance in balance_data.get("balances", []):
                    balance_amount = balance["balanceAmount"]

                    try:
                        cursor.execute(
                            """INSERT INTO balances (
                                account_id,
                                bank,
                                status,
                                iban,
                                amount,
                                currency,
                                type,
                                timestamp
                            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                            (
                                account_id,
                                balance_data.get("institution_id", "unknown"),
                                balance_data.get("account_status"),
                                balance_data.get("iban", "N/A"),
                                float(balance_amount["amount"]),
                                balance_amount["currency"],
                                balance["balanceType"],
                                datetime.now().isoformat(),
                            ),
                        )
                    except sqlite3.IntegrityError:
                        logger.warning(f"Skipped duplicate balance for {account_id}")

                conn.commit()
            finally:
                # FIX: close on every exit path (previously leaked on exception).
                conn.close()

            logger.info(f"Persisted balances to SQLite for account {account_id}")
        except Exception as e:
            logger.error(f"Failed to persist balances to SQLite: {e}")
            raise

    async def _persist_transactions_sqlite(
        self, account_id: str, transactions: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Persist transactions to SQLite.

        Duplicate primary keys (internalTransactionId) are skipped; only
        rows actually inserted are returned.

        Raises:
            Exception: re-raised after logging on any persistence failure.
        """
        try:
            import json

            from pathlib import Path

            db_path = Path.home() / ".config" / "leggen" / "leggen.db"
            db_path.parent.mkdir(parents=True, exist_ok=True)
            conn = sqlite3.connect(str(db_path))
            try:
                cursor = conn.cursor()

                # Create the transactions table if it doesn't exist
                cursor.execute(
                    """CREATE TABLE IF NOT EXISTS transactions (
                        internalTransactionId TEXT PRIMARY KEY,
                        institutionId TEXT,
                        iban TEXT,
                        transactionDate DATETIME,
                        description TEXT,
                        transactionValue REAL,
                        transactionCurrency TEXT,
                        transactionStatus TEXT,
                        accountId TEXT,
                        rawTransaction JSON
                    )"""
                )

                # Create indexes for better performance
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_transactions_account_id
                       ON transactions(accountId)"""
                )
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_transactions_date
                       ON transactions(transactionDate)"""
                )
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_transactions_account_date
                       ON transactions(accountId, transactionDate)"""
                )
                cursor.execute(
                    """CREATE INDEX IF NOT EXISTS idx_transactions_amount
                       ON transactions(transactionValue)"""
                )

                # Prepare an SQL statement for inserting data
                insert_sql = """INSERT INTO transactions (
                    internalTransactionId,
                    institutionId,
                    iban,
                    transactionDate,
                    description,
                    transactionValue,
                    transactionCurrency,
                    transactionStatus,
                    accountId,
                    rawTransaction
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""

                new_transactions = []

                for transaction in transactions:
                    try:
                        cursor.execute(
                            insert_sql,
                            (
                                transaction["internalTransactionId"],
                                transaction["institutionId"],
                                transaction["iban"],
                                transaction["transactionDate"],
                                transaction["description"],
                                transaction["transactionValue"],
                                transaction["transactionCurrency"],
                                transaction["transactionStatus"],
                                transaction["accountId"],
                                json.dumps(transaction["rawTransaction"]),
                            ),
                        )
                        new_transactions.append(transaction)
                    except sqlite3.IntegrityError:
                        # Transaction already exists
                        continue

                conn.commit()
            finally:
                # FIX: close on every exit path (previously leaked on exception).
                conn.close()

            logger.info(
                f"Persisted {len(new_transactions)} new transactions to SQLite for account {account_id}"
            )
            return new_transactions
        except Exception as e:
            logger.error(f"Failed to persist transactions to SQLite: {e}")
            raise

    async def _persist_account_details_sqlite(
        self, account_data: Dict[str, Any]
    ) -> None:
        """Persist account details to SQLite via the shared sqlite_db module.

        Raises:
            Exception: re-raised after logging on any persistence failure.
        """
        try:
            from pathlib import Path

            db_path = Path.home() / ".config" / "leggen" / "leggen.db"
            db_path.parent.mkdir(parents=True, exist_ok=True)

            # Use the sqlite_db module function
            sqlite_db.persist_account(account_data)

            logger.info(
                f"Persisted account details to SQLite for account {account_data['id']}"
            )
        except Exception as e:
            logger.error(f"Failed to persist account details to SQLite: {e}")
            raise
|
||||||
174
leggend/services/gocardless_service.py
Normal file
174
leggend/services/gocardless_service.py
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
import json
|
||||||
|
import httpx
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, Any, List
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.config import config
|
||||||
|
|
||||||
|
|
||||||
|
def _log_rate_limits(response):
|
||||||
|
"""Log GoCardless API rate limit headers"""
|
||||||
|
limit = response.headers.get("X-RateLimit-Limit")
|
||||||
|
remaining = response.headers.get("X-RateLimit-Remaining")
|
||||||
|
reset = response.headers.get("X-RateLimit-Reset")
|
||||||
|
account_success_reset = response.headers.get("X-RateLimit-Account-Success-Reset")
|
||||||
|
|
||||||
|
if limit or remaining or reset or account_success_reset:
|
||||||
|
logger.info(
|
||||||
|
f"GoCardless rate limits - Limit: {limit}, Remaining: {remaining}, Reset: {reset}s, Account Success Reset: {account_success_reset}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GoCardlessService:
    """Async client for the GoCardless Bank Account Data API.

    Manages token acquisition/refresh (cached in memory and persisted to
    ~/.config/leggen/auth.json) and wraps the endpoints leggen needs.
    """

    def __init__(self):
        self.config = config.gocardless_config
        self.base_url = self.config.get(
            "url", "https://bankaccountdata.gocardless.com/api/v2"
        )
        self._token = None  # in-memory cache of the current access token

    async def _get_auth_headers(self) -> Dict[str, str]:
        """Get authentication headers for GoCardless API"""
        token = await self._get_token()
        return {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}

    async def _get_token(self) -> str:
        """Get access token for GoCardless API.

        Order of attempts: in-memory cache, refresh of the on-disk token,
        then creation of a brand-new token.
        """
        if self._token:
            return self._token

        # Use ~/.config/leggen for consistency with main config
        auth_file = Path.home() / ".config" / "leggen" / "auth.json"

        if not auth_file.exists():
            return await self._create_token()

        try:
            with open(auth_file, "r") as f:
                auth = json.load(f)

            if not auth.get("access"):
                return await self._create_token()

            # Try to refresh the token
            async with httpx.AsyncClient() as client:
                try:
                    response = await client.post(
                        f"{self.base_url}/token/refresh/",
                        json={"refresh": auth["refresh"]},
                    )
                    _log_rate_limits(response)
                    response.raise_for_status()
                    auth.update(response.json())
                    self._save_auth(auth)
                    self._token = auth["access"]
                    return self._token
                except httpx.HTTPStatusError:
                    logger.warning("Token refresh failed, creating new token")
                    return await self._create_token()
        except Exception as e:
            # NOTE: also reached on non-HTTP refresh failures (e.g. network
            # errors), matching the original fallback behavior.
            logger.error(f"Error reading auth file: {e}")
            return await self._create_token()

    async def _create_token(self) -> str:
        """Create a new GoCardless access token and persist it to disk."""
        try:
            async with httpx.AsyncClient() as client:
                response = await client.post(
                    f"{self.base_url}/token/new/",
                    json={
                        "secret_id": self.config["key"],
                        "secret_key": self.config["secret"],
                    },
                )
                _log_rate_limits(response)
                response.raise_for_status()
                auth = response.json()
                self._save_auth(auth)
                self._token = auth["access"]
                return self._token
        except Exception as e:
            logger.error(f"Failed to create GoCardless token: {e}")
            raise

    def _save_auth(self, auth_data: dict):
        """Save authentication data to ~/.config/leggen/auth.json."""
        auth_file = Path.home() / ".config" / "leggen" / "auth.json"
        auth_file.parent.mkdir(parents=True, exist_ok=True)

        with open(auth_file, "w") as f:
            json.dump(auth_data, f)

    async def get_institutions(self, country: str = "PT") -> List[Dict[str, Any]]:
        """Get available bank institutions for a country"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.base_url}/institutions/",
                headers=headers,
                params={"country": country},
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()

    async def create_requisition(
        self, institution_id: str, redirect_url: str
    ) -> Dict[str, Any]:
        """Create a bank connection requisition"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.post(
                f"{self.base_url}/requisitions/",
                headers=headers,
                json={"institution_id": institution_id, "redirect": redirect_url},
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()

    async def get_requisitions(self) -> Dict[str, Any]:
        """Get all requisitions"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.base_url}/requisitions/", headers=headers
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()

    async def get_account_details(self, account_id: str) -> Dict[str, Any]:
        """Get account details"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.base_url}/accounts/{account_id}/", headers=headers
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()

    async def get_account_balances(self, account_id: str) -> Dict[str, Any]:
        """Get account balances"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.base_url}/accounts/{account_id}/balances/", headers=headers
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()

    async def get_account_transactions(self, account_id: str) -> Dict[str, Any]:
        """Get account transactions"""
        headers = await self._get_auth_headers()
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.base_url}/accounts/{account_id}/transactions/", headers=headers
            )
            _log_rate_limits(response)
            response.raise_for_status()
            return response.json()
|
||||||
204
leggend/services/notification_service.py
Normal file
204
leggend/services/notification_service.py
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
from typing import List, Dict, Any
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.config import config
|
||||||
|
|
||||||
|
|
||||||
|
class NotificationService:
|
||||||
|
def __init__(self):
|
||||||
|
self.notifications_config = config.notifications_config
|
||||||
|
self.filters_config = config.filters_config
|
||||||
|
|
||||||
|
async def send_transaction_notifications(
|
||||||
|
self, transactions: List[Dict[str, Any]]
|
||||||
|
) -> None:
|
||||||
|
"""Send notifications for new transactions that match filters"""
|
||||||
|
if not self.filters_config:
|
||||||
|
logger.info("No notification filters configured, skipping notifications")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Filter transactions that match notification criteria
|
||||||
|
matching_transactions = self._filter_transactions(transactions)
|
||||||
|
|
||||||
|
if not matching_transactions:
|
||||||
|
logger.info("No transactions matched notification filters")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Send to enabled notification services
|
||||||
|
if self._is_discord_enabled():
|
||||||
|
await self._send_discord_notifications(matching_transactions)
|
||||||
|
|
||||||
|
if self._is_telegram_enabled():
|
||||||
|
await self._send_telegram_notifications(matching_transactions)
|
||||||
|
|
||||||
|
async def send_test_notification(self, service: str, message: str) -> bool:
|
||||||
|
"""Send a test notification"""
|
||||||
|
try:
|
||||||
|
if service == "discord" and self._is_discord_enabled():
|
||||||
|
await self._send_discord_test(message)
|
||||||
|
return True
|
||||||
|
elif service == "telegram" and self._is_telegram_enabled():
|
||||||
|
await self._send_telegram_test(message)
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Notification service '{service}' not enabled or not found"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send test notification to {service}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def send_expiry_notification(self, notification_data: Dict[str, Any]) -> None:
|
||||||
|
"""Send notification about account expiry"""
|
||||||
|
if self._is_discord_enabled():
|
||||||
|
await self._send_discord_expiry(notification_data)
|
||||||
|
|
||||||
|
if self._is_telegram_enabled():
|
||||||
|
await self._send_telegram_expiry(notification_data)
|
||||||
|
|
||||||
|
def _filter_transactions(
|
||||||
|
self, transactions: List[Dict[str, Any]]
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
"""Filter transactions based on notification criteria"""
|
||||||
|
matching = []
|
||||||
|
filters_case_insensitive = self.filters_config.get("case-insensitive", [])
|
||||||
|
filters_case_sensitive = self.filters_config.get("case-sensitive", [])
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
description = transaction.get("description", "")
|
||||||
|
description_lower = description.lower()
|
||||||
|
|
||||||
|
# Check case-insensitive filters
|
||||||
|
for filter_value in filters_case_insensitive:
|
||||||
|
if filter_value.lower() in description_lower:
|
||||||
|
matching.append(
|
||||||
|
{
|
||||||
|
"name": transaction["description"],
|
||||||
|
"value": transaction["transactionValue"],
|
||||||
|
"currency": transaction["transactionCurrency"],
|
||||||
|
"date": transaction["transactionDate"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
# Check case-sensitive filters
|
||||||
|
for filter_value in filters_case_sensitive:
|
||||||
|
if filter_value in description:
|
||||||
|
matching.append(
|
||||||
|
{
|
||||||
|
"name": transaction["description"],
|
||||||
|
"value": transaction["transactionValue"],
|
||||||
|
"currency": transaction["transactionCurrency"],
|
||||||
|
"date": transaction["transactionDate"],
|
||||||
|
}
|
||||||
|
)
|
||||||
|
break
|
||||||
|
|
||||||
|
return matching
|
||||||
|
|
||||||
|
def _is_discord_enabled(self) -> bool:
|
||||||
|
"""Check if Discord notifications are enabled"""
|
||||||
|
discord_config = self.notifications_config.get("discord", {})
|
||||||
|
return bool(
|
||||||
|
discord_config.get("webhook") and discord_config.get("enabled", True)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _is_telegram_enabled(self) -> bool:
|
||||||
|
"""Check if Telegram notifications are enabled"""
|
||||||
|
telegram_config = self.notifications_config.get("telegram", {})
|
||||||
|
return bool(
|
||||||
|
telegram_config.get("token")
|
||||||
|
or telegram_config.get("api-key")
|
||||||
|
and (telegram_config.get("chat_id") or telegram_config.get("chat-id"))
|
||||||
|
and telegram_config.get("enabled", True)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _send_discord_notifications(
|
||||||
|
self, transactions: List[Dict[str, Any]]
|
||||||
|
) -> None:
|
||||||
|
"""Send Discord notifications - placeholder implementation"""
|
||||||
|
# Would import and use leggen.notifications.discord
|
||||||
|
logger.info(f"Sending {len(transactions)} transaction notifications to Discord")
|
||||||
|
|
||||||
|
async def _send_telegram_notifications(
|
||||||
|
self, transactions: List[Dict[str, Any]]
|
||||||
|
) -> None:
|
||||||
|
"""Send Telegram notifications - placeholder implementation"""
|
||||||
|
# Would import and use leggen.notifications.telegram
|
||||||
|
logger.info(
|
||||||
|
f"Sending {len(transactions)} transaction notifications to Telegram"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _send_discord_test(self, message: str) -> None:
|
||||||
|
"""Send Discord test notification"""
|
||||||
|
try:
|
||||||
|
from leggen.notifications.discord import send_expire_notification
|
||||||
|
import click
|
||||||
|
|
||||||
|
# Create a mock context with the webhook
|
||||||
|
ctx = click.Context(click.Command("test"))
|
||||||
|
ctx.obj = {
|
||||||
|
"notifications": {
|
||||||
|
"discord": {
|
||||||
|
"webhook": self.notifications_config.get("discord", {}).get(
|
||||||
|
"webhook"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Send test notification using the actual implementation
|
||||||
|
test_notification = {
|
||||||
|
"bank": "Test",
|
||||||
|
"requisition_id": "test-123",
|
||||||
|
"status": "active",
|
||||||
|
"days_left": 30,
|
||||||
|
}
|
||||||
|
send_expire_notification(ctx, test_notification)
|
||||||
|
logger.info(f"Discord test notification sent: {message}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send Discord test notification: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def _send_telegram_test(self, message: str) -> None:
|
||||||
|
"""Send Telegram test notification"""
|
||||||
|
try:
|
||||||
|
from leggen.notifications.telegram import send_expire_notification
|
||||||
|
import click
|
||||||
|
|
||||||
|
# Create a mock context with the telegram config
|
||||||
|
ctx = click.Context(click.Command("test"))
|
||||||
|
telegram_config = self.notifications_config.get("telegram", {})
|
||||||
|
ctx.obj = {
|
||||||
|
"notifications": {
|
||||||
|
"telegram": {
|
||||||
|
"api-key": telegram_config.get("token")
|
||||||
|
or telegram_config.get("api-key"),
|
||||||
|
"chat-id": telegram_config.get("chat_id")
|
||||||
|
or telegram_config.get("chat-id"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Send test notification using the actual implementation
|
||||||
|
test_notification = {
|
||||||
|
"bank": "Test",
|
||||||
|
"requisition_id": "test-123",
|
||||||
|
"status": "active",
|
||||||
|
"days_left": 30,
|
||||||
|
}
|
||||||
|
send_expire_notification(ctx, test_notification)
|
||||||
|
logger.info(f"Telegram test notification sent: {message}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send Telegram test notification: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def _send_discord_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||||
|
"""Send Discord expiry notification"""
|
||||||
|
logger.info(f"Sending Discord expiry notification: {notification_data}")
|
||||||
|
|
||||||
|
async def _send_telegram_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||||
|
"""Send Telegram expiry notification"""
|
||||||
|
logger.info(f"Sending Telegram expiry notification: {notification_data}")
|
||||||
168
leggend/services/sync_service.py
Normal file
168
leggend/services/sync_service.py
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from leggend.api.models.sync import SyncResult, SyncStatus
|
||||||
|
from leggend.services.gocardless_service import GoCardlessService
|
||||||
|
from leggend.services.database_service import DatabaseService
|
||||||
|
from leggend.services.notification_service import NotificationService
|
||||||
|
|
||||||
|
|
||||||
|
class SyncService:
|
||||||
|
def __init__(self):
|
||||||
|
self.gocardless = GoCardlessService()
|
||||||
|
self.database = DatabaseService()
|
||||||
|
self.notifications = NotificationService()
|
||||||
|
self._sync_status = SyncStatus(is_running=False)
|
||||||
|
|
||||||
|
async def get_sync_status(self) -> SyncStatus:
|
||||||
|
"""Get current sync status"""
|
||||||
|
return self._sync_status
|
||||||
|
|
||||||
|
async def sync_all_accounts(self, force: bool = False) -> SyncResult:
|
||||||
|
"""Sync all connected accounts"""
|
||||||
|
if self._sync_status.is_running and not force:
|
||||||
|
raise Exception("Sync is already running")
|
||||||
|
|
||||||
|
start_time = datetime.now()
|
||||||
|
self._sync_status.is_running = True
|
||||||
|
self._sync_status.errors = []
|
||||||
|
|
||||||
|
accounts_processed = 0
|
||||||
|
transactions_added = 0
|
||||||
|
transactions_updated = 0
|
||||||
|
balances_updated = 0
|
||||||
|
errors = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.info("Starting sync of all accounts")
|
||||||
|
|
||||||
|
# Get all requisitions and accounts
|
||||||
|
requisitions = await self.gocardless.get_requisitions()
|
||||||
|
all_accounts = set()
|
||||||
|
|
||||||
|
for req in requisitions.get("results", []):
|
||||||
|
all_accounts.update(req.get("accounts", []))
|
||||||
|
|
||||||
|
self._sync_status.total_accounts = len(all_accounts)
|
||||||
|
|
||||||
|
# Process each account
|
||||||
|
for account_id in all_accounts:
|
||||||
|
try:
|
||||||
|
# Get account details
|
||||||
|
account_details = await self.gocardless.get_account_details(
|
||||||
|
account_id
|
||||||
|
)
|
||||||
|
|
||||||
|
# Persist account details to database
|
||||||
|
if account_details:
|
||||||
|
await self.database.persist_account_details(account_details)
|
||||||
|
|
||||||
|
# Get and save balances
|
||||||
|
balances = await self.gocardless.get_account_balances(account_id)
|
||||||
|
if balances and account_details:
|
||||||
|
# Merge account details into balances data for proper persistence
|
||||||
|
balances_with_account_info = balances.copy()
|
||||||
|
balances_with_account_info["institution_id"] = (
|
||||||
|
account_details.get("institution_id")
|
||||||
|
)
|
||||||
|
balances_with_account_info["iban"] = account_details.get("iban")
|
||||||
|
balances_with_account_info["account_status"] = (
|
||||||
|
account_details.get("status")
|
||||||
|
)
|
||||||
|
await self.database.persist_balance(
|
||||||
|
account_id, balances_with_account_info
|
||||||
|
)
|
||||||
|
balances_updated += len(balances.get("balances", []))
|
||||||
|
|
||||||
|
# Get and save transactions
|
||||||
|
transactions = await self.gocardless.get_account_transactions(
|
||||||
|
account_id
|
||||||
|
)
|
||||||
|
if transactions:
|
||||||
|
processed_transactions = self.database.process_transactions(
|
||||||
|
account_id, account_details, transactions
|
||||||
|
)
|
||||||
|
new_transactions = await self.database.persist_transactions(
|
||||||
|
account_id, processed_transactions
|
||||||
|
)
|
||||||
|
transactions_added += len(new_transactions)
|
||||||
|
|
||||||
|
# Send notifications for new transactions
|
||||||
|
if new_transactions:
|
||||||
|
await self.notifications.send_transaction_notifications(
|
||||||
|
new_transactions
|
||||||
|
)
|
||||||
|
|
||||||
|
accounts_processed += 1
|
||||||
|
self._sync_status.accounts_synced = accounts_processed
|
||||||
|
|
||||||
|
logger.info(f"Synced account {account_id} successfully")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Failed to sync account {account_id}: {str(e)}"
|
||||||
|
errors.append(error_msg)
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
end_time = datetime.now()
|
||||||
|
duration = (end_time - start_time).total_seconds()
|
||||||
|
|
||||||
|
self._sync_status.last_sync = end_time
|
||||||
|
|
||||||
|
result = SyncResult(
|
||||||
|
success=len(errors) == 0,
|
||||||
|
accounts_processed=accounts_processed,
|
||||||
|
transactions_added=transactions_added,
|
||||||
|
transactions_updated=transactions_updated,
|
||||||
|
balances_updated=balances_updated,
|
||||||
|
duration_seconds=duration,
|
||||||
|
errors=errors,
|
||||||
|
started_at=start_time,
|
||||||
|
completed_at=end_time,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Sync completed: {accounts_processed} accounts, {transactions_added} new transactions"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Sync failed: {str(e)}"
|
||||||
|
errors.append(error_msg)
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._sync_status.is_running = False
|
||||||
|
|
||||||
|
async def sync_specific_accounts(
|
||||||
|
self, account_ids: List[str], force: bool = False
|
||||||
|
) -> SyncResult:
|
||||||
|
"""Sync specific accounts"""
|
||||||
|
if self._sync_status.is_running and not force:
|
||||||
|
raise Exception("Sync is already running")
|
||||||
|
|
||||||
|
# Similar implementation but only for specified accounts
|
||||||
|
# For brevity, implementing a simplified version
|
||||||
|
start_time = datetime.now()
|
||||||
|
self._sync_status.is_running = True
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Process only specified accounts
|
||||||
|
# Implementation would be similar to sync_all_accounts
|
||||||
|
# but filtered to only the specified account_ids
|
||||||
|
|
||||||
|
end_time = datetime.now()
|
||||||
|
return SyncResult(
|
||||||
|
success=True,
|
||||||
|
accounts_processed=len(account_ids),
|
||||||
|
transactions_added=0,
|
||||||
|
transactions_updated=0,
|
||||||
|
balances_updated=0,
|
||||||
|
duration_seconds=(end_time - start_time).total_seconds(),
|
||||||
|
errors=[],
|
||||||
|
started_at=start_time,
|
||||||
|
completed_at=end_time,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
self._sync_status.is_running = False
|
||||||
10
leggend/utils/gocardless.py
Normal file
10
leggend/utils/gocardless.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
REQUISITION_STATUS = {
|
||||||
|
"CR": "CREATED",
|
||||||
|
"GC": "GIVING_CONSENT",
|
||||||
|
"UA": "UNDERGOING_AUTHENTICATION",
|
||||||
|
"RJ": "REJECTED",
|
||||||
|
"SA": "SELECTING_ACCOUNTS",
|
||||||
|
"GA": "GRANTING_ACCESS",
|
||||||
|
"LN": "LINKED",
|
||||||
|
"EX": "EXPIRED",
|
||||||
|
}
|
||||||
7
opencode.json
Normal file
7
opencode.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://opencode.ai/config.json",
|
||||||
|
"permission": {
|
||||||
|
"edit": "ask",
|
||||||
|
"bash": "ask"
|
||||||
|
}
|
||||||
|
}
|
||||||
645
poetry.lock
generated
645
poetry.lock
generated
@@ -1,645 +0,0 @@
|
|||||||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "black"
|
|
||||||
version = "24.1.1"
|
|
||||||
description = "The uncompromising code formatter."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "black-24.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2588021038bd5ada078de606f2a804cadd0a3cc6a79cb3e9bb3a8bf581325a4c"},
|
|
||||||
{file = "black-24.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a95915c98d6e32ca43809d46d932e2abc5f1f7d582ffbe65a5b4d1588af7445"},
|
|
||||||
{file = "black-24.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa6a0e965779c8f2afb286f9ef798df770ba2b6cee063c650b96adec22c056a"},
|
|
||||||
{file = "black-24.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5242ecd9e990aeb995b6d03dc3b2d112d4a78f2083e5a8e86d566340ae80fec4"},
|
|
||||||
{file = "black-24.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fc1ec9aa6f4d98d022101e015261c056ddebe3da6a8ccfc2c792cbe0349d48b7"},
|
|
||||||
{file = "black-24.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0269dfdea12442022e88043d2910429bed717b2d04523867a85dacce535916b8"},
|
|
||||||
{file = "black-24.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3d64db762eae4a5ce04b6e3dd745dcca0fb9560eb931a5be97472e38652a161"},
|
|
||||||
{file = "black-24.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:5d7b06ea8816cbd4becfe5f70accae953c53c0e53aa98730ceccb0395520ee5d"},
|
|
||||||
{file = "black-24.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e2c8dfa14677f90d976f68e0c923947ae68fa3961d61ee30976c388adc0b02c8"},
|
|
||||||
{file = "black-24.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a21725862d0e855ae05da1dd25e3825ed712eaaccef6b03017fe0853a01aa45e"},
|
|
||||||
{file = "black-24.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07204d078e25327aad9ed2c64790d681238686bce254c910de640c7cc4fc3aa6"},
|
|
||||||
{file = "black-24.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:a83fe522d9698d8f9a101b860b1ee154c1d25f8a82ceb807d319f085b2627c5b"},
|
|
||||||
{file = "black-24.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08b34e85170d368c37ca7bf81cf67ac863c9d1963b2c1780c39102187ec8dd62"},
|
|
||||||
{file = "black-24.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7258c27115c1e3b5de9ac6c4f9957e3ee2c02c0b39222a24dc7aa03ba0e986f5"},
|
|
||||||
{file = "black-24.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40657e1b78212d582a0edecafef133cf1dd02e6677f539b669db4746150d38f6"},
|
|
||||||
{file = "black-24.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e298d588744efda02379521a19639ebcd314fba7a49be22136204d7ed1782717"},
|
|
||||||
{file = "black-24.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34afe9da5056aa123b8bfda1664bfe6fb4e9c6f311d8e4a6eb089da9a9173bf9"},
|
|
||||||
{file = "black-24.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:854c06fb86fd854140f37fb24dbf10621f5dab9e3b0c29a690ba595e3d543024"},
|
|
||||||
{file = "black-24.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3897ae5a21ca132efa219c029cce5e6bfc9c3d34ed7e892113d199c0b1b444a2"},
|
|
||||||
{file = "black-24.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:ecba2a15dfb2d97105be74bbfe5128bc5e9fa8477d8c46766505c1dda5883aac"},
|
|
||||||
{file = "black-24.1.1-py3-none-any.whl", hash = "sha256:5cdc2e2195212208fbcae579b931407c1fa9997584f0a415421748aeafff1168"},
|
|
||||||
{file = "black-24.1.1.tar.gz", hash = "sha256:48b5760dcbfe5cf97fd4fba23946681f3a81514c6ab8a45b50da67ac8fbc6c7b"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
click = ">=8.0.0"
|
|
||||||
mypy-extensions = ">=0.4.3"
|
|
||||||
packaging = ">=22.0"
|
|
||||||
pathspec = ">=0.9.0"
|
|
||||||
platformdirs = ">=2"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
colorama = ["colorama (>=0.4.3)"]
|
|
||||||
d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
|
|
||||||
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
|
||||||
uvloop = ["uvloop (>=0.15.2)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "certifi"
|
|
||||||
version = "2023.11.17"
|
|
||||||
description = "Python package for providing Mozilla's CA Bundle."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
files = [
|
|
||||||
{file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"},
|
|
||||||
{file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfgv"
|
|
||||||
version = "3.4.0"
|
|
||||||
description = "Validate configuration and produce human readable error messages."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
|
|
||||||
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "charset-normalizer"
|
|
||||||
version = "3.3.2"
|
|
||||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7.0"
|
|
||||||
files = [
|
|
||||||
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
|
|
||||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
|
|
||||||
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "click"
|
|
||||||
version = "8.1.7"
|
|
||||||
description = "Composable command line interface toolkit"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
|
|
||||||
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "colorama"
|
|
||||||
version = "0.4.6"
|
|
||||||
description = "Cross-platform colored terminal text."
|
|
||||||
optional = false
|
|
||||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
|
||||||
files = [
|
|
||||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
|
||||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "distlib"
|
|
||||||
version = "0.3.8"
|
|
||||||
description = "Distribution utilities"
|
|
||||||
optional = false
|
|
||||||
python-versions = "*"
|
|
||||||
files = [
|
|
||||||
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
|
|
||||||
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "dnspython"
|
|
||||||
version = "2.6.0"
|
|
||||||
description = "DNS toolkit"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "dnspython-2.6.0-py3-none-any.whl", hash = "sha256:44c40af3bffed66e3307cea9ab667fd583e138ecc0777b18f262a9dae034e5fa"},
|
|
||||||
{file = "dnspython-2.6.0.tar.gz", hash = "sha256:233f871ff384d84c33b2eaf4358ffe7f8927eae3b257ad8467f9bdba7e7ac6bc"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
|
|
||||||
dnssec = ["cryptography (>=41)"]
|
|
||||||
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
|
|
||||||
doq = ["aioquic (>=0.9.25)"]
|
|
||||||
idna = ["idna (>=3.6)"]
|
|
||||||
trio = ["trio (>=0.23)"]
|
|
||||||
wmi = ["wmi (>=1.5.1)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "filelock"
|
|
||||||
version = "3.13.1"
|
|
||||||
description = "A platform independent file lock."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
|
|
||||||
{file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
|
|
||||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
|
|
||||||
typing = ["typing-extensions (>=4.8)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "identify"
|
|
||||||
version = "2.5.33"
|
|
||||||
description = "File identification library for Python"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"},
|
|
||||||
{file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
license = ["ukkonen"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "idna"
|
|
||||||
version = "3.6"
|
|
||||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
files = [
|
|
||||||
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
|
|
||||||
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "loguru"
|
|
||||||
version = "0.7.2"
|
|
||||||
description = "Python logging made (stupidly) simple"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
files = [
|
|
||||||
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
|
|
||||||
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
|
|
||||||
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mypy-extensions"
|
|
||||||
version = "1.0.0"
|
|
||||||
description = "Type system extensions for programs checked with the mypy type checker."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
files = [
|
|
||||||
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
|
|
||||||
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "nodeenv"
|
|
||||||
version = "1.8.0"
|
|
||||||
description = "Node.js virtual environment builder"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
|
|
||||||
files = [
|
|
||||||
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
|
|
||||||
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
setuptools = "*"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "packaging"
|
|
||||||
version = "23.2"
|
|
||||||
description = "Core utilities for Python packages"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
|
|
||||||
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pathspec"
|
|
||||||
version = "0.12.1"
|
|
||||||
description = "Utility library for gitignore style pattern matching of file paths."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
|
|
||||||
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "platformdirs"
|
|
||||||
version = "4.1.0"
|
|
||||||
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"},
|
|
||||||
{file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
|
|
||||||
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pre-commit"
|
|
||||||
version = "3.6.0"
|
|
||||||
description = "A framework for managing and maintaining multi-language pre-commit hooks."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.9"
|
|
||||||
files = [
|
|
||||||
{file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"},
|
|
||||||
{file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
cfgv = ">=2.0.0"
|
|
||||||
identify = ">=1.0.0"
|
|
||||||
nodeenv = ">=0.11.1"
|
|
||||||
pyyaml = ">=5.1"
|
|
||||||
virtualenv = ">=20.10.0"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pymongo"
|
|
||||||
version = "4.6.1"
|
|
||||||
description = "Python driver for MongoDB <http://www.mongodb.org>"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4344c30025210b9fa80ec257b0e0aab5aa1d5cca91daa70d82ab97b482cc038e"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux1_i686.whl", hash = "sha256:1c5654bb8bb2bdb10e7a0bc3c193dd8b49a960b9eebc4381ff5a2043f4c3c441"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:eaf2f65190c506def2581219572b9c70b8250615dc918b3b7c218361a51ec42e"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:262356ea5fcb13d35fb2ab6009d3927bafb9504ef02339338634fffd8a9f1ae4"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:2dd2f6960ee3c9360bed7fb3c678be0ca2d00f877068556785ec2eb6b73d2414"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:ff925f1cca42e933376d09ddc254598f8c5fcd36efc5cac0118bb36c36217c41"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:3cadf7f4c8e94d8a77874b54a63c80af01f4d48c4b669c8b6867f86a07ba994f"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55dac73316e7e8c2616ba2e6f62b750918e9e0ae0b2053699d66ca27a7790105"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:154b361dcb358ad377d5d40df41ee35f1cc14c8691b50511547c12404f89b5cb"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2940aa20e9cc328e8ddeacea8b9a6f5ddafe0b087fedad928912e787c65b4909"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:010bc9aa90fd06e5cc52c8fac2c2fd4ef1b5f990d9638548dde178005770a5e8"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e470fa4bace5f50076c32f4b3cc182b31303b4fefb9b87f990144515d572820b"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-win32.whl", hash = "sha256:da08ea09eefa6b960c2dd9a68ec47949235485c623621eb1d6c02b46765322ac"},
|
|
||||||
{file = "pymongo-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:13d613c866f9f07d51180f9a7da54ef491d130f169e999c27e7633abe8619ec9"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6a0ae7a48a6ef82ceb98a366948874834b86c84e288dbd55600c1abfc3ac1d88"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd94c503271e79917b27c6e77f7c5474da6930b3fb9e70a12e68c2dff386b9a"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d4ccac3053b84a09251da8f5350bb684cbbf8c8c01eda6b5418417d0a8ab198"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:349093675a2d3759e4fb42b596afffa2b2518c890492563d7905fac503b20daa"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88beb444fb438385e53dc9110852910ec2a22f0eab7dd489e827038fdc19ed8d"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8e62d06e90f60ea2a3d463ae51401475568b995bafaffd81767d208d84d7bb1"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-win32.whl", hash = "sha256:5556e306713e2522e460287615d26c0af0fe5ed9d4f431dad35c6624c5d277e9"},
|
|
||||||
{file = "pymongo-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:b10d8cda9fc2fcdcfa4a000aa10413a2bf8b575852cd07cb8a595ed09689ca98"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b435b13bb8e36be11b75f7384a34eefe487fe87a6267172964628e2b14ecf0a7"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e438417ce1dc5b758742e12661d800482200b042d03512a8f31f6aaa9137ad40"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b47ebd89e69fbf33d1c2df79759d7162fc80c7652dacfec136dae1c9b3afac7"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbed8cccebe1169d45cedf00461b2842652d476d2897fd1c42cf41b635d88746"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30a9e06041fbd7a7590693ec5e407aa8737ad91912a1e70176aff92e5c99d20"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8729dbf25eb32ad0dc0b9bd5e6a0d0b7e5c2dc8ec06ad171088e1896b522a74"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-win32.whl", hash = "sha256:3177f783ae7e08aaf7b2802e0df4e4b13903520e8380915e6337cdc7a6ff01d8"},
|
|
||||||
{file = "pymongo-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:00c199e1c593e2c8b033136d7a08f0c376452bac8a896c923fcd6f419e07bdd2"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6dcc95f4bb9ed793714b43f4f23a7b0c57e4ef47414162297d6f650213512c19"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:13552ca505366df74e3e2f0a4f27c363928f3dff0eef9f281eb81af7f29bc3c5"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:77e0df59b1a4994ad30c6d746992ae887f9756a43fc25dec2db515d94cf0222d"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3a7f02a58a0c2912734105e05dedbee4f7507e6f1bd132ebad520be0b11d46fd"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:026a24a36394dc8930cbcb1d19d5eb35205ef3c838a7e619e04bd170713972e7"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:3b287e814a01deddb59b88549c1e0c87cefacd798d4afc0c8bd6042d1c3d48aa"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:9a710c184ba845afb05a6f876edac8f27783ba70e52d5eaf939f121fc13b2f59"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:30b2c9caf3e55c2e323565d1f3b7e7881ab87db16997dc0cbca7c52885ed2347"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff62ba8ff70f01ab4fe0ae36b2cb0b5d1f42e73dfc81ddf0758cd9f77331ad25"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:547dc5d7f834b1deefda51aedb11a7af9c51c45e689e44e14aa85d44147c7657"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1de3c6faf948f3edd4e738abdb4b76572b4f4fdfc1fed4dad02427e70c5a6219"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2831e05ce0a4df10c4ac5399ef50b9a621f90894c2a4d2945dc5658765514ed"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:144a31391a39a390efce0c5ebcaf4bf112114af4384c90163f402cec5ede476b"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33bb16a07d3cc4e0aea37b242097cd5f7a156312012455c2fa8ca396953b11c4"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b7b1a83ce514700276a46af3d9e481ec381f05b64939effc9065afe18456a6b9"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:3071ec998cc3d7b4944377e5f1217c2c44b811fae16f9a495c7a1ce9b42fb038"},
|
|
||||||
{file = "pymongo-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2346450a075625c4d6166b40a013b605a38b6b6168ce2232b192a37fb200d588"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:061598cbc6abe2f382ab64c9caa83faa2f4c51256f732cdd890bcc6e63bfb67e"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d483793a384c550c2d12cb794ede294d303b42beff75f3b3081f57196660edaf"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f9756f1d25454ba6a3c2f1ef8b7ddec23e5cdeae3dc3c3377243ae37a383db00"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:1ed23b0e2dac6f84f44c8494fbceefe6eb5c35db5c1099f56ab78fc0d94ab3af"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:3d18a9b9b858ee140c15c5bfcb3e66e47e2a70a03272c2e72adda2482f76a6ad"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:c258dbacfff1224f13576147df16ce3c02024a0d792fd0323ac01bed5d3c545d"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:f7acc03a4f1154ba2643edeb13658d08598fe6e490c3dd96a241b94f09801626"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:76013fef1c9cd1cd00d55efde516c154aa169f2bf059b197c263a255ba8a9ddf"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f0e6a6c807fa887a0c51cc24fe7ea51bb9e496fe88f00d7930063372c3664c3"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd1fa413f8b9ba30140de198e4f408ffbba6396864c7554e0867aa7363eb58b2"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d219b4508f71d762368caec1fc180960569766049bbc4d38174f05e8ef2fe5b"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27b81ecf18031998ad7db53b960d1347f8f29e8b7cb5ea7b4394726468e4295e"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56816e43c92c2fa8c11dc2a686f0ca248bea7902f4a067fa6cbc77853b0f041e"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef801027629c5b511cf2ba13b9be29bfee36ae834b2d95d9877818479cdc99ea"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d4c2be9760b112b1caf649b4977b81b69893d75aa86caf4f0f398447be871f3c"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-win32.whl", hash = "sha256:39d77d8bbb392fa443831e6d4ae534237b1f4eee6aa186f0cdb4e334ba89536e"},
|
|
||||||
{file = "pymongo-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:4497d49d785482cc1a44a0ddf8830b036a468c088e72a05217f5b60a9e025012"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:69247f7a2835fc0984bbf0892e6022e9a36aec70e187fcfe6cae6a373eb8c4de"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7bb0e9049e81def6829d09558ad12d16d0454c26cabe6efc3658e544460688d9"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6a1810c2cbde714decf40f811d1edc0dae45506eb37298fd9d4247b8801509fe"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e2aced6fb2f5261b47d267cb40060b73b6527e64afe54f6497844c9affed5fd0"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d0355cff58a4ed6d5e5f6b9c3693f52de0784aa0c17119394e2a8e376ce489d4"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:3c74f4725485f0a7a3862cfd374cc1b740cebe4c133e0c1425984bcdcce0f4bb"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:9c79d597fb3a7c93d7c26924db7497eba06d58f88f58e586aa69b2ad89fee0f8"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8ec75f35f62571a43e31e7bd11749d974c1b5cd5ea4a8388725d579263c0fdf6"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e641f931c5cd95b376fd3c59db52770e17bec2bf86ef16cc83b3906c054845"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9aafd036f6f2e5ad109aec92f8dbfcbe76cff16bad683eb6dd18013739c0b3ae"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f2b856518bfcfa316c8dae3d7b412aecacf2e8ba30b149f5eb3b63128d703b9"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec31adc2e988fd7db3ab509954791bbc5a452a03c85e45b804b4bfc31fa221d"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9167e735379ec43d8eafa3fd675bfbb12e2c0464f98960586e9447d2cf2c7a83"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1461199b07903fc1424709efafe379205bf5f738144b1a50a08b0396357b5abf"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3094c7d2f820eecabadae76bfec02669567bbdd1730eabce10a5764778564f7b"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-win32.whl", hash = "sha256:c91ea3915425bd4111cb1b74511cdc56d1d16a683a48bf2a5a96b6a6c0f297f7"},
|
|
||||||
{file = "pymongo-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:ef102a67ede70e1721fe27f75073b5314911dbb9bc27cde0a1c402a11531e7bd"},
|
|
||||||
{file = "pymongo-4.6.1.tar.gz", hash = "sha256:31dab1f3e1d0cdd57e8df01b645f52d43cc1b653ed3afd535d2891f4fc4f9712"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
dnspython = ">=1.16.0,<3.0.0"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
aws = ["pymongo-auth-aws (<2.0.0)"]
|
|
||||||
encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"]
|
|
||||||
gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
|
|
||||||
ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
|
|
||||||
snappy = ["python-snappy"]
|
|
||||||
test = ["pytest (>=7)"]
|
|
||||||
zstd = ["zstandard"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyyaml"
|
|
||||||
version = "6.0.1"
|
|
||||||
description = "YAML parser and emitter for Python"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.6"
|
|
||||||
files = [
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
|
|
||||||
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
|
|
||||||
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
|
|
||||||
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
|
|
||||||
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
|
|
||||||
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
|
|
||||||
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
|
|
||||||
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "requests"
|
|
||||||
version = "2.31.0"
|
|
||||||
description = "Python HTTP for Humans."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
|
|
||||||
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
certifi = ">=2017.4.17"
|
|
||||||
charset-normalizer = ">=2,<4"
|
|
||||||
idna = ">=2.5,<4"
|
|
||||||
urllib3 = ">=1.21.1,<3"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
|
||||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ruff"
|
|
||||||
version = "0.1.14"
|
|
||||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"},
|
|
||||||
{file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"},
|
|
||||||
{file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "setuptools"
|
|
||||||
version = "69.0.3"
|
|
||||||
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"},
|
|
||||||
{file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
|
|
||||||
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
|
|
||||||
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "tabulate"
|
|
||||||
version = "0.9.0"
|
|
||||||
description = "Pretty-print tabular data"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
|
|
||||||
{file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
widechars = ["wcwidth"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "urllib3"
|
|
||||||
version = "2.1.0"
|
|
||||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.8"
|
|
||||||
files = [
|
|
||||||
{file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
|
|
||||||
{file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
|
|
||||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
|
||||||
zstd = ["zstandard (>=0.18.0)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "virtualenv"
|
|
||||||
version = "20.25.0"
|
|
||||||
description = "Virtual Python Environment builder"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
files = [
|
|
||||||
{file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
|
|
||||||
{file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
distlib = ">=0.3.7,<1"
|
|
||||||
filelock = ">=3.12.2,<4"
|
|
||||||
platformdirs = ">=3.9.1,<5"
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
|
|
||||||
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "win32-setctime"
|
|
||||||
version = "1.1.0"
|
|
||||||
description = "A small Python utility to set file creation time on Windows"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.5"
|
|
||||||
files = [
|
|
||||||
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
|
|
||||||
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
|
|
||||||
]
|
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
|
|
||||||
|
|
||||||
[metadata]
|
|
||||||
lock-version = "2.0"
|
|
||||||
python-versions = "^3.12"
|
|
||||||
content-hash = "5dd54a205612068bfd2126aca1a10a6ff43f491b445021d485b9de513a06e163"
|
|
||||||
104
pyproject.toml
104
pyproject.toml
@@ -1,11 +1,20 @@
|
|||||||
[tool.poetry]
|
[project]
|
||||||
name = "leggen"
|
name = "leggen"
|
||||||
version = "0.1.1"
|
version = "2025.9.0"
|
||||||
description = "An Open Banking CLI"
|
description = "An Open Banking CLI"
|
||||||
authors = ["Elisiário Couto <elisiario@couto.io>"]
|
authors = [{ name = "Elisiário Couto", email = "elisiario@couto.io" }]
|
||||||
|
requires-python = "~=3.13.0"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/elisiariocouto/leggen"
|
keywords = [
|
||||||
|
"openbanking",
|
||||||
|
"cli",
|
||||||
|
"psd2",
|
||||||
|
"gocardless",
|
||||||
|
"bank",
|
||||||
|
"transactions",
|
||||||
|
"finance",
|
||||||
|
]
|
||||||
classifiers = [
|
classifiers = [
|
||||||
"Development Status :: 3 - Alpha",
|
"Development Status :: 3 - Alpha",
|
||||||
"Environment :: Console",
|
"Environment :: Console",
|
||||||
@@ -14,39 +23,70 @@ classifiers = [
|
|||||||
"Topic :: Utilities",
|
"Topic :: Utilities",
|
||||||
"Topic :: Office/Business :: Financial",
|
"Topic :: Office/Business :: Financial",
|
||||||
]
|
]
|
||||||
keywords = [
|
dependencies = [
|
||||||
"openbanking",
|
"click>=8.1.7,<9",
|
||||||
"cli",
|
"requests>=2.31.0,<3",
|
||||||
"psd2",
|
"loguru>=0.7.2,<0.8",
|
||||||
"gocardless",
|
"tabulate>=0.9.0,<0.10",
|
||||||
"mongodb",
|
"discord-webhook>=1.3.1,<2",
|
||||||
"bank",
|
"fastapi>=0.104.0,<1",
|
||||||
"transactions",
|
"uvicorn[standard]>=0.24.0,<1",
|
||||||
"finance",
|
"apscheduler>=3.10.0,<4",
|
||||||
|
"tomli-w>=1.0.0,<2",
|
||||||
|
"httpx>=0.28.1",
|
||||||
]
|
]
|
||||||
|
|
||||||
packages = [{ "include" = "leggen" }]
|
[project.urls]
|
||||||
|
Repository = "https://github.com/elisiariocouto/leggen"
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
[project.scripts]
|
||||||
python = "^3.12"
|
|
||||||
click = "^8.1.7"
|
|
||||||
requests = "^2.31.0"
|
|
||||||
loguru = "^0.7.2"
|
|
||||||
tabulate = "^0.9.0"
|
|
||||||
pymongo = "^4.6.1"
|
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
|
||||||
ruff = "^0.1.14"
|
|
||||||
pre-commit = "^3.6.0"
|
|
||||||
black = "^24.1.1"
|
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
|
||||||
leggen = "leggen.main:cli"
|
leggen = "leggen.main:cli"
|
||||||
|
leggend = "leggend.main:main"
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
"ruff>=0.6.1",
|
||||||
|
"pre-commit>=3.6.0",
|
||||||
|
"pytest>=8.0.0",
|
||||||
|
"pytest-asyncio>=0.23.0",
|
||||||
|
"pytest-mock>=3.12.0",
|
||||||
|
"respx>=0.21.0",
|
||||||
|
"requests-mock>=1.12.0",
|
||||||
|
"mypy>=1.17.1",
|
||||||
|
"types-tabulate>=0.9.0.20241207",
|
||||||
|
"types-requests>=2.32.4.20250809",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.sdist]
|
||||||
|
include = ["leggen", "leggend"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
include = ["leggen", "leggend"]
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["poetry-core"]
|
requires = ["hatchling"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
ignore = ["E501", "B008", "B006"]
|
lint.ignore = ["E501", "B008", "B006"]
|
||||||
extend-select = ["B", "C4", "PIE", "T20", "SIM", "TCH"]
|
lint.extend-select = ["B", "C4", "PIE", "T20", "SIM", "TCH"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
testpaths = ["tests"]
|
||||||
|
python_files = "test_*.py"
|
||||||
|
python_classes = "Test*"
|
||||||
|
python_functions = "test_*"
|
||||||
|
addopts = ["-v", "--tb=short", "--strict-markers", "--disable-warnings"]
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
asyncio_default_fixture_loop_scope = "function"
|
||||||
|
markers = [
|
||||||
|
"unit: Unit tests",
|
||||||
|
"integration: Integration tests",
|
||||||
|
"slow: Slow running tests",
|
||||||
|
"api: API endpoint tests",
|
||||||
|
"cli: CLI command tests",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[tool.mypy.overrides]]
|
||||||
|
module = ["apscheduler.*"]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
#!/bin/bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -ef -o pipefail
|
set -ef -o pipefail
|
||||||
|
|
||||||
@@ -11,25 +11,39 @@ function check_command {
|
|||||||
|
|
||||||
check_command git
|
check_command git
|
||||||
check_command git-cliff
|
check_command git-cliff
|
||||||
check_command poetry
|
check_command uv
|
||||||
|
|
||||||
if [ -z "$1" ]; then
|
# Get current date components
|
||||||
echo " > No semver verb specified, run release with <major|minor|patch> parameter."
|
YEAR=$(date +%Y)
|
||||||
exit 1
|
MONTH=$(date +%-m) # %-m removes zero padding
|
||||||
|
|
||||||
|
# Get the latest version for current year and month
|
||||||
|
LATEST_TAG=$(git tag -l "${YEAR}.${MONTH}.*" | sort -V | tail -n 1)
|
||||||
|
|
||||||
|
if [ -z "$LATEST_TAG" ]; then
|
||||||
|
# No version for current year/month exists, start at 0
|
||||||
|
MICRO=0
|
||||||
|
else
|
||||||
|
# Extract micro version and increment
|
||||||
|
MICRO=$(echo "$LATEST_TAG" | cut -d. -f3)
|
||||||
|
MICRO=$((MICRO + 1))
|
||||||
fi
|
fi
|
||||||
|
|
||||||
CURRENT_VERSION=$(poetry version -s)
|
NEXT_VERSION="${YEAR}.${MONTH}.${MICRO}"
|
||||||
|
CURRENT_VERSION=$(uv version --short)
|
||||||
|
|
||||||
echo " > Current version is $CURRENT_VERSION"
|
echo " > Current version is $CURRENT_VERSION"
|
||||||
|
echo " > Setting new version to $NEXT_VERSION"
|
||||||
|
|
||||||
poetry version "$1"
|
# Manually update version in pyproject.toml
|
||||||
NEXT_VERSION=$(poetry version -s)
|
sed -i '' "s/^version = .*/version = \"${NEXT_VERSION}\"/" pyproject.toml
|
||||||
|
|
||||||
echo " > leggen bumped to $NEXT_VERSION"
|
echo " > Version bumped to $NEXT_VERSION"
|
||||||
echo "Updating CHANGELOG.md"
|
echo "Updating CHANGELOG.md"
|
||||||
git-cliff --unreleased --tag "$NEXT_VERSION" --prepend CHANGELOG.md > /dev/null
|
git-cliff --unreleased --tag "$NEXT_VERSION" --prepend CHANGELOG.md > /dev/null
|
||||||
|
|
||||||
echo " > Commiting changes and adding git tag"
|
echo " > Commiting changes and adding git tag"
|
||||||
git add pyproject.toml CHANGELOG.md
|
git add pyproject.toml CHANGELOG.md uv.lock
|
||||||
git commit -m "chore(ci): Bump version to $NEXT_VERSION"
|
git commit -m "chore(ci): Bump version to $NEXT_VERSION"
|
||||||
git tag -a "$NEXT_VERSION" -m "$NEXT_VERSION"
|
git tag -a "$NEXT_VERSION" -m "$NEXT_VERSION"
|
||||||
|
|
||||||
|
|||||||
158
tests/conftest.py
Normal file
158
tests/conftest.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
"""Pytest configuration and shared fixtures."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import tempfile
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import patch
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
|
||||||
|
from leggend.main import create_app
|
||||||
|
from leggend.config import Config
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_config_dir():
|
||||||
|
"""Create a temporary config directory for testing."""
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
config_dir = Path(tmpdir) / ".config" / "leggen"
|
||||||
|
config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
yield config_dir
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_db_path():
|
||||||
|
"""Create a temporary database file for testing."""
|
||||||
|
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp_file:
|
||||||
|
db_path = Path(tmp_file.name)
|
||||||
|
yield db_path
|
||||||
|
# Clean up the temporary database file after test
|
||||||
|
if db_path.exists():
|
||||||
|
db_path.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_config(temp_config_dir, temp_db_path):
|
||||||
|
"""Mock configuration for testing."""
|
||||||
|
config_data = {
|
||||||
|
"gocardless": {
|
||||||
|
"key": "test-key",
|
||||||
|
"secret": "test-secret",
|
||||||
|
"url": "https://bankaccountdata.gocardless.com/api/v2",
|
||||||
|
},
|
||||||
|
"database": {"sqlite": True},
|
||||||
|
"scheduler": {"sync": {"enabled": True, "hour": 3, "minute": 0}},
|
||||||
|
}
|
||||||
|
|
||||||
|
config_file = temp_config_dir / "config.toml"
|
||||||
|
with open(config_file, "wb") as f:
|
||||||
|
import tomli_w
|
||||||
|
|
||||||
|
tomli_w.dump(config_data, f)
|
||||||
|
|
||||||
|
# Mock the config path
|
||||||
|
with patch.object(Config, "load_config") as mock_load:
|
||||||
|
mock_load.return_value = config_data
|
||||||
|
config = Config()
|
||||||
|
config._config = config_data
|
||||||
|
config._config_path = str(config_file)
|
||||||
|
yield config
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_auth_token(temp_config_dir):
|
||||||
|
"""Mock GoCardless authentication token."""
|
||||||
|
auth_data = {"access": "mock-access-token", "refresh": "mock-refresh-token"}
|
||||||
|
|
||||||
|
auth_file = temp_config_dir / "auth.json"
|
||||||
|
with open(auth_file, "w") as f:
|
||||||
|
json.dump(auth_data, f)
|
||||||
|
|
||||||
|
return auth_data
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def fastapi_app():
|
||||||
|
"""Create FastAPI test application."""
|
||||||
|
return create_app()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def api_client(fastapi_app):
|
||||||
|
"""Create FastAPI test client."""
|
||||||
|
return TestClient(fastapi_app)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_db_path(temp_db_path):
|
||||||
|
"""Mock the database path to use temporary database for testing."""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Create the expected directory structure
|
||||||
|
temp_home = temp_db_path.parent
|
||||||
|
config_dir = temp_home / ".config" / "leggen"
|
||||||
|
config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Create the expected database path
|
||||||
|
expected_db_path = config_dir / "leggen.db"
|
||||||
|
|
||||||
|
# Mock Path.home to return our temp directory
|
||||||
|
def mock_home():
|
||||||
|
return temp_home
|
||||||
|
|
||||||
|
# Patch Path.home in the main pathlib module
|
||||||
|
with patch.object(Path, "home", staticmethod(mock_home)):
|
||||||
|
yield expected_db_path
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_bank_data():
|
||||||
|
"""Sample bank/institution data for testing."""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"id": "REVOLUT_REVOLT21",
|
||||||
|
"name": "Revolut",
|
||||||
|
"bic": "REVOLT21",
|
||||||
|
"transaction_total_days": 90,
|
||||||
|
"countries": ["GB", "LT"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "BANCOBPI_BBPIPTPL",
|
||||||
|
"name": "Banco BPI",
|
||||||
|
"bic": "BBPIPTPL",
|
||||||
|
"transaction_total_days": 90,
|
||||||
|
"countries": ["PT"],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_account_data():
|
||||||
|
"""Sample account data for testing."""
|
||||||
|
return {
|
||||||
|
"id": "test-account-123",
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"status": "READY",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"created": "2024-02-13T23:56:00Z",
|
||||||
|
"last_accessed": "2025-09-01T09:30:00Z",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_transaction_data():
|
||||||
|
"""Sample transaction data for testing."""
|
||||||
|
return {
|
||||||
|
"transactions": {
|
||||||
|
"booked": [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-123",
|
||||||
|
"bookingDate": "2025-09-01",
|
||||||
|
"valueDate": "2025-09-01",
|
||||||
|
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||||
|
"remittanceInformationUnstructured": "Coffee Shop Payment",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"pending": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
282
tests/unit/test_api_accounts.py
Normal file
282
tests/unit/test_api_accounts.py
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
"""Tests for accounts API endpoints."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.api
|
||||||
|
class TestAccountsAPI:
|
||||||
|
"""Test account-related API endpoints."""
|
||||||
|
|
||||||
|
def test_get_all_accounts_success(
|
||||||
|
self,
|
||||||
|
api_client,
|
||||||
|
mock_config,
|
||||||
|
mock_auth_token,
|
||||||
|
sample_account_data,
|
||||||
|
mock_db_path,
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of all accounts from database."""
|
||||||
|
mock_accounts = [
|
||||||
|
{
|
||||||
|
"id": "test-account-123",
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"status": "READY",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"created": "2024-02-13T23:56:00Z",
|
||||||
|
"last_accessed": "2025-09-01T09:30:00Z",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
mock_balances = [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 100.50,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "interimAvailable",
|
||||||
|
"timestamp": "2025-09-01T09:30:00Z",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_accounts_from_db",
|
||||||
|
return_value=mock_accounts,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||||
|
return_value=mock_balances,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/accounts")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 1
|
||||||
|
account = data["data"][0]
|
||||||
|
assert account["id"] == "test-account-123"
|
||||||
|
assert account["institution_id"] == "REVOLUT_REVOLT21"
|
||||||
|
assert len(account["balances"]) == 1
|
||||||
|
assert account["balances"][0]["amount"] == 100.50
|
||||||
|
|
||||||
|
def test_get_account_details_success(
|
||||||
|
self,
|
||||||
|
api_client,
|
||||||
|
mock_config,
|
||||||
|
mock_auth_token,
|
||||||
|
sample_account_data,
|
||||||
|
mock_db_path,
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of specific account details from database."""
|
||||||
|
mock_account = {
|
||||||
|
"id": "test-account-123",
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"status": "READY",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"created": "2024-02-13T23:56:00Z",
|
||||||
|
"last_accessed": "2025-09-01T09:30:00Z",
|
||||||
|
}
|
||||||
|
|
||||||
|
mock_balances = [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 250.75,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "interimAvailable",
|
||||||
|
"timestamp": "2025-09-01T09:30:00Z",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_account_details_from_db",
|
||||||
|
return_value=mock_account,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||||
|
return_value=mock_balances,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/accounts/test-account-123")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
account = data["data"]
|
||||||
|
assert account["id"] == "test-account-123"
|
||||||
|
assert account["iban"] == "LT313250081177977789"
|
||||||
|
assert len(account["balances"]) == 1
|
||||||
|
|
||||||
|
def test_get_account_balances_success(
|
||||||
|
self, api_client, mock_config, mock_auth_token, mock_db_path
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of account balances from database."""
|
||||||
|
mock_balances = [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 1000.00,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "interimAvailable",
|
||||||
|
"timestamp": "2025-09-01T10:00:00Z",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 950.00,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "expected",
|
||||||
|
"timestamp": "2025-09-01T10:00:00Z",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||||
|
return_value=mock_balances,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/accounts/test-account-123/balances")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 2
|
||||||
|
assert data["data"][0]["amount"] == 1000.00
|
||||||
|
assert data["data"][0]["currency"] == "EUR"
|
||||||
|
assert data["data"][0]["balance_type"] == "interimAvailable"
|
||||||
|
|
||||||
|
def test_get_account_transactions_success(
|
||||||
|
self,
|
||||||
|
api_client,
|
||||||
|
mock_config,
|
||||||
|
mock_auth_token,
|
||||||
|
sample_account_data,
|
||||||
|
sample_transaction_data,
|
||||||
|
mock_db_path,
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of account transactions from database."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-123",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": "2025-09-01T09:30:00Z",
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "data"},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=1,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get(
|
||||||
|
"/api/v1/accounts/test-account-123/transactions?summary_only=true"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 1
|
||||||
|
|
||||||
|
transaction = data["data"][0]
|
||||||
|
assert transaction["internal_transaction_id"] == "txn-123"
|
||||||
|
assert transaction["amount"] == -10.50
|
||||||
|
assert transaction["currency"] == "EUR"
|
||||||
|
assert transaction["description"] == "Coffee Shop Payment"
|
||||||
|
|
||||||
|
def test_get_account_transactions_full_details(
|
||||||
|
self,
|
||||||
|
api_client,
|
||||||
|
mock_config,
|
||||||
|
mock_auth_token,
|
||||||
|
sample_account_data,
|
||||||
|
sample_transaction_data,
|
||||||
|
mock_db_path,
|
||||||
|
):
|
||||||
|
"""Test retrieval of full transaction details from database."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-123",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": "2025-09-01T09:30:00Z",
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "raw_data"},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=1,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get(
|
||||||
|
"/api/v1/accounts/test-account-123/transactions?summary_only=false"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 1
|
||||||
|
|
||||||
|
transaction = data["data"][0]
|
||||||
|
assert transaction["internal_transaction_id"] == "txn-123"
|
||||||
|
assert transaction["institution_id"] == "REVOLUT_REVOLT21"
|
||||||
|
assert transaction["iban"] == "LT313250081177977789"
|
||||||
|
assert "raw_transaction" in transaction
|
||||||
|
|
||||||
|
def test_get_account_not_found(
|
||||||
|
self, api_client, mock_config, mock_auth_token, mock_db_path
|
||||||
|
):
|
||||||
|
"""Test handling of non-existent account."""
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.accounts.database_service.get_account_details_from_db",
|
||||||
|
return_value=None,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/accounts/nonexistent")
|
||||||
|
|
||||||
|
assert response.status_code == 404
|
||||||
163
tests/unit/test_api_banks.py
Normal file
163
tests/unit/test_api_banks.py
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
"""Tests for banks API endpoints."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import respx
|
||||||
|
import httpx
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.api
|
||||||
|
class TestBanksAPI:
|
||||||
|
"""Test bank-related API endpoints."""
|
||||||
|
|
||||||
|
@respx.mock
|
||||||
|
def test_get_institutions_success(
|
||||||
|
self, api_client, mock_config, mock_auth_token, sample_bank_data
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of bank institutions."""
|
||||||
|
# Mock GoCardless token creation/refresh
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||||
|
return_value=httpx.Response(
|
||||||
|
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock GoCardless institutions API
|
||||||
|
respx.get("https://bankaccountdata.gocardless.com/api/v2/institutions/").mock(
|
||||||
|
return_value=httpx.Response(200, json=sample_bank_data)
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("leggend.config.config", mock_config):
|
||||||
|
response = api_client.get("/api/v1/banks/institutions?country=PT")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 2
|
||||||
|
assert data["data"][0]["id"] == "REVOLUT_REVOLT21"
|
||||||
|
assert data["data"][1]["id"] == "BANCOBPI_BBPIPTPL"
|
||||||
|
|
||||||
|
@respx.mock
|
||||||
|
def test_get_institutions_invalid_country(self, api_client, mock_config):
|
||||||
|
"""Test institutions endpoint with invalid country code."""
|
||||||
|
# Mock GoCardless token creation
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||||
|
return_value=httpx.Response(
|
||||||
|
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock empty institutions response for invalid country
|
||||||
|
respx.get("https://bankaccountdata.gocardless.com/api/v2/institutions/").mock(
|
||||||
|
return_value=httpx.Response(200, json=[])
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("leggend.config.config", mock_config):
|
||||||
|
response = api_client.get("/api/v1/banks/institutions?country=XX")
|
||||||
|
|
||||||
|
# Should still work but return empty or filtered results
|
||||||
|
assert response.status_code in [200, 404]
|
||||||
|
|
||||||
|
@respx.mock
|
||||||
|
def test_connect_to_bank_success(self, api_client, mock_config, mock_auth_token):
|
||||||
|
"""Test successful bank connection creation."""
|
||||||
|
requisition_data = {
|
||||||
|
"id": "req-123",
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"status": "CR",
|
||||||
|
"created": "2025-09-02T00:00:00Z",
|
||||||
|
"link": "https://example.com/auth",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Mock GoCardless token creation
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||||
|
return_value=httpx.Response(
|
||||||
|
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock GoCardless requisitions API
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/requisitions/").mock(
|
||||||
|
return_value=httpx.Response(200, json=requisition_data)
|
||||||
|
)
|
||||||
|
|
||||||
|
request_data = {
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"redirect_url": "http://localhost:8000/",
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("leggend.config.config", mock_config):
|
||||||
|
response = api_client.post("/api/v1/banks/connect", json=request_data)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert data["data"]["id"] == "req-123"
|
||||||
|
assert data["data"]["institution_id"] == "REVOLUT_REVOLT21"
|
||||||
|
|
||||||
|
@respx.mock
|
||||||
|
def test_get_bank_status_success(self, api_client, mock_config, mock_auth_token):
|
||||||
|
"""Test successful retrieval of bank connection status."""
|
||||||
|
requisitions_data = {
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"id": "req-123",
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"status": "LN",
|
||||||
|
"created": "2025-09-02T00:00:00Z",
|
||||||
|
"accounts": ["acc-123"],
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
# Mock GoCardless token creation
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||||
|
return_value=httpx.Response(
|
||||||
|
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Mock GoCardless requisitions API
|
||||||
|
respx.get("https://bankaccountdata.gocardless.com/api/v2/requisitions/").mock(
|
||||||
|
return_value=httpx.Response(200, json=requisitions_data)
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("leggend.config.config", mock_config):
|
||||||
|
response = api_client.get("/api/v1/banks/status")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 1
|
||||||
|
assert data["data"][0]["bank_id"] == "REVOLUT_REVOLT21"
|
||||||
|
assert data["data"][0]["status_display"] == "LINKED"
|
||||||
|
|
||||||
|
def test_get_supported_countries(self, api_client):
|
||||||
|
"""Test supported countries endpoint."""
|
||||||
|
response = api_client.get("/api/v1/banks/countries")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) > 0
|
||||||
|
|
||||||
|
# Check some expected countries
|
||||||
|
country_codes = [country["code"] for country in data["data"]]
|
||||||
|
assert "PT" in country_codes
|
||||||
|
assert "GB" in country_codes
|
||||||
|
assert "DE" in country_codes
|
||||||
|
|
||||||
|
@respx.mock
|
||||||
|
def test_authentication_failure(self, api_client, mock_config):
|
||||||
|
"""Test handling of authentication failures."""
|
||||||
|
# Mock token creation failure
|
||||||
|
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||||
|
return_value=httpx.Response(401, json={"detail": "Invalid credentials"})
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("leggend.config.config", mock_config):
|
||||||
|
response = api_client.get("/api/v1/banks/institutions")
|
||||||
|
|
||||||
|
assert response.status_code == 500
|
||||||
|
data = response.json()
|
||||||
|
assert "Failed to get institutions" in data["detail"]
|
||||||
155
tests/unit/test_api_client.py
Normal file
155
tests/unit/test_api_client.py
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
"""Tests for CLI API client."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
import requests_mock
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from leggen.api_client import LeggendAPIClient
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.cli
|
||||||
|
class TestLeggendAPIClient:
|
||||||
|
"""Test the CLI API client."""
|
||||||
|
|
||||||
|
def test_health_check_success(self):
|
||||||
|
"""Test successful health check."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get("http://localhost:8000/health", json={"status": "healthy"})
|
||||||
|
|
||||||
|
result = client.health_check()
|
||||||
|
assert result is True
|
||||||
|
|
||||||
|
def test_health_check_failure(self):
|
||||||
|
"""Test health check failure."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get("http://localhost:8000/health", status_code=500)
|
||||||
|
|
||||||
|
result = client.health_check()
|
||||||
|
assert result is False
|
||||||
|
|
||||||
|
def test_get_institutions_success(self, sample_bank_data):
|
||||||
|
"""Test getting institutions via API client."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
api_response = {
|
||||||
|
"success": True,
|
||||||
|
"data": sample_bank_data,
|
||||||
|
"message": "Found 2 institutions for PT",
|
||||||
|
}
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get("http://localhost:8000/api/v1/banks/institutions", json=api_response)
|
||||||
|
|
||||||
|
result = client.get_institutions("PT")
|
||||||
|
assert len(result) == 2
|
||||||
|
assert result[0]["id"] == "REVOLUT_REVOLT21"
|
||||||
|
|
||||||
|
def test_get_accounts_success(self, sample_account_data):
|
||||||
|
"""Test getting accounts via API client."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
api_response = {
|
||||||
|
"success": True,
|
||||||
|
"data": [sample_account_data],
|
||||||
|
"message": "Retrieved 1 accounts",
|
||||||
|
}
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get("http://localhost:8000/api/v1/accounts", json=api_response)
|
||||||
|
|
||||||
|
result = client.get_accounts()
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["id"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_trigger_sync_success(self):
|
||||||
|
"""Test triggering sync via API client."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
api_response = {
|
||||||
|
"success": True,
|
||||||
|
"data": {"sync_started": True, "force": False},
|
||||||
|
"message": "Started sync for all accounts",
|
||||||
|
}
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.post("http://localhost:8000/api/v1/sync", json=api_response)
|
||||||
|
|
||||||
|
result = client.trigger_sync()
|
||||||
|
assert result["sync_started"] is True
|
||||||
|
|
||||||
|
def test_connection_error_handling(self):
|
||||||
|
"""Test handling of connection errors."""
|
||||||
|
client = LeggendAPIClient("http://localhost:9999") # Non-existent service
|
||||||
|
|
||||||
|
with pytest.raises((requests.ConnectionError, requests.RequestException)):
|
||||||
|
client.get_accounts()
|
||||||
|
|
||||||
|
def test_http_error_handling(self):
|
||||||
|
"""Test handling of HTTP errors."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get(
|
||||||
|
"http://localhost:8000/api/v1/accounts",
|
||||||
|
status_code=500,
|
||||||
|
json={"detail": "Internal server error"},
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises((requests.HTTPError, requests.RequestException)):
|
||||||
|
client.get_accounts()
|
||||||
|
|
||||||
|
def test_custom_api_url(self):
|
||||||
|
"""Test using custom API URL."""
|
||||||
|
custom_url = "http://custom-host:9000"
|
||||||
|
client = LeggendAPIClient(custom_url)
|
||||||
|
|
||||||
|
assert client.base_url == custom_url
|
||||||
|
|
||||||
|
def test_environment_variable_url(self):
|
||||||
|
"""Test using environment variable for API URL."""
|
||||||
|
with patch.dict("os.environ", {"LEGGEND_API_URL": "http://env-host:7000"}):
|
||||||
|
client = LeggendAPIClient()
|
||||||
|
assert client.base_url == "http://env-host:7000"
|
||||||
|
|
||||||
|
def test_sync_with_options(self):
|
||||||
|
"""Test sync with various options."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
api_response = {
|
||||||
|
"success": True,
|
||||||
|
"data": {"sync_started": True, "force": True},
|
||||||
|
"message": "Started sync for 2 specific accounts",
|
||||||
|
}
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.post("http://localhost:8000/api/v1/sync", json=api_response)
|
||||||
|
|
||||||
|
result = client.trigger_sync(account_ids=["acc1", "acc2"], force=True)
|
||||||
|
assert result["sync_started"] is True
|
||||||
|
assert result["force"] is True
|
||||||
|
|
||||||
|
def test_get_scheduler_config(self):
|
||||||
|
"""Test getting scheduler configuration."""
|
||||||
|
client = LeggendAPIClient("http://localhost:8000")
|
||||||
|
|
||||||
|
api_response = {
|
||||||
|
"success": True,
|
||||||
|
"data": {
|
||||||
|
"enabled": True,
|
||||||
|
"hour": 3,
|
||||||
|
"minute": 0,
|
||||||
|
"next_scheduled_sync": "2025-09-03T03:00:00Z",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
with requests_mock.Mocker() as m:
|
||||||
|
m.get("http://localhost:8000/api/v1/sync/scheduler", json=api_response)
|
||||||
|
|
||||||
|
result = client.get_scheduler_config()
|
||||||
|
assert result["enabled"] is True
|
||||||
|
assert result["hour"] == 3
|
||||||
370
tests/unit/test_api_transactions.py
Normal file
370
tests/unit/test_api_transactions.py
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
"""Tests for transactions API endpoints."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.api
|
||||||
|
class TestTransactionsAPI:
|
||||||
|
"""Test transaction-related API endpoints."""
|
||||||
|
|
||||||
|
def test_get_all_transactions_success(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of all transactions from database."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "data"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-002",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||||
|
"description": "Grocery Store",
|
||||||
|
"transactionValue": -45.30,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"other": "data"},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=2,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions?summary_only=true")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 2
|
||||||
|
|
||||||
|
# Check first transaction summary
|
||||||
|
transaction = data["data"][0]
|
||||||
|
assert transaction["internal_transaction_id"] == "txn-001"
|
||||||
|
assert transaction["amount"] == -10.50
|
||||||
|
assert transaction["currency"] == "EUR"
|
||||||
|
assert transaction["description"] == "Coffee Shop Payment"
|
||||||
|
assert transaction["status"] == "booked"
|
||||||
|
assert transaction["account_id"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_get_all_transactions_full_details(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test retrieval of full transaction details from database."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "raw_data"},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=1,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions?summary_only=false")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 1
|
||||||
|
|
||||||
|
transaction = data["data"][0]
|
||||||
|
assert transaction["internal_transaction_id"] == "txn-001"
|
||||||
|
assert transaction["institution_id"] == "REVOLUT_REVOLT21"
|
||||||
|
assert transaction["iban"] == "LT313250081177977789"
|
||||||
|
assert "raw_transaction" in transaction
|
||||||
|
|
||||||
|
def test_get_transactions_with_filters(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test getting transactions with various filters."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "data"},
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
) as mock_get_transactions,
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=1,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get(
|
||||||
|
"/api/v1/transactions?"
|
||||||
|
"account_id=test-account-123&"
|
||||||
|
"date_from=2025-09-01&"
|
||||||
|
"date_to=2025-09-02&"
|
||||||
|
"min_amount=-50.0&"
|
||||||
|
"max_amount=0.0&"
|
||||||
|
"search=Coffee&"
|
||||||
|
"limit=10&"
|
||||||
|
"offset=5"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
|
||||||
|
# Verify the database service was called with correct filters
|
||||||
|
mock_get_transactions.assert_called_once_with(
|
||||||
|
account_id="test-account-123",
|
||||||
|
limit=10,
|
||||||
|
offset=5,
|
||||||
|
date_from="2025-09-01",
|
||||||
|
date_to="2025-09-02",
|
||||||
|
min_amount=-50.0,
|
||||||
|
max_amount=0.0,
|
||||||
|
search="Coffee",
|
||||||
|
hide_missing_ids=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_get_transactions_empty_result(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test getting transactions when database returns empty result."""
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=[],
|
||||||
|
),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||||
|
return_value=0,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
assert len(data["data"]) == 0
|
||||||
|
assert "0 transactions" in data["message"]
|
||||||
|
|
||||||
|
def test_get_transactions_database_error(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test handling database error when getting transactions."""
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
side_effect=Exception("Database connection failed"),
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions")
|
||||||
|
|
||||||
|
assert response.status_code == 500
|
||||||
|
assert "Failed to get transactions" in response.json()["detail"]
|
||||||
|
|
||||||
|
def test_get_transaction_stats_success(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of transaction statistics from database."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-002",
|
||||||
|
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||||
|
"transactionValue": 100.00,
|
||||||
|
"transactionStatus": "pending",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-003",
|
||||||
|
"transactionDate": datetime(2025, 9, 3, 16, 45),
|
||||||
|
"transactionValue": -25.30,
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "other-account-456",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions/stats?days=30")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
|
||||||
|
stats = data["data"]
|
||||||
|
assert stats["period_days"] == 30
|
||||||
|
assert stats["total_transactions"] == 3
|
||||||
|
assert stats["booked_transactions"] == 2
|
||||||
|
assert stats["pending_transactions"] == 1
|
||||||
|
assert stats["total_income"] == 100.00
|
||||||
|
assert stats["total_expenses"] == 35.80 # abs(-10.50) + abs(-25.30)
|
||||||
|
assert stats["net_change"] == 64.20 # 100.00 - 35.80
|
||||||
|
assert stats["accounts_included"] == 2 # Two unique account IDs
|
||||||
|
|
||||||
|
# Average transaction: ((-10.50) + 100.00 + (-25.30)) / 3 = 64.20 / 3 = 21.4
|
||||||
|
expected_avg = round(64.20 / 3, 2)
|
||||||
|
assert stats["average_transaction"] == expected_avg
|
||||||
|
|
||||||
|
def test_get_transaction_stats_with_account_filter(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test getting transaction stats filtered by account."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
) as mock_get_transactions,
|
||||||
|
):
|
||||||
|
response = api_client.get(
|
||||||
|
"/api/v1/transactions/stats?account_id=test-account-123"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
|
||||||
|
# Verify the database service was called with account filter
|
||||||
|
mock_get_transactions.assert_called_once()
|
||||||
|
call_kwargs = mock_get_transactions.call_args.kwargs
|
||||||
|
assert call_kwargs["account_id"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_get_transaction_stats_empty_result(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test getting stats when no transactions match criteria."""
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=[],
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions/stats")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["success"] is True
|
||||||
|
|
||||||
|
stats = data["data"]
|
||||||
|
assert stats["total_transactions"] == 0
|
||||||
|
assert stats["total_income"] == 0.0
|
||||||
|
assert stats["total_expenses"] == 0.0
|
||||||
|
assert stats["net_change"] == 0.0
|
||||||
|
assert stats["average_transaction"] == 0 # Division by zero handled
|
||||||
|
assert stats["accounts_included"] == 0
|
||||||
|
|
||||||
|
def test_get_transaction_stats_database_error(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test handling database error when getting stats."""
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
side_effect=Exception("Database connection failed"),
|
||||||
|
),
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions/stats")
|
||||||
|
|
||||||
|
assert response.status_code == 500
|
||||||
|
assert "Failed to get transaction stats" in response.json()["detail"]
|
||||||
|
|
||||||
|
def test_get_transaction_stats_custom_period(
|
||||||
|
self, api_client, mock_config, mock_auth_token
|
||||||
|
):
|
||||||
|
"""Test getting transaction stats for custom time period."""
|
||||||
|
mock_transactions = [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("leggend.config.config", mock_config),
|
||||||
|
patch(
|
||||||
|
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||||
|
return_value=mock_transactions,
|
||||||
|
) as mock_get_transactions,
|
||||||
|
):
|
||||||
|
response = api_client.get("/api/v1/transactions/stats?days=7")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
data = response.json()
|
||||||
|
assert data["data"]["period_days"] == 7
|
||||||
|
|
||||||
|
# Verify the date range was calculated correctly for 7 days
|
||||||
|
mock_get_transactions.assert_called_once()
|
||||||
|
call_kwargs = mock_get_transactions.call_args.kwargs
|
||||||
|
assert "date_from" in call_kwargs
|
||||||
|
assert "date_to" in call_kwargs
|
||||||
202
tests/unit/test_config.py
Normal file
202
tests/unit/test_config.py
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
"""Tests for configuration management."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch
|
||||||
|
|
||||||
|
from leggend.config import Config
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.unit
|
||||||
|
class TestConfig:
|
||||||
|
"""Test configuration management."""
|
||||||
|
|
||||||
|
def test_singleton_behavior(self):
|
||||||
|
"""Test that Config is a singleton."""
|
||||||
|
config1 = Config()
|
||||||
|
config2 = Config()
|
||||||
|
assert config1 is config2
|
||||||
|
|
||||||
|
def test_load_config_success(self, temp_config_dir):
|
||||||
|
"""Test successful configuration loading."""
|
||||||
|
config_data = {
|
||||||
|
"gocardless": {
|
||||||
|
"key": "test-key",
|
||||||
|
"secret": "test-secret",
|
||||||
|
"url": "https://test.example.com",
|
||||||
|
},
|
||||||
|
"database": {"sqlite": True},
|
||||||
|
}
|
||||||
|
|
||||||
|
config_file = temp_config_dir / "config.toml"
|
||||||
|
with open(config_file, "wb") as f:
|
||||||
|
import tomli_w
|
||||||
|
|
||||||
|
tomli_w.dump(config_data, f)
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
# Reset singleton state for testing
|
||||||
|
config._config = None
|
||||||
|
config._config_path = None
|
||||||
|
|
||||||
|
result = config.load_config(str(config_file))
|
||||||
|
|
||||||
|
assert result == config_data
|
||||||
|
assert config.gocardless_config["key"] == "test-key"
|
||||||
|
assert config.database_config["sqlite"] is True
|
||||||
|
|
||||||
|
def test_load_config_file_not_found(self):
|
||||||
|
"""Test handling of missing configuration file."""
|
||||||
|
config = Config()
|
||||||
|
config._config = None # Reset for test
|
||||||
|
|
||||||
|
with pytest.raises(FileNotFoundError):
|
||||||
|
config.load_config("/nonexistent/config.toml")
|
||||||
|
|
||||||
|
def test_save_config_success(self, temp_config_dir):
|
||||||
|
"""Test successful configuration saving."""
|
||||||
|
config_data = {"gocardless": {"key": "new-key", "secret": "new-secret"}}
|
||||||
|
|
||||||
|
config_file = temp_config_dir / "new_config.toml"
|
||||||
|
config = Config()
|
||||||
|
config._config = None
|
||||||
|
|
||||||
|
config.save_config(config_data, str(config_file))
|
||||||
|
|
||||||
|
# Verify file was created and contains correct data
|
||||||
|
assert config_file.exists()
|
||||||
|
|
||||||
|
import tomllib
|
||||||
|
|
||||||
|
with open(config_file, "rb") as f:
|
||||||
|
saved_data = tomllib.load(f)
|
||||||
|
|
||||||
|
assert saved_data == config_data
|
||||||
|
|
||||||
|
def test_update_config_success(self, temp_config_dir):
|
||||||
|
"""Test updating configuration values."""
|
||||||
|
initial_config = {
|
||||||
|
"gocardless": {"key": "old-key"},
|
||||||
|
"database": {"sqlite": True},
|
||||||
|
}
|
||||||
|
|
||||||
|
config_file = temp_config_dir / "config.toml"
|
||||||
|
with open(config_file, "wb") as f:
|
||||||
|
import tomli_w
|
||||||
|
|
||||||
|
tomli_w.dump(initial_config, f)
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config._config = None
|
||||||
|
config.load_config(str(config_file))
|
||||||
|
|
||||||
|
config.update_config("gocardless", "key", "new-key")
|
||||||
|
|
||||||
|
assert config.gocardless_config["key"] == "new-key"
|
||||||
|
|
||||||
|
# Verify it was saved to file
|
||||||
|
import tomllib
|
||||||
|
|
||||||
|
with open(config_file, "rb") as f:
|
||||||
|
saved_data = tomllib.load(f)
|
||||||
|
assert saved_data["gocardless"]["key"] == "new-key"
|
||||||
|
|
||||||
|
def test_update_section_success(self, temp_config_dir):
|
||||||
|
"""Test updating entire configuration section."""
|
||||||
|
initial_config = {"database": {"sqlite": True}}
|
||||||
|
|
||||||
|
config_file = temp_config_dir / "config.toml"
|
||||||
|
with open(config_file, "wb") as f:
|
||||||
|
import tomli_w
|
||||||
|
|
||||||
|
tomli_w.dump(initial_config, f)
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config._config = None
|
||||||
|
config.load_config(str(config_file))
|
||||||
|
|
||||||
|
new_db_config = {"sqlite": False, "path": "./custom.db"}
|
||||||
|
config.update_section("database", new_db_config)
|
||||||
|
|
||||||
|
assert config.database_config == new_db_config
|
||||||
|
|
||||||
|
def test_scheduler_config_defaults(self):
|
||||||
|
"""Test scheduler configuration with defaults."""
|
||||||
|
config = Config()
|
||||||
|
config._config = {} # Empty config
|
||||||
|
|
||||||
|
scheduler_config = config.scheduler_config
|
||||||
|
|
||||||
|
assert scheduler_config["sync"]["enabled"] is True
|
||||||
|
assert scheduler_config["sync"]["hour"] == 3
|
||||||
|
assert scheduler_config["sync"]["minute"] == 0
|
||||||
|
assert scheduler_config["sync"]["cron"] is None
|
||||||
|
|
||||||
|
def test_scheduler_config_custom(self):
|
||||||
|
"""Test scheduler configuration with custom values."""
|
||||||
|
custom_config = {
|
||||||
|
"scheduler": {
|
||||||
|
"sync": {
|
||||||
|
"enabled": False,
|
||||||
|
"hour": 6,
|
||||||
|
"minute": 30,
|
||||||
|
"cron": "0 6 * * 1-5",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config._config = custom_config
|
||||||
|
|
||||||
|
scheduler_config = config.scheduler_config
|
||||||
|
|
||||||
|
assert scheduler_config["sync"]["enabled"] is False
|
||||||
|
assert scheduler_config["sync"]["hour"] == 6
|
||||||
|
assert scheduler_config["sync"]["minute"] == 30
|
||||||
|
assert scheduler_config["sync"]["cron"] == "0 6 * * 1-5"
|
||||||
|
|
||||||
|
def test_environment_variable_config_path(self):
|
||||||
|
"""Test using environment variable for config path."""
|
||||||
|
with patch.dict(
|
||||||
|
"os.environ", {"LEGGEN_CONFIG_FILE": "/custom/path/config.toml"}
|
||||||
|
):
|
||||||
|
config = Config()
|
||||||
|
config._config = None
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("builtins.open", side_effect=FileNotFoundError),
|
||||||
|
pytest.raises(FileNotFoundError),
|
||||||
|
):
|
||||||
|
config.load_config()
|
||||||
|
|
||||||
|
def test_notifications_config(self):
|
||||||
|
"""Test notifications configuration access."""
|
||||||
|
custom_config = {
|
||||||
|
"notifications": {
|
||||||
|
"discord": {"webhook": "https://discord.webhook", "enabled": True},
|
||||||
|
"telegram": {"token": "bot-token", "chat_id": 123},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config._config = custom_config
|
||||||
|
|
||||||
|
notifications = config.notifications_config
|
||||||
|
assert notifications["discord"]["webhook"] == "https://discord.webhook"
|
||||||
|
assert notifications["telegram"]["token"] == "bot-token"
|
||||||
|
|
||||||
|
def test_filters_config(self):
|
||||||
|
"""Test filters configuration access."""
|
||||||
|
custom_config = {
|
||||||
|
"filters": {
|
||||||
|
"case-insensitive": ["salary", "utility"],
|
||||||
|
"case-sensitive": ["SpecificStore"],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config = Config()
|
||||||
|
config._config = custom_config
|
||||||
|
|
||||||
|
filters = config.filters_config
|
||||||
|
assert "salary" in filters["case-insensitive"]
|
||||||
|
assert "utility" in filters["case-insensitive"]
|
||||||
|
assert "SpecificStore" in filters["case-sensitive"]
|
||||||
438
tests/unit/test_database_service.py
Normal file
438
tests/unit/test_database_service.py
Normal file
@@ -0,0 +1,438 @@
|
|||||||
|
"""Tests for database service."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from leggend.services.database_service import DatabaseService
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def database_service():
|
||||||
|
"""Create a database service instance for testing."""
|
||||||
|
return DatabaseService()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_transactions_db_format():
|
||||||
|
"""Sample transactions in database format."""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "data"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-002",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||||
|
"description": "Grocery Store",
|
||||||
|
"transactionValue": -45.30,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"other": "data"},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_balances_db_format():
|
||||||
|
"""Sample balances in database format."""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 1000.00,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "interimAvailable",
|
||||||
|
"timestamp": datetime(2025, 9, 1, 10, 0),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 2,
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 950.00,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "expected",
|
||||||
|
"timestamp": datetime(2025, 9, 1, 10, 0),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
class TestDatabaseService:
|
||||||
|
"""Test database service operations."""
|
||||||
|
|
||||||
|
async def test_get_transactions_from_db_success(
|
||||||
|
self, database_service, sample_transactions_db_format
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of transactions from database."""
|
||||||
|
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||||
|
mock_get_transactions.return_value = sample_transactions_db_format
|
||||||
|
|
||||||
|
result = await database_service.get_transactions_from_db(
|
||||||
|
account_id="test-account-123", limit=10
|
||||||
|
)
|
||||||
|
|
||||||
|
assert len(result) == 2
|
||||||
|
assert result[0]["internalTransactionId"] == "txn-001"
|
||||||
|
mock_get_transactions.assert_called_once_with(
|
||||||
|
account_id="test-account-123",
|
||||||
|
limit=10,
|
||||||
|
offset=0,
|
||||||
|
date_from=None,
|
||||||
|
date_to=None,
|
||||||
|
min_amount=None,
|
||||||
|
max_amount=None,
|
||||||
|
search=None,
|
||||||
|
hide_missing_ids=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_get_transactions_from_db_with_filters(
|
||||||
|
self, database_service, sample_transactions_db_format
|
||||||
|
):
|
||||||
|
"""Test retrieving transactions with filters."""
|
||||||
|
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||||
|
mock_get_transactions.return_value = sample_transactions_db_format
|
||||||
|
|
||||||
|
result = await database_service.get_transactions_from_db(
|
||||||
|
account_id="test-account-123",
|
||||||
|
limit=5,
|
||||||
|
offset=10,
|
||||||
|
date_from="2025-09-01",
|
||||||
|
date_to="2025-09-02",
|
||||||
|
min_amount=-50.0,
|
||||||
|
max_amount=0.0,
|
||||||
|
search="Coffee",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert len(result) == 2
|
||||||
|
mock_get_transactions.assert_called_once_with(
|
||||||
|
account_id="test-account-123",
|
||||||
|
limit=5,
|
||||||
|
offset=10,
|
||||||
|
date_from="2025-09-01",
|
||||||
|
date_to="2025-09-02",
|
||||||
|
min_amount=-50.0,
|
||||||
|
max_amount=0.0,
|
||||||
|
search="Coffee",
|
||||||
|
hide_missing_ids=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_get_transactions_from_db_sqlite_disabled(self, database_service):
|
||||||
|
"""Test getting transactions when SQLite is disabled."""
|
||||||
|
database_service.sqlite_enabled = False
|
||||||
|
|
||||||
|
result = await database_service.get_transactions_from_db()
|
||||||
|
|
||||||
|
assert result == []
|
||||||
|
|
||||||
|
async def test_get_transactions_from_db_error(self, database_service):
|
||||||
|
"""Test handling error when getting transactions."""
|
||||||
|
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||||
|
mock_get_transactions.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
result = await database_service.get_transactions_from_db()
|
||||||
|
|
||||||
|
assert result == []
|
||||||
|
|
||||||
|
async def test_get_transaction_count_from_db_success(self, database_service):
|
||||||
|
"""Test successful retrieval of transaction count."""
|
||||||
|
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||||
|
mock_get_count.return_value = 42
|
||||||
|
|
||||||
|
result = await database_service.get_transaction_count_from_db(
|
||||||
|
account_id="test-account-123"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result == 42
|
||||||
|
mock_get_count.assert_called_once_with(
|
||||||
|
account_id="test-account-123", hide_missing_ids=True
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_get_transaction_count_from_db_with_filters(self, database_service):
|
||||||
|
"""Test getting transaction count with filters."""
|
||||||
|
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||||
|
mock_get_count.return_value = 15
|
||||||
|
|
||||||
|
result = await database_service.get_transaction_count_from_db(
|
||||||
|
account_id="test-account-123",
|
||||||
|
date_from="2025-09-01",
|
||||||
|
min_amount=-100.0,
|
||||||
|
search="Coffee",
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result == 15
|
||||||
|
mock_get_count.assert_called_once_with(
|
||||||
|
account_id="test-account-123",
|
||||||
|
date_from="2025-09-01",
|
||||||
|
min_amount=-100.0,
|
||||||
|
search="Coffee",
|
||||||
|
hide_missing_ids=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_get_transaction_count_from_db_sqlite_disabled(
|
||||||
|
self, database_service
|
||||||
|
):
|
||||||
|
"""Test getting count when SQLite is disabled."""
|
||||||
|
database_service.sqlite_enabled = False
|
||||||
|
|
||||||
|
result = await database_service.get_transaction_count_from_db()
|
||||||
|
|
||||||
|
assert result == 0
|
||||||
|
|
||||||
|
async def test_get_transaction_count_from_db_error(self, database_service):
|
||||||
|
"""Test handling error when getting count."""
|
||||||
|
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||||
|
mock_get_count.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
result = await database_service.get_transaction_count_from_db()
|
||||||
|
|
||||||
|
assert result == 0
|
||||||
|
|
||||||
|
async def test_get_balances_from_db_success(
|
||||||
|
self, database_service, sample_balances_db_format
|
||||||
|
):
|
||||||
|
"""Test successful retrieval of balances from database."""
|
||||||
|
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
|
||||||
|
mock_get_balances.return_value = sample_balances_db_format
|
||||||
|
|
||||||
|
result = await database_service.get_balances_from_db(
|
||||||
|
account_id="test-account-123"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert len(result) == 2
|
||||||
|
assert result[0]["account_id"] == "test-account-123"
|
||||||
|
assert result[0]["amount"] == 1000.00
|
||||||
|
mock_get_balances.assert_called_once_with(account_id="test-account-123")
|
||||||
|
|
||||||
|
async def test_get_balances_from_db_sqlite_disabled(self, database_service):
|
||||||
|
"""Test getting balances when SQLite is disabled."""
|
||||||
|
database_service.sqlite_enabled = False
|
||||||
|
|
||||||
|
result = await database_service.get_balances_from_db()
|
||||||
|
|
||||||
|
assert result == []
|
||||||
|
|
||||||
|
async def test_get_balances_from_db_error(self, database_service):
|
||||||
|
"""Test handling error when getting balances."""
|
||||||
|
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
|
||||||
|
mock_get_balances.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
result = await database_service.get_balances_from_db()
|
||||||
|
|
||||||
|
assert result == []
|
||||||
|
|
||||||
|
async def test_get_account_summary_from_db_success(self, database_service):
|
||||||
|
"""Test successful retrieval of account summary."""
|
||||||
|
mock_summary = {
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
|
||||||
|
mock_get_summary.return_value = mock_summary
|
||||||
|
|
||||||
|
result = await database_service.get_account_summary_from_db(
|
||||||
|
"test-account-123"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result == mock_summary
|
||||||
|
mock_get_summary.assert_called_once_with("test-account-123")
|
||||||
|
|
||||||
|
async def test_get_account_summary_from_db_sqlite_disabled(self, database_service):
|
||||||
|
"""Test getting summary when SQLite is disabled."""
|
||||||
|
database_service.sqlite_enabled = False
|
||||||
|
|
||||||
|
result = await database_service.get_account_summary_from_db("test-account-123")
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
async def test_get_account_summary_from_db_error(self, database_service):
|
||||||
|
"""Test handling error when getting summary."""
|
||||||
|
with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
|
||||||
|
mock_get_summary.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
result = await database_service.get_account_summary_from_db(
|
||||||
|
"test-account-123"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
async def test_persist_balance_sqlite_success(self, database_service):
|
||||||
|
"""Test successful balance persistence."""
|
||||||
|
balance_data = {
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"balances": [
|
||||||
|
{
|
||||||
|
"balanceAmount": {"amount": "1000.00", "currency": "EUR"},
|
||||||
|
"balanceType": "interimAvailable",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("sqlite3.connect") as mock_connect:
|
||||||
|
mock_conn = mock_connect.return_value
|
||||||
|
mock_cursor = mock_conn.cursor.return_value
|
||||||
|
|
||||||
|
await database_service._persist_balance_sqlite(
|
||||||
|
"test-account-123", balance_data
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify database operations
|
||||||
|
mock_connect.assert_called()
|
||||||
|
mock_cursor.execute.assert_called() # Table creation and insert
|
||||||
|
mock_conn.commit.assert_called_once()
|
||||||
|
mock_conn.close.assert_called_once()
|
||||||
|
|
||||||
|
async def test_persist_balance_sqlite_error(self, database_service):
|
||||||
|
"""Test handling error during balance persistence."""
|
||||||
|
balance_data = {"balances": []}
|
||||||
|
|
||||||
|
with patch("sqlite3.connect") as mock_connect:
|
||||||
|
mock_connect.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
with pytest.raises(Exception, match="Database error"):
|
||||||
|
await database_service._persist_balance_sqlite(
|
||||||
|
"test-account-123", balance_data
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_persist_transactions_sqlite_success(
|
||||||
|
self, database_service, sample_transactions_db_format
|
||||||
|
):
|
||||||
|
"""Test successful transaction persistence."""
|
||||||
|
with patch("sqlite3.connect") as mock_connect:
|
||||||
|
mock_conn = mock_connect.return_value
|
||||||
|
mock_cursor = mock_conn.cursor.return_value
|
||||||
|
|
||||||
|
result = await database_service._persist_transactions_sqlite(
|
||||||
|
"test-account-123", sample_transactions_db_format
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should return the transactions (assuming no duplicates)
|
||||||
|
assert len(result) >= 0 # Could be empty if all are duplicates
|
||||||
|
|
||||||
|
# Verify database operations
|
||||||
|
mock_connect.assert_called()
|
||||||
|
mock_cursor.execute.assert_called()
|
||||||
|
mock_conn.commit.assert_called_once()
|
||||||
|
mock_conn.close.assert_called_once()
|
||||||
|
|
||||||
|
async def test_persist_transactions_sqlite_error(self, database_service):
|
||||||
|
"""Test handling error during transaction persistence."""
|
||||||
|
with patch("sqlite3.connect") as mock_connect:
|
||||||
|
mock_connect.side_effect = Exception("Database error")
|
||||||
|
|
||||||
|
with pytest.raises(Exception, match="Database error"):
|
||||||
|
await database_service._persist_transactions_sqlite(
|
||||||
|
"test-account-123", []
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_process_transactions_booked_and_pending(self, database_service):
|
||||||
|
"""Test processing transactions with both booked and pending."""
|
||||||
|
account_info = {
|
||||||
|
"institution_id": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
}
|
||||||
|
|
||||||
|
transaction_data = {
|
||||||
|
"transactions": {
|
||||||
|
"booked": [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"bookingDate": "2025-09-01",
|
||||||
|
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||||
|
"remittanceInformationUnstructured": "Coffee Shop",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"pending": [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-002",
|
||||||
|
"bookingDate": "2025-09-02",
|
||||||
|
"transactionAmount": {"amount": "-25.00", "currency": "EUR"},
|
||||||
|
"remittanceInformationUnstructured": "Gas Station",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result = database_service.process_transactions(
|
||||||
|
"test-account-123", account_info, transaction_data
|
||||||
|
)
|
||||||
|
|
||||||
|
assert len(result) == 2
|
||||||
|
|
||||||
|
# Check booked transaction
|
||||||
|
booked_txn = next(t for t in result if t["transactionStatus"] == "booked")
|
||||||
|
assert booked_txn["internalTransactionId"] == "txn-001"
|
||||||
|
assert booked_txn["transactionValue"] == -10.50
|
||||||
|
assert booked_txn["description"] == "Coffee Shop"
|
||||||
|
|
||||||
|
# Check pending transaction
|
||||||
|
pending_txn = next(t for t in result if t["transactionStatus"] == "pending")
|
||||||
|
assert pending_txn["internalTransactionId"] == "txn-002"
|
||||||
|
assert pending_txn["transactionValue"] == -25.00
|
||||||
|
assert pending_txn["description"] == "Gas Station"
|
||||||
|
|
||||||
|
async def test_process_transactions_missing_date_error(self, database_service):
|
||||||
|
"""Test processing transaction with missing date raises error."""
|
||||||
|
account_info = {"institution_id": "TEST_BANK"}
|
||||||
|
|
||||||
|
transaction_data = {
|
||||||
|
"transactions": {
|
||||||
|
"booked": [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
# Missing both bookingDate and valueDate
|
||||||
|
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"pending": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
with pytest.raises(ValueError, match="No valid date found in transaction"):
|
||||||
|
database_service.process_transactions(
|
||||||
|
"test-account-123", account_info, transaction_data
|
||||||
|
)
|
||||||
|
|
||||||
|
async def test_process_transactions_remittance_array(self, database_service):
|
||||||
|
"""Test processing transaction with remittance array."""
|
||||||
|
account_info = {"institution_id": "TEST_BANK"}
|
||||||
|
|
||||||
|
transaction_data = {
|
||||||
|
"transactions": {
|
||||||
|
"booked": [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"bookingDate": "2025-09-01",
|
||||||
|
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||||
|
"remittanceInformationUnstructuredArray": ["Line 1", "Line 2"],
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"pending": [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result = database_service.process_transactions(
|
||||||
|
"test-account-123", account_info, transaction_data
|
||||||
|
)
|
||||||
|
|
||||||
|
assert len(result) == 1
|
||||||
|
assert result[0]["description"] == "Line 1,Line 2"
|
||||||
196
tests/unit/test_scheduler.py
Normal file
196
tests/unit/test_scheduler.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
"""Tests for background scheduler."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import patch, AsyncMock, MagicMock
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from leggend.background.scheduler import BackgroundScheduler
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.unit
|
||||||
|
class TestBackgroundScheduler:
|
||||||
|
"""Test background job scheduler."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_config(self):
|
||||||
|
"""Mock configuration for scheduler tests."""
|
||||||
|
return {"sync": {"enabled": True, "hour": 3, "minute": 0, "cron": None}}
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def scheduler(self):
|
||||||
|
"""Create scheduler instance for testing."""
|
||||||
|
with (
|
||||||
|
patch("leggend.background.scheduler.SyncService"),
|
||||||
|
patch("leggend.background.scheduler.config") as mock_config,
|
||||||
|
):
|
||||||
|
mock_config.scheduler_config = {
|
||||||
|
"sync": {"enabled": True, "hour": 3, "minute": 0}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create scheduler and replace its AsyncIO scheduler with a mock
|
||||||
|
scheduler = BackgroundScheduler()
|
||||||
|
mock_scheduler = MagicMock()
|
||||||
|
mock_scheduler.running = False
|
||||||
|
mock_scheduler.get_jobs.return_value = []
|
||||||
|
scheduler.scheduler = mock_scheduler
|
||||||
|
return scheduler
|
||||||
|
|
||||||
|
def test_scheduler_start_default_config(self, scheduler, mock_config):
|
||||||
|
"""Test starting scheduler with default configuration."""
|
||||||
|
with patch("leggend.config.config") as mock_config_obj:
|
||||||
|
mock_config_obj.scheduler_config = mock_config
|
||||||
|
|
||||||
|
# Mock the job that gets added
|
||||||
|
mock_job = MagicMock()
|
||||||
|
mock_job.id = "daily_sync"
|
||||||
|
scheduler.scheduler.get_jobs.return_value = [mock_job]
|
||||||
|
|
||||||
|
scheduler.start()
|
||||||
|
|
||||||
|
# Verify scheduler.start() was called
|
||||||
|
scheduler.scheduler.start.assert_called_once()
|
||||||
|
# Verify add_job was called
|
||||||
|
scheduler.scheduler.add_job.assert_called_once()
|
||||||
|
|
||||||
|
def test_scheduler_start_disabled(self, scheduler):
|
||||||
|
"""Test scheduler behavior when sync is disabled."""
|
||||||
|
disabled_config = {"sync": {"enabled": False}}
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch.object(scheduler, "scheduler") as mock_scheduler,
|
||||||
|
patch("leggend.background.scheduler.config") as mock_config_obj,
|
||||||
|
):
|
||||||
|
mock_config_obj.scheduler_config = disabled_config
|
||||||
|
mock_scheduler.running = False
|
||||||
|
|
||||||
|
scheduler.start()
|
||||||
|
|
||||||
|
# Verify scheduler.start() was called
|
||||||
|
mock_scheduler.start.assert_called_once()
|
||||||
|
# Verify add_job was NOT called for disabled sync
|
||||||
|
mock_scheduler.add_job.assert_not_called()
|
||||||
|
|
||||||
|
def test_scheduler_start_with_cron(self, scheduler):
|
||||||
|
"""Test starting scheduler with custom cron expression."""
|
||||||
|
cron_config = {
|
||||||
|
"sync": {
|
||||||
|
"enabled": True,
|
||||||
|
"cron": "0 6 * * 1-5", # 6 AM on weekdays
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch("leggend.config.config") as mock_config_obj:
|
||||||
|
mock_config_obj.scheduler_config = cron_config
|
||||||
|
|
||||||
|
scheduler.start()
|
||||||
|
|
||||||
|
# Verify scheduler.start() and add_job were called
|
||||||
|
scheduler.scheduler.start.assert_called_once()
|
||||||
|
scheduler.scheduler.add_job.assert_called_once()
|
||||||
|
# Verify job was added with correct ID
|
||||||
|
call_args = scheduler.scheduler.add_job.call_args
|
||||||
|
assert call_args.kwargs["id"] == "daily_sync"
|
||||||
|
|
||||||
|
def test_scheduler_start_invalid_cron(self, scheduler):
|
||||||
|
"""Test handling of invalid cron expressions."""
|
||||||
|
invalid_cron_config = {"sync": {"enabled": True, "cron": "invalid cron"}}
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch.object(scheduler, "scheduler") as mock_scheduler,
|
||||||
|
patch("leggend.background.scheduler.config") as mock_config_obj,
|
||||||
|
):
|
||||||
|
mock_config_obj.scheduler_config = invalid_cron_config
|
||||||
|
mock_scheduler.running = False
|
||||||
|
|
||||||
|
scheduler.start()
|
||||||
|
|
||||||
|
# With invalid cron, scheduler.start() should not be called due to early return
|
||||||
|
# and add_job should not be called
|
||||||
|
mock_scheduler.start.assert_not_called()
|
||||||
|
mock_scheduler.add_job.assert_not_called()
|
||||||
|
|
||||||
|
def test_scheduler_shutdown(self, scheduler):
|
||||||
|
"""Test scheduler shutdown."""
|
||||||
|
scheduler.scheduler.running = True
|
||||||
|
|
||||||
|
scheduler.shutdown()
|
||||||
|
|
||||||
|
scheduler.scheduler.shutdown.assert_called_once()
|
||||||
|
|
||||||
|
def test_reschedule_sync(self, scheduler, mock_config):
|
||||||
|
"""Test rescheduling sync job."""
|
||||||
|
scheduler.scheduler.running = True
|
||||||
|
|
||||||
|
# Reschedule with new config
|
||||||
|
new_config = {"enabled": True, "hour": 6, "minute": 30}
|
||||||
|
|
||||||
|
scheduler.reschedule_sync(new_config)
|
||||||
|
|
||||||
|
# Verify remove_job and add_job were called
|
||||||
|
scheduler.scheduler.remove_job.assert_called_once_with("daily_sync")
|
||||||
|
scheduler.scheduler.add_job.assert_called_once()
|
||||||
|
|
||||||
|
def test_reschedule_sync_disable(self, scheduler, mock_config):
|
||||||
|
"""Test disabling sync via reschedule."""
|
||||||
|
scheduler.scheduler.running = True
|
||||||
|
|
||||||
|
# Disable sync
|
||||||
|
disabled_config = {"enabled": False}
|
||||||
|
scheduler.reschedule_sync(disabled_config)
|
||||||
|
|
||||||
|
# Job should be removed but not re-added
|
||||||
|
scheduler.scheduler.remove_job.assert_called_once_with("daily_sync")
|
||||||
|
scheduler.scheduler.add_job.assert_not_called()
|
||||||
|
|
||||||
|
def test_get_next_sync_time(self, scheduler, mock_config):
|
||||||
|
"""Test getting next scheduled sync time."""
|
||||||
|
mock_job = MagicMock()
|
||||||
|
mock_job.next_run_time = datetime(2025, 9, 2, 3, 0)
|
||||||
|
scheduler.scheduler.get_job.return_value = mock_job
|
||||||
|
|
||||||
|
next_time = scheduler.get_next_sync_time()
|
||||||
|
|
||||||
|
assert next_time is not None
|
||||||
|
assert isinstance(next_time, datetime)
|
||||||
|
scheduler.scheduler.get_job.assert_called_once_with("daily_sync")
|
||||||
|
|
||||||
|
def test_get_next_sync_time_no_job(self, scheduler):
|
||||||
|
"""Test getting next sync time when no job is scheduled."""
|
||||||
|
scheduler.scheduler.get_job.return_value = None
|
||||||
|
|
||||||
|
next_time = scheduler.get_next_sync_time()
|
||||||
|
|
||||||
|
assert next_time is None
|
||||||
|
scheduler.scheduler.get_job.assert_called_once_with("daily_sync")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_run_sync_success(self, scheduler):
|
||||||
|
"""Test successful sync job execution."""
|
||||||
|
mock_sync_service = AsyncMock()
|
||||||
|
scheduler.sync_service = mock_sync_service
|
||||||
|
|
||||||
|
await scheduler._run_sync()
|
||||||
|
|
||||||
|
mock_sync_service.sync_all_accounts.assert_called_once()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_run_sync_failure(self, scheduler):
|
||||||
|
"""Test sync job execution with failure."""
|
||||||
|
mock_sync_service = AsyncMock()
|
||||||
|
mock_sync_service.sync_all_accounts.side_effect = Exception("Sync failed")
|
||||||
|
scheduler.sync_service = mock_sync_service
|
||||||
|
|
||||||
|
# Should not raise exception, just log error
|
||||||
|
await scheduler._run_sync()
|
||||||
|
|
||||||
|
mock_sync_service.sync_all_accounts.assert_called_once()
|
||||||
|
|
||||||
|
def test_scheduler_job_max_instances(self, scheduler, mock_config):
|
||||||
|
"""Test that sync jobs have max_instances=1."""
|
||||||
|
with patch("leggend.config.config") as mock_config_obj:
|
||||||
|
mock_config_obj.scheduler_config = mock_config
|
||||||
|
scheduler.start()
|
||||||
|
|
||||||
|
# Verify add_job was called with max_instances=1
|
||||||
|
call_args = scheduler.scheduler.add_job.call_args
|
||||||
|
assert call_args.kwargs["max_instances"] == 1
|
||||||
368
tests/unit/test_sqlite_database.py
Normal file
368
tests/unit/test_sqlite_database.py
Normal file
@@ -0,0 +1,368 @@
|
|||||||
|
"""Tests for SQLite database functions."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import patch
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
import leggen.database.sqlite as sqlite_db
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def temp_db_path():
|
||||||
|
"""Create a temporary database file for testing."""
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
db_path = Path(tmpdir) / f"test_{uuid.uuid4().hex}.db"
|
||||||
|
yield db_path
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_home_db_path(temp_db_path):
|
||||||
|
"""Mock the home database path to use temp file."""
|
||||||
|
config_dir = temp_db_path.parent / ".config" / "leggen"
|
||||||
|
config_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
db_file = config_dir / "leggen.db"
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = temp_db_path.parent
|
||||||
|
yield db_file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_transactions():
|
||||||
|
"""Sample transaction data for testing."""
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-001",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||||
|
"description": "Coffee Shop Payment",
|
||||||
|
"transactionValue": -10.50,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"some": "data"},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"internalTransactionId": "txn-002",
|
||||||
|
"institutionId": "REVOLUT_REVOLT21",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||||
|
"description": "Grocery Store",
|
||||||
|
"transactionValue": -45.30,
|
||||||
|
"transactionCurrency": "EUR",
|
||||||
|
"transactionStatus": "booked",
|
||||||
|
"accountId": "test-account-123",
|
||||||
|
"rawTransaction": {"other": "data"},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def sample_balance():
|
||||||
|
"""Sample balance data for testing."""
|
||||||
|
return {
|
||||||
|
"account_id": "test-account-123",
|
||||||
|
"bank": "REVOLUT_REVOLT21",
|
||||||
|
"status": "active",
|
||||||
|
"iban": "LT313250081177977789",
|
||||||
|
"amount": 1000.00,
|
||||||
|
"currency": "EUR",
|
||||||
|
"type": "interimAvailable",
|
||||||
|
"timestamp": datetime.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class MockContext:
|
||||||
|
"""Mock context for testing."""
|
||||||
|
|
||||||
|
|
||||||
|
class TestSQLiteDatabase:
|
||||||
|
"""Test SQLite database operations."""
|
||||||
|
|
||||||
|
def test_persist_transactions(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test persisting transactions to database."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
# Mock the database path
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
# Persist transactions
|
||||||
|
new_transactions = sqlite_db.persist_transactions(
|
||||||
|
ctx, "test-account-123", sample_transactions
|
||||||
|
)
|
||||||
|
|
||||||
|
# Should return all transactions as new
|
||||||
|
assert len(new_transactions) == 2
|
||||||
|
assert new_transactions[0]["internalTransactionId"] == "txn-001"
|
||||||
|
|
||||||
|
def test_persist_transactions_duplicates(
|
||||||
|
self, mock_home_db_path, sample_transactions
|
||||||
|
):
|
||||||
|
"""Test handling duplicate transactions."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
# Insert transactions twice
|
||||||
|
new_transactions_1 = sqlite_db.persist_transactions(
|
||||||
|
ctx, "test-account-123", sample_transactions
|
||||||
|
)
|
||||||
|
new_transactions_2 = sqlite_db.persist_transactions(
|
||||||
|
ctx, "test-account-123", sample_transactions
|
||||||
|
)
|
||||||
|
|
||||||
|
# First time should return all as new
|
||||||
|
assert len(new_transactions_1) == 2
|
||||||
|
# Second time should return none (all duplicates)
|
||||||
|
assert len(new_transactions_2) == 0
|
||||||
|
|
||||||
|
def test_get_transactions_all(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test retrieving all transactions."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
# Insert test data
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Get all transactions
|
||||||
|
transactions = sqlite_db.get_transactions()
|
||||||
|
|
||||||
|
assert len(transactions) == 2
|
||||||
|
assert (
|
||||||
|
transactions[0]["internalTransactionId"] == "txn-002"
|
||||||
|
) # Ordered by date DESC
|
||||||
|
assert transactions[1]["internalTransactionId"] == "txn-001"
|
||||||
|
|
||||||
|
def test_get_transactions_filtered_by_account(
|
||||||
|
self, mock_home_db_path, sample_transactions
|
||||||
|
):
|
||||||
|
"""Test filtering transactions by account ID."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
# Add transaction for different account
|
||||||
|
other_account_transaction = sample_transactions[0].copy()
|
||||||
|
other_account_transaction["internalTransactionId"] = "txn-003"
|
||||||
|
other_account_transaction["accountId"] = "other-account"
|
||||||
|
|
||||||
|
all_transactions = sample_transactions + [other_account_transaction]
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", all_transactions)
|
||||||
|
|
||||||
|
# Filter by account
|
||||||
|
transactions = sqlite_db.get_transactions(account_id="test-account-123")
|
||||||
|
|
||||||
|
assert len(transactions) == 2
|
||||||
|
for txn in transactions:
|
||||||
|
assert txn["accountId"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_get_transactions_with_pagination(
|
||||||
|
self, mock_home_db_path, sample_transactions
|
||||||
|
):
|
||||||
|
"""Test transaction pagination."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Get first page
|
||||||
|
transactions_page1 = sqlite_db.get_transactions(limit=1, offset=0)
|
||||||
|
assert len(transactions_page1) == 1
|
||||||
|
|
||||||
|
# Get second page
|
||||||
|
transactions_page2 = sqlite_db.get_transactions(limit=1, offset=1)
|
||||||
|
assert len(transactions_page2) == 1
|
||||||
|
|
||||||
|
# Should be different transactions
|
||||||
|
assert (
|
||||||
|
transactions_page1[0]["internalTransactionId"]
|
||||||
|
!= transactions_page2[0]["internalTransactionId"]
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_get_transactions_with_amount_filter(
|
||||||
|
self, mock_home_db_path, sample_transactions
|
||||||
|
):
|
||||||
|
"""Test filtering transactions by amount."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Filter by minimum amount (should exclude coffee shop payment)
|
||||||
|
transactions = sqlite_db.get_transactions(min_amount=-20.0)
|
||||||
|
assert len(transactions) == 1
|
||||||
|
assert transactions[0]["transactionValue"] == -10.50
|
||||||
|
|
||||||
|
def test_get_transactions_with_search(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test searching transactions by description."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Search for "Coffee"
|
||||||
|
transactions = sqlite_db.get_transactions(search="Coffee")
|
||||||
|
assert len(transactions) == 1
|
||||||
|
assert "Coffee" in transactions[0]["description"]
|
||||||
|
|
||||||
|
def test_get_transactions_empty_database(self, mock_home_db_path):
|
||||||
|
"""Test getting transactions from empty database."""
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
transactions = sqlite_db.get_transactions()
|
||||||
|
assert transactions == []
|
||||||
|
|
||||||
|
def test_get_transactions_nonexistent_database(self):
|
||||||
|
"""Test getting transactions when database doesn't exist."""
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = Path("/nonexistent")
|
||||||
|
|
||||||
|
transactions = sqlite_db.get_transactions()
|
||||||
|
assert transactions == []
|
||||||
|
|
||||||
|
def test_persist_balances(self, mock_home_db_path, sample_balance):
|
||||||
|
"""Test persisting balance data."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
result = sqlite_db.persist_balances(ctx, sample_balance)
|
||||||
|
|
||||||
|
# Should return the balance data
|
||||||
|
assert result["account_id"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_get_balances(self, mock_home_db_path, sample_balance):
|
||||||
|
"""Test retrieving balances."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
# Insert test balance
|
||||||
|
sqlite_db.persist_balances(ctx, sample_balance)
|
||||||
|
|
||||||
|
# Get balances
|
||||||
|
balances = sqlite_db.get_balances()
|
||||||
|
|
||||||
|
assert len(balances) == 1
|
||||||
|
assert balances[0]["account_id"] == "test-account-123"
|
||||||
|
assert balances[0]["amount"] == 1000.00
|
||||||
|
|
||||||
|
def test_get_balances_filtered_by_account(self, mock_home_db_path, sample_balance):
|
||||||
|
"""Test filtering balances by account ID."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
# Create balance for different account
|
||||||
|
other_balance = sample_balance.copy()
|
||||||
|
other_balance["account_id"] = "other-account"
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_balances(ctx, sample_balance)
|
||||||
|
sqlite_db.persist_balances(ctx, other_balance)
|
||||||
|
|
||||||
|
# Filter by account
|
||||||
|
balances = sqlite_db.get_balances(account_id="test-account-123")
|
||||||
|
|
||||||
|
assert len(balances) == 1
|
||||||
|
assert balances[0]["account_id"] == "test-account-123"
|
||||||
|
|
||||||
|
def test_get_account_summary(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test getting account summary from transactions."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
summary = sqlite_db.get_account_summary("test-account-123")
|
||||||
|
|
||||||
|
assert summary is not None
|
||||||
|
assert summary["accountId"] == "test-account-123"
|
||||||
|
assert summary["institutionId"] == "REVOLUT_REVOLT21"
|
||||||
|
assert summary["iban"] == "LT313250081177977789"
|
||||||
|
|
||||||
|
def test_get_account_summary_nonexistent(self, mock_home_db_path):
|
||||||
|
"""Test getting summary for nonexistent account."""
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
summary = sqlite_db.get_account_summary("nonexistent")
|
||||||
|
assert summary is None
|
||||||
|
|
||||||
|
def test_get_transaction_count(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test getting transaction count."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Get total count
|
||||||
|
count = sqlite_db.get_transaction_count()
|
||||||
|
assert count == 2
|
||||||
|
|
||||||
|
# Get count for specific account
|
||||||
|
count_filtered = sqlite_db.get_transaction_count(
|
||||||
|
account_id="test-account-123"
|
||||||
|
)
|
||||||
|
assert count_filtered == 2
|
||||||
|
|
||||||
|
# Get count for nonexistent account
|
||||||
|
count_none = sqlite_db.get_transaction_count(account_id="nonexistent")
|
||||||
|
assert count_none == 0
|
||||||
|
|
||||||
|
def test_get_transaction_count_with_filters(
|
||||||
|
self, mock_home_db_path, sample_transactions
|
||||||
|
):
|
||||||
|
"""Test getting transaction count with filters."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Filter by search
|
||||||
|
count = sqlite_db.get_transaction_count(search="Coffee")
|
||||||
|
assert count == 1
|
||||||
|
|
||||||
|
# Filter by amount
|
||||||
|
count = sqlite_db.get_transaction_count(min_amount=-20.0)
|
||||||
|
assert count == 1
|
||||||
|
|
||||||
|
def test_database_indexes_created(self, mock_home_db_path, sample_transactions):
|
||||||
|
"""Test that database indexes are created properly."""
|
||||||
|
ctx = MockContext()
|
||||||
|
|
||||||
|
with patch("pathlib.Path.home") as mock_home:
|
||||||
|
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||||
|
|
||||||
|
# Persist transactions to create tables and indexes
|
||||||
|
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||||
|
|
||||||
|
# Get transactions to ensure we can query the table (indexes working)
|
||||||
|
transactions = sqlite_db.get_transactions(account_id="test-account-123")
|
||||||
|
assert len(transactions) == 2
|
||||||
791
uv.lock
generated
Normal file
791
uv.lock
generated
Normal file
@@ -0,0 +1,791 @@
|
|||||||
|
version = 1
|
||||||
|
revision = 3
|
||||||
|
requires-python = "==3.13.*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "annotated-types"
|
||||||
|
version = "0.7.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "anyio"
|
||||||
|
version = "4.10.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "idna" },
|
||||||
|
{ name = "sniffio" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "apscheduler"
|
||||||
|
version = "3.11.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "tzlocal" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347, upload-time = "2024-11-24T19:39:26.463Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004, upload-time = "2024-11-24T19:39:24.442Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2024.12.14"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010, upload-time = "2024-12-14T13:52:38.02Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927, upload-time = "2024-12-14T13:52:36.114Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "cfgv"
|
||||||
|
version = "3.4.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "charset-normalizer"
|
||||||
|
version = "3.4.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698, upload-time = "2024-12-24T18:11:05.834Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162, upload-time = "2024-12-24T18:11:07.064Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263, upload-time = "2024-12-24T18:11:08.374Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966, upload-time = "2024-12-24T18:11:09.831Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992, upload-time = "2024-12-24T18:11:12.03Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162, upload-time = "2024-12-24T18:11:13.372Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972, upload-time = "2024-12-24T18:11:14.628Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095, upload-time = "2024-12-24T18:11:17.672Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668, upload-time = "2024-12-24T18:11:18.989Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073, upload-time = "2024-12-24T18:11:21.507Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732, upload-time = "2024-12-24T18:11:22.774Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391, upload-time = "2024-12-24T18:11:24.139Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702, upload-time = "2024-12-24T18:11:26.535Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "click"
|
||||||
|
version = "8.1.8"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorama"
|
||||||
|
version = "0.4.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "discord-webhook"
|
||||||
|
version = "1.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "requests" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/e8/e6/660b07356a15d98787d893f879efc404eb15176312d457f2f6f7090acd32/discord_webhook-1.3.1.tar.gz", hash = "sha256:ee3e0f3ea4f3dc8dc42be91f75b894a01624c6c13fea28e23ebcf9a6c9a304f7", size = 11715, upload-time = "2024-01-31T17:23:14.463Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/92/e2/eed83ebc8d88da0930143a6dd1d0ba0b6deba1fd91b956f21c23a2608510/discord_webhook-1.3.1-py3-none-any.whl", hash = "sha256:ede07028316de76d24eb811836e2b818b2017510da786777adcb0d5970e7af79", size = 13206, upload-time = "2024-01-31T17:23:12.424Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "distlib"
|
||||||
|
version = "0.3.9"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fastapi"
|
||||||
|
version = "0.116.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pydantic" },
|
||||||
|
{ name = "starlette" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "filelock"
|
||||||
|
version = "3.16.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "h11"
|
||||||
|
version = "0.16.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httpcore"
|
||||||
|
version = "1.0.9"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "certifi" },
|
||||||
|
{ name = "h11" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httptools"
|
||||||
|
version = "0.6.4"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "httpx"
|
||||||
|
version = "0.28.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "anyio" },
|
||||||
|
{ name = "certifi" },
|
||||||
|
{ name = "httpcore" },
|
||||||
|
{ name = "idna" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "identify"
|
||||||
|
version = "2.6.5"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/cf/92/69934b9ef3c31ca2470980423fda3d00f0460ddefdf30a67adf7f17e2e00/identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc", size = 99213, upload-time = "2025-01-04T17:01:41.99Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/fa/dce098f4cdf7621aa8f7b4f919ce545891f489482f0bfa5102f3eca8608b/identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566", size = 99078, upload-time = "2025-01-04T17:01:40.667Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "idna"
|
||||||
|
version = "3.10"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iniconfig"
|
||||||
|
version = "2.1.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "leggen"
|
||||||
|
version = "2025.9.0"
|
||||||
|
source = { editable = "." }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "apscheduler" },
|
||||||
|
{ name = "click" },
|
||||||
|
{ name = "discord-webhook" },
|
||||||
|
{ name = "fastapi" },
|
||||||
|
{ name = "httpx" },
|
||||||
|
{ name = "loguru" },
|
||||||
|
{ name = "requests" },
|
||||||
|
{ name = "tabulate" },
|
||||||
|
{ name = "tomli-w" },
|
||||||
|
{ name = "uvicorn", extra = ["standard"] },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dev-dependencies]
|
||||||
|
dev = [
|
||||||
|
{ name = "mypy" },
|
||||||
|
{ name = "pre-commit" },
|
||||||
|
{ name = "pytest" },
|
||||||
|
{ name = "pytest-asyncio" },
|
||||||
|
{ name = "pytest-mock" },
|
||||||
|
{ name = "requests-mock" },
|
||||||
|
{ name = "respx" },
|
||||||
|
{ name = "ruff" },
|
||||||
|
{ name = "types-requests" },
|
||||||
|
{ name = "types-tabulate" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
requires-dist = [
|
||||||
|
{ name = "apscheduler", specifier = ">=3.10.0,<4" },
|
||||||
|
{ name = "click", specifier = ">=8.1.7,<9" },
|
||||||
|
{ name = "discord-webhook", specifier = ">=1.3.1,<2" },
|
||||||
|
{ name = "fastapi", specifier = ">=0.104.0,<1" },
|
||||||
|
{ name = "httpx", specifier = ">=0.28.1" },
|
||||||
|
{ name = "loguru", specifier = ">=0.7.2,<0.8" },
|
||||||
|
{ name = "requests", specifier = ">=2.31.0,<3" },
|
||||||
|
{ name = "tabulate", specifier = ">=0.9.0,<0.10" },
|
||||||
|
{ name = "tomli-w", specifier = ">=1.0.0,<2" },
|
||||||
|
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0,<1" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata.requires-dev]
|
||||||
|
dev = [
|
||||||
|
{ name = "mypy", specifier = ">=1.17.1" },
|
||||||
|
{ name = "pre-commit", specifier = ">=3.6.0" },
|
||||||
|
{ name = "pytest", specifier = ">=8.0.0" },
|
||||||
|
{ name = "pytest-asyncio", specifier = ">=0.23.0" },
|
||||||
|
{ name = "pytest-mock", specifier = ">=3.12.0" },
|
||||||
|
{ name = "requests-mock", specifier = ">=1.12.0" },
|
||||||
|
{ name = "respx", specifier = ">=0.21.0" },
|
||||||
|
{ name = "ruff", specifier = ">=0.6.1" },
|
||||||
|
{ name = "types-requests", specifier = ">=2.32.4.20250809" },
|
||||||
|
{ name = "types-tabulate", specifier = ">=0.9.0.20241207" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "loguru"
|
||||||
|
version = "0.7.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "win32-setctime", marker = "sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mypy"
|
||||||
|
version = "1.17.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "mypy-extensions" },
|
||||||
|
{ name = "pathspec" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 12744296, upload-time = "2025-07-31T07:53:03.896Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mypy-extensions"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nodeenv"
|
||||||
|
version = "1.9.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "25.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pathspec"
|
||||||
|
version = "0.12.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "platformdirs"
|
||||||
|
version = "4.3.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pluggy"
|
||||||
|
version = "1.6.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pre-commit"
|
||||||
|
version = "4.0.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "cfgv" },
|
||||||
|
{ name = "identify" },
|
||||||
|
{ name = "nodeenv" },
|
||||||
|
{ name = "pyyaml" },
|
||||||
|
{ name = "virtualenv" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678, upload-time = "2024-10-08T16:09:37.641Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713, upload-time = "2024-10-08T16:09:35.726Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic"
|
||||||
|
version = "2.11.7"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "annotated-types" },
|
||||||
|
{ name = "pydantic-core" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
{ name = "typing-inspection" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic-core"
|
||||||
|
version = "2.33.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pygments"
|
||||||
|
version = "2.19.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest"
|
||||||
|
version = "8.4.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "iniconfig" },
|
||||||
|
{ name = "packaging" },
|
||||||
|
{ name = "pluggy" },
|
||||||
|
{ name = "pygments" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-asyncio"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pytest" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-mock"
|
||||||
|
version = "3.14.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "pytest" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "python-dotenv"
|
||||||
|
version = "1.1.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pyyaml"
|
||||||
|
version = "6.0.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "requests"
|
||||||
|
version = "2.32.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "certifi" },
|
||||||
|
{ name = "charset-normalizer" },
|
||||||
|
{ name = "idna" },
|
||||||
|
{ name = "urllib3" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "requests-mock"
|
||||||
|
version = "1.12.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "requests" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/92/32/587625f91f9a0a3d84688bf9cfc4b2480a7e8ec327cefd0ff2ac891fd2cf/requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401", size = 60901, upload-time = "2024-03-29T03:54:29.446Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/97/ec/889fbc557727da0c34a33850950310240f2040f3b1955175fdb2b36a8910/requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", size = 27695, upload-time = "2024-03-29T03:54:27.64Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "respx"
|
||||||
|
version = "0.22.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "httpx" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ruff"
|
||||||
|
version = "0.9.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/67/3e/e89f736f01aa9517a97e2e7e0ce8d34a4d8207087b3cfdec95133fee13b5/ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17", size = 3498844, upload-time = "2025-01-10T18:57:53.896Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/dc/05/c3a2e0feb3d5d394cdfd552de01df9d3ec8a3a3771bbff247fab7e668653/ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743", size = 10645241, upload-time = "2025-01-10T18:56:45.897Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/dd/da/59f0a40e5f88ee5c054ad175caaa2319fc96571e1d29ab4730728f2aad4f/ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f", size = 10391066, upload-time = "2025-01-10T18:56:52.224Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b7/fe/85e1c1acf0ba04a3f2d54ae61073da030f7a5dc386194f96f3c6ca444a78/ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb", size = 10012308, upload-time = "2025-01-10T18:56:55.426Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6f/9b/780aa5d4bdca8dcea4309264b8faa304bac30e1ce0bcc910422bfcadd203/ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca", size = 10881960, upload-time = "2025-01-10T18:56:59.539Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/12/f4/dac4361afbfe520afa7186439e8094e4884ae3b15c8fc75fb2e759c1f267/ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce", size = 10414803, upload-time = "2025-01-10T18:57:04.919Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/a2/057a3cb7999513cb78d6cb33a7d1cc6401c82d7332583786e4dad9e38e44/ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969", size = 11464929, upload-time = "2025-01-10T18:57:08.146Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/eb/c6/1ccfcc209bee465ced4874dcfeaadc88aafcc1ea9c9f31ef66f063c187f0/ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd", size = 12170717, upload-time = "2025-01-10T18:57:12.564Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/84/97/4a524027518525c7cf6931e9fd3b2382be5e4b75b2b61bec02681a7685a5/ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a", size = 11708921, upload-time = "2025-01-10T18:57:17.216Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a6/a4/4e77cf6065c700d5593b25fca6cf725b1ab6d70674904f876254d0112ed0/ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b", size = 13058074, upload-time = "2025-01-10T18:57:20.57Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/d6/fcb78e0531e863d0a952c4c5600cc5cd317437f0e5f031cd2288b117bb37/ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831", size = 11281093, upload-time = "2025-01-10T18:57:25.526Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/3b/7235bbeff00c95dc2d073cfdbf2b871b5bbf476754c5d277815d286b4328/ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab", size = 10882610, upload-time = "2025-01-10T18:57:28.855Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2a/66/5599d23257c61cf038137f82999ca8f9d0080d9d5134440a461bef85b461/ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1", size = 10489273, upload-time = "2025-01-10T18:57:32.219Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/85/de4aa057e2532db0f9761e2c2c13834991e087787b93e4aeb5f1cb10d2df/ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366", size = 11003314, upload-time = "2025-01-10T18:57:35.431Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/00/42/afedcaa089116d81447347f76041ff46025849fedb0ed2b187d24cf70fca/ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f", size = 11342982, upload-time = "2025-01-10T18:57:38.642Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/39/c6/fe45f3eb27e3948b41a305d8b768e949bf6a39310e9df73f6c576d7f1d9f/ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72", size = 8819750, upload-time = "2025-01-10T18:57:41.93Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/8d/580db77c3b9d5c3d9479e55b0b832d279c30c8f00ab0190d4cd8fc67831c/ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19", size = 9701331, upload-time = "2025-01-10T18:57:46.334Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/94/0498cdb7316ed67a1928300dd87d659c933479f44dec51b4f62bfd1f8028/ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7", size = 9145708, upload-time = "2025-01-10T18:57:51.308Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sniffio"
|
||||||
|
version = "1.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "starlette"
|
||||||
|
version = "0.47.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "anyio" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tabulate"
|
||||||
|
version = "0.9.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tomli-w"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "types-requests"
|
||||||
|
version = "2.32.4.20250809"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "urllib3" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "types-tabulate"
|
||||||
|
version = "0.9.0.20241207"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3f/43/16030404a327e4ff8c692f2273854019ed36718667b2993609dc37d14dd4/types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230", size = 8195, upload-time = "2024-12-07T02:54:42.554Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5e/86/a9ebfd509cbe74471106dffed320e208c72537f9aeb0a55eaa6b1b5e4d17/types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85", size = 8307, upload-time = "2024-12-07T02:54:41.031Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "typing-extensions"
|
||||||
|
version = "4.15.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "typing-inspection"
|
||||||
|
version = "0.4.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tzdata"
|
||||||
|
version = "2025.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tzlocal"
|
||||||
|
version = "5.3.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "tzdata", marker = "sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "urllib3"
|
||||||
|
version = "2.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uvicorn"
|
||||||
|
version = "0.35.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "click" },
|
||||||
|
{ name = "h11" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.optional-dependencies]
|
||||||
|
standard = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "httptools" },
|
||||||
|
{ name = "python-dotenv" },
|
||||||
|
{ name = "pyyaml" },
|
||||||
|
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
|
||||||
|
{ name = "watchfiles" },
|
||||||
|
{ name = "websockets" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uvloop"
|
||||||
|
version = "0.21.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "virtualenv"
|
||||||
|
version = "20.28.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "distlib" },
|
||||||
|
{ name = "filelock" },
|
||||||
|
{ name = "platformdirs" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/50/39/689abee4adc85aad2af8174bb195a819d0be064bf55fcc73b49d2b28ae77/virtualenv-20.28.1.tar.gz", hash = "sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329", size = 7650532, upload-time = "2025-01-03T01:56:53.613Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/51/8f/dfb257ca6b4e27cb990f1631142361e4712badab8e3ca8dc134d96111515/virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb", size = 4276719, upload-time = "2025-01-03T01:56:50.498Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "watchfiles"
|
||||||
|
version = "1.1.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "anyio" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d3/42/fae874df96595556a9089ade83be34a2e04f0f11eb53a8dbf8a8a5e562b4/watchfiles-1.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5007f860c7f1f8df471e4e04aaa8c43673429047d63205d1630880f7637bca30", size = 402004, upload-time = "2025-06-15T19:05:38.499Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/55/a77e533e59c3003d9803c09c44c3651224067cbe7fb5d574ddbaa31e11ca/watchfiles-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:20ecc8abbd957046f1fe9562757903f5eaf57c3bce70929fda6c7711bb58074a", size = 393671, upload-time = "2025-06-15T19:05:39.52Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/05/68/b0afb3f79c8e832e6571022611adbdc36e35a44e14f129ba09709aa4bb7a/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2f0498b7d2a3c072766dba3274fe22a183dbea1f99d188f1c6c72209a1063dc", size = 449772, upload-time = "2025-06-15T19:05:40.897Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/05/46dd1f6879bc40e1e74c6c39a1b9ab9e790bf1f5a2fe6c08b463d9a807f4/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:239736577e848678e13b201bba14e89718f5c2133dfd6b1f7846fa1b58a8532b", size = 456789, upload-time = "2025-06-15T19:05:42.045Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8b/ca/0eeb2c06227ca7f12e50a47a3679df0cd1ba487ea19cf844a905920f8e95/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eff4b8d89f444f7e49136dc695599a591ff769300734446c0a86cba2eb2f9895", size = 482551, upload-time = "2025-06-15T19:05:43.781Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/31/47/2cecbd8694095647406645f822781008cc524320466ea393f55fe70eed3b/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12b0a02a91762c08f7264e2e79542f76870c3040bbc847fb67410ab81474932a", size = 597420, upload-time = "2025-06-15T19:05:45.244Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d9/7e/82abc4240e0806846548559d70f0b1a6dfdca75c1b4f9fa62b504ae9b083/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29e7bc2eee15cbb339c68445959108803dc14ee0c7b4eea556400131a8de462b", size = 477950, upload-time = "2025-06-15T19:05:46.332Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/25/0d/4d564798a49bf5482a4fa9416dea6b6c0733a3b5700cb8a5a503c4b15853/watchfiles-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9481174d3ed982e269c090f780122fb59cee6c3796f74efe74e70f7780ed94c", size = 451706, upload-time = "2025-06-15T19:05:47.459Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/81/b5/5516cf46b033192d544102ea07c65b6f770f10ed1d0a6d388f5d3874f6e4/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:80f811146831c8c86ab17b640801c25dc0a88c630e855e2bef3568f30434d52b", size = 625814, upload-time = "2025-06-15T19:05:48.654Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0c/dd/7c1331f902f30669ac3e754680b6edb9a0dd06dea5438e61128111fadd2c/watchfiles-1.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:60022527e71d1d1fda67a33150ee42869042bce3d0fcc9cc49be009a9cded3fb", size = 622820, upload-time = "2025-06-15T19:05:50.088Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1b/14/36d7a8e27cd128d7b1009e7715a7c02f6c131be9d4ce1e5c3b73d0e342d8/watchfiles-1.1.0-cp313-cp313-win32.whl", hash = "sha256:32d6d4e583593cb8576e129879ea0991660b935177c0f93c6681359b3654bfa9", size = 279194, upload-time = "2025-06-15T19:05:51.186Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/25/41/2dd88054b849aa546dbeef5696019c58f8e0774f4d1c42123273304cdb2e/watchfiles-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:f21af781a4a6fbad54f03c598ab620e3a77032c5878f3d780448421a6e1818c7", size = 292349, upload-time = "2025-06-15T19:05:52.201Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/cf/421d659de88285eb13941cf11a81f875c176f76a6d99342599be88e08d03/watchfiles-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:5366164391873ed76bfdf618818c82084c9db7fac82b64a20c44d335eec9ced5", size = 283836, upload-time = "2025-06-15T19:05:53.265Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/45/10/6faf6858d527e3599cc50ec9fcae73590fbddc1420bd4fdccfebffeedbc6/watchfiles-1.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:17ab167cca6339c2b830b744eaf10803d2a5b6683be4d79d8475d88b4a8a4be1", size = 400343, upload-time = "2025-06-15T19:05:54.252Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/03/20/5cb7d3966f5e8c718006d0e97dfe379a82f16fecd3caa7810f634412047a/watchfiles-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:328dbc9bff7205c215a7807da7c18dce37da7da718e798356212d22696404339", size = 392916, upload-time = "2025-06-15T19:05:55.264Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8c/07/d8f1176328fa9e9581b6f120b017e286d2a2d22ae3f554efd9515c8e1b49/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7208ab6e009c627b7557ce55c465c98967e8caa8b11833531fdf95799372633", size = 449582, upload-time = "2025-06-15T19:05:56.317Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/66/e8/80a14a453cf6038e81d072a86c05276692a1826471fef91df7537dba8b46/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a8f6f72974a19efead54195bc9bed4d850fc047bb7aa971268fd9a8387c89011", size = 456752, upload-time = "2025-06-15T19:05:57.359Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5a/25/0853b3fe0e3c2f5af9ea60eb2e781eade939760239a72c2d38fc4cc335f6/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d181ef50923c29cf0450c3cd47e2f0557b62218c50b2ab8ce2ecaa02bd97e670", size = 481436, upload-time = "2025-06-15T19:05:58.447Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fe/9e/4af0056c258b861fbb29dcb36258de1e2b857be4a9509e6298abcf31e5c9/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb4167043d3a78280d5d05ce0ba22055c266cf8655ce942f2fb881262ff3cdf", size = 596016, upload-time = "2025-06-15T19:05:59.59Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/fa/95d604b58aa375e781daf350897aaaa089cff59d84147e9ccff2447c8294/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c5701dc474b041e2934a26d31d39f90fac8a3dee2322b39f7729867f932b1d4", size = 476727, upload-time = "2025-06-15T19:06:01.086Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/65/95/fe479b2664f19be4cf5ceeb21be05afd491d95f142e72d26a42f41b7c4f8/watchfiles-1.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b067915e3c3936966a8607f6fe5487df0c9c4afb85226613b520890049deea20", size = 451864, upload-time = "2025-06-15T19:06:02.144Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d3/8a/3c4af14b93a15ce55901cd7a92e1a4701910f1768c78fb30f61d2b79785b/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:9c733cda03b6d636b4219625a4acb5c6ffb10803338e437fb614fef9516825ef", size = 625626, upload-time = "2025-06-15T19:06:03.578Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/da/f5/cf6aa047d4d9e128f4b7cde615236a915673775ef171ff85971d698f3c2c/watchfiles-1.1.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:cc08ef8b90d78bfac66f0def80240b0197008e4852c9f285907377b2947ffdcb", size = 622744, upload-time = "2025-06-15T19:06:05.066Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "websockets"
|
||||||
|
version = "15.0.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "win32-setctime"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
|
||||||
|
]
|
||||||
Reference in New Issue
Block a user