mirror of
https://github.com/elisiariocouto/leggen.git
synced 2025-12-28 21:59:14 +00:00
Compare commits
40 Commits
0.6.0
...
da456b4c80
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
da456b4c80 | ||
|
|
a52a6274c0 | ||
|
|
1bc259bd3e | ||
|
|
fad426aee8 | ||
|
|
7f59e634a1 | ||
|
|
011c792e89 | ||
|
|
4e4c80be48 | ||
|
|
4c10307e12 | ||
|
|
de67ce9ec0 | ||
|
|
d99b1c73db | ||
|
|
69e381fffe | ||
|
|
f6ebb98de3 | ||
|
|
73d6bd32db | ||
|
|
6b2c19778b | ||
|
|
355fa5cfb6 | ||
|
|
7cf471402b | ||
|
|
7480094419 | ||
|
|
d69bd5d115 | ||
|
|
ca29d527c9 | ||
|
|
4ed1bf5abe | ||
|
|
eb73401896 | ||
|
|
33006f8f43 | ||
|
|
6b2cb8a52f | ||
|
|
75ca7f177f | ||
|
|
7efbccfc90 | ||
|
|
e7662bc3dd | ||
|
|
59346334db | ||
|
|
c70a4e5cb8 | ||
|
|
a29bd1ab68 | ||
|
|
a8fb3ad931 | ||
|
|
effabf0695 | ||
|
|
758a3a2257 | ||
|
|
6f5b5dc679 | ||
|
|
6c44beda67 | ||
|
|
ebe0a2fe86 | ||
|
|
3cb38e2e9f | ||
|
|
ad40b2207a | ||
|
|
9402c2535b | ||
|
|
e0351a8771 | ||
|
|
b60ba068cd |
22
.claude/settings.local.json
Normal file
22
.claude/settings.local.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(mkdir:*)",
|
||||
"Bash(uv sync:*)",
|
||||
"Bash(uv run pytest:*)",
|
||||
"Bash(git commit:*)",
|
||||
"Bash(ruff check:*)",
|
||||
"Bash(git add:*)",
|
||||
"Bash(mypy:*)",
|
||||
"WebFetch(domain:localhost)",
|
||||
"Bash(npm create:*)",
|
||||
"Bash(npm install)",
|
||||
"Bash(npm install:*)",
|
||||
"Bash(npx tailwindcss init:*)",
|
||||
"Bash(./node_modules/.bin/tailwindcss:*)",
|
||||
"Bash(npm run build:*)"
|
||||
],
|
||||
"deny": [],
|
||||
"ask": []
|
||||
}
|
||||
}
|
||||
43
.github/workflows/release.yml
vendored
43
.github/workflows/release.yml
vendored
@@ -6,28 +6,41 @@ on:
|
||||
- "**"
|
||||
|
||||
jobs:
|
||||
publish-pypi:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: "Set up Python"
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
python-version-file: "pyproject.toml"
|
||||
- name: Build Package
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install poetry
|
||||
poetry config virtualenvs.create false
|
||||
poetry build -f wheel
|
||||
run: uv build
|
||||
- name: Store the distribution packages
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: python-package-distributions
|
||||
path: dist/
|
||||
|
||||
publish-to-pypi:
|
||||
name: Publish Python distribution to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write # IMPORTANT: mandatory for trusted publishing
|
||||
needs:
|
||||
- build
|
||||
steps:
|
||||
- name: Download all the dists
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: python-package-distributions
|
||||
path: dist/
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Publish package
|
||||
env:
|
||||
POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }}
|
||||
run: poetry publish
|
||||
run: uv publish
|
||||
|
||||
push-docker:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -14,7 +14,6 @@ dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
|
||||
@@ -1,18 +1,23 @@
|
||||
repos:
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.4.2
|
||||
hooks:
|
||||
- id: black
|
||||
language_version: python3.12
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: "v0.4.8"
|
||||
rev: "v0.12.11"
|
||||
hooks:
|
||||
- id: ruff
|
||||
- id: ruff-format
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
rev: v6.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
exclude: ".*\\.md$"
|
||||
- id: end-of-file-fixer
|
||||
- id: check-added-large-files
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: mypy
|
||||
name: Static type check with mypy
|
||||
entry: uv run mypy leggen leggend --check-untyped-defs
|
||||
files: "^leggen(d)?/.*"
|
||||
language: "system"
|
||||
types: ["python"]
|
||||
always_run: true
|
||||
pass_filenames: false
|
||||
|
||||
100
CHANGELOG.md
100
CHANGELOG.md
@@ -1,3 +1,103 @@
|
||||
|
||||
## 0.6.11 (2025/02/23)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Add workdir to dockerfile last stage. ([355fa5cf](https://github.com/elisiariocouto/leggen/commit/355fa5cfb6ccc4ca225d921cdc2ad77d6bb9b2e6))
|
||||
|
||||
|
||||
|
||||
## 0.6.10 (2025/01/14)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **ci:** Install uv before publishing. ([74800944](https://github.com/elisiariocouto/leggen/commit/7480094419697a46515a88a635d4e73820b0d283))
|
||||
|
||||
|
||||
|
||||
## 0.6.9 (2025/01/14)
|
||||
|
||||
### Miscellaneous Tasks
|
||||
|
||||
- Setup PyPI Trusted Publishing. ([ca29d527](https://github.com/elisiariocouto/leggen/commit/ca29d527c9e5f9391dfcad6601ad9c585b511b47))
|
||||
|
||||
|
||||
|
||||
## 0.6.8 (2025/01/13)
|
||||
|
||||
### Miscellaneous Tasks
|
||||
|
||||
- Migrate from Poetry to uv, bump dependencies and python version. ([33006f8f](https://github.com/elisiariocouto/leggen/commit/33006f8f437da2b9b3c860f22a1fda2a2e5b19a1))
|
||||
- Fix typo in release script. ([eb734018](https://github.com/elisiariocouto/leggen/commit/eb734018964d8281450a8713d0a15688d2cb42bf))
|
||||
|
||||
|
||||
|
||||
## 0.6.7 (2024/09/15)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **notifications/telegram:** Escape characters when notifying via Telegram. ([7efbccfc](https://github.com/elisiariocouto/leggen/commit/7efbccfc90ea601da9029909bdd4f21640d73e6a))
|
||||
|
||||
|
||||
### Miscellaneous Tasks
|
||||
|
||||
- Bump dependencies. ([75ca7f17](https://github.com/elisiariocouto/leggen/commit/75ca7f177fb9992395e576ba9038a63e90612e5c))
|
||||
|
||||
|
||||
|
||||
## 0.6.6 (2024/08/21)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **commands/status:** Handle exception when no `last_accessed` is returned from GoCardless API. ([c70a4e5c](https://github.com/elisiariocouto/leggen/commit/c70a4e5cb87a19a5a0ed194838e323c6246856ab))
|
||||
- **notifications/telegram:** Escape parenthesis. ([a29bd1ab](https://github.com/elisiariocouto/leggen/commit/a29bd1ab683bc9e068aefb722e9e87bb4fe6aa76))
|
||||
|
||||
|
||||
### Miscellaneous Tasks
|
||||
|
||||
- Update dependencies, use ruff to format code. ([59346334](https://github.com/elisiariocouto/leggen/commit/59346334dbe999ccfd70f6687130aaedb50254fa))
|
||||
|
||||
|
||||
## 0.6.5 (2024/07/05)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **sync:** Continue on account deactivation. ([758a3a22](https://github.com/elisiariocouto/leggen/commit/758a3a2257c490a92fb0b0673c74d720ad7e87f7))
|
||||
|
||||
|
||||
### Miscellaneous Tasks
|
||||
|
||||
- Bump dependencies. ([effabf06](https://github.com/elisiariocouto/leggen/commit/effabf06954b08e05e3084fdbc54518ea5d947dc))
|
||||
|
||||
|
||||
## 0.6.4 (2024/06/07)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **sync:** Correctly calculate days left. ([6c44beda](https://github.com/elisiariocouto/leggen/commit/6c44beda672242714bab1100b1f0576cdce255ca))
|
||||
|
||||
|
||||
## 0.6.3 (2024/06/07)
|
||||
|
||||
### Features
|
||||
|
||||
- **sync:** Correctly calculate days left, based on the default 90 days period. ([3cb38e2e](https://github.com/elisiariocouto/leggen/commit/3cb38e2e9fb08e07664caa7daa9aa651262bd213))
|
||||
|
||||
|
||||
## 0.6.2 (2024/06/07)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **sync:** Use timezone-aware datetime objects. ([9402c253](https://github.com/elisiariocouto/leggen/commit/9402c2535baade84128bdfd0fc314d5225bbd822))
|
||||
|
||||
|
||||
## 0.6.1 (2024/06/07)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- **sync:** Get correct parameter for requisition creation time. ([b60ba068](https://github.com/elisiariocouto/leggen/commit/b60ba068cd7facea5f60fca61bf5845cabf0c2c6))
|
||||
|
||||
|
||||
## 0.6.0 (2024/06/07)
|
||||
|
||||
### Features
|
||||
|
||||
69
CLAUDE.md
Normal file
69
CLAUDE.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
Leggen is an Open Banking CLI tool built in Python that connects to banks using the GoCardless Open Banking API. It allows users to sync bank transactions to SQLite/MongoDB databases, visualize data with NocoDB, and send notifications based on transaction filters.
|
||||
|
||||
## Development Commands
|
||||
|
||||
- **Install dependencies**: `uv sync` (uses uv package manager)
|
||||
- **Run locally**: `uv run leggen --help`
|
||||
- **Lint code**: `ruff check` and `ruff format` (configured in pyproject.toml)
|
||||
- **Build Docker image**: `docker build -t leggen .`
|
||||
- **Run with Docker Compose**: `docker compose up -d`
|
||||
|
||||
## Architecture
|
||||
|
||||
### Core Structure
|
||||
- `leggen/main.py` - Main CLI entry point using Click framework with custom command loading
|
||||
- `leggen/commands/` - CLI command implementations (balances, sync, transactions, etc.)
|
||||
- `leggen/utils/` - Core utilities for authentication, database operations, network requests, and notifications
|
||||
- `leggen/database/` - Database adapters for SQLite and MongoDB
|
||||
- `leggen/notifications/` - Discord and Telegram notification handlers
|
||||
|
||||
### Key Components
|
||||
|
||||
**Configuration System**:
|
||||
- Uses TOML configuration files (default: `~/.config/leggen/config.toml`)
|
||||
- Configuration loaded via `leggen/utils/config.py`
|
||||
- Supports GoCardless API credentials, database settings, and notification configurations
|
||||
|
||||
**Authentication & API**:
|
||||
- GoCardless Open Banking API integration in `leggen/utils/gocardless.py`
|
||||
- Token-based authentication via `leggen/utils/auth.py`
|
||||
- Network utilities in `leggen/utils/network.py`
|
||||
|
||||
**Database Operations**:
|
||||
- Dual database support: SQLite (`database/sqlite.py`) and MongoDB (`database/mongo.py`)
|
||||
- Transaction persistence and balance tracking via `utils/database.py`
|
||||
- Data storage patterns follow bank account and transaction models
|
||||
|
||||
**Command Architecture**:
|
||||
- Dynamic command loading system in `main.py` with support for command groups
|
||||
- Commands organized as modules with individual click decorators
|
||||
- Bank management commands grouped under `commands/bank/`
|
||||
|
||||
### Data Flow
|
||||
1. Configuration loaded from TOML file
|
||||
2. GoCardless API authentication and bank requisition management
|
||||
3. Account and transaction data retrieval from banks
|
||||
4. Data persistence to configured databases (SQLite/MongoDB)
|
||||
5. Optional notifications sent via Discord/Telegram based on filters
|
||||
6. Data visualization available through NocoDB integration
|
||||
|
||||
## Docker & Deployment
|
||||
|
||||
The project uses multi-stage Docker builds with uv for dependency management. The compose.yml includes:
|
||||
- Main leggen service with sync scheduling via Ofelia
|
||||
- NocoDB for data visualization
|
||||
- Optional MongoDB with mongo-express admin interface
|
||||
|
||||
## Configuration Requirements
|
||||
|
||||
All operations require a valid `config.toml` file with GoCardless API credentials. The configuration structure includes sections for:
|
||||
- `[gocardless]` - API credentials and endpoint
|
||||
- `[database]` - Storage backend selection
|
||||
- `[notifications]` - Discord/Telegram webhook settings
|
||||
- `[filters]` - Transaction matching patterns for notifications
|
||||
30
Dockerfile
30
Dockerfile
@@ -1,14 +1,19 @@
|
||||
FROM python:3.12-alpine as builder
|
||||
ARG POETRY_VERSION="1.7.1"
|
||||
FROM python:3.13-alpine AS builder
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
|
||||
|
||||
WORKDIR /app
|
||||
RUN apk add --no-cache gcc libffi-dev musl-dev && \
|
||||
pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir -q poetry=="${POETRY_VERSION}"
|
||||
COPY . .
|
||||
RUN poetry config virtualenvs.create false && poetry build -f wheel
|
||||
|
||||
FROM python:3.12-alpine
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
uv sync --frozen --no-install-project --no-editable
|
||||
|
||||
COPY . /app
|
||||
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
uv sync --frozen --no-editable --no-group dev
|
||||
|
||||
FROM python:3.13-alpine
|
||||
|
||||
LABEL org.opencontainers.image.source="https://github.com/elisiariocouto/leggen"
|
||||
LABEL org.opencontainers.image.authors="Elisiário Couto <elisiario@couto.io>"
|
||||
@@ -18,7 +23,8 @@ LABEL org.opencontainers.image.description="An Open Banking CLI"
|
||||
LABEL org.opencontainers.image.url="https://github.com/elisiariocouto/leggen"
|
||||
|
||||
WORKDIR /app
|
||||
COPY --from=builder /app/dist/ /app/
|
||||
RUN pip --no-cache-dir install leggen*.whl && \
|
||||
rm leggen*.whl
|
||||
ENTRYPOINT ["/usr/local/bin/leggen"]
|
||||
ENV PATH="/app/.venv/bin:$PATH"
|
||||
|
||||
COPY --from=builder --chown=app:app /app/.venv /app/.venv
|
||||
|
||||
ENTRYPOINT ["/app/.venv/bin/leggen"]
|
||||
|
||||
91
PROJECT.md
Normal file
91
PROJECT.md
Normal file
@@ -0,0 +1,91 @@
|
||||
# Leggen Web Transformation Project
|
||||
|
||||
## Overview
|
||||
Transform leggen from CLI-only to web application with FastAPI backend (`leggend`) and SvelteKit frontend (`leggen-web`).
|
||||
|
||||
## Progress Tracking
|
||||
|
||||
### ✅ Phase 1: FastAPI Backend (`leggend`)
|
||||
|
||||
#### 1.1 Core Structure
|
||||
- [x] Create directory structure (`leggend/`, `api/`, `services/`, etc.)
|
||||
- [x] Add FastAPI dependencies to pyproject.toml
|
||||
- [x] Create configuration management system
|
||||
- [x] Set up FastAPI main application
|
||||
- [x] Create Pydantic models for API responses
|
||||
|
||||
#### 1.2 API Endpoints
|
||||
- [x] Banks API (`/api/v1/banks/`)
|
||||
- [x] `GET /institutions` - List available banks
|
||||
- [x] `POST /connect` - Connect to bank
|
||||
- [x] `GET /status` - Bank connection status
|
||||
- [x] Accounts API (`/api/v1/accounts/`)
|
||||
- [x] `GET /` - List all accounts
|
||||
- [x] `GET /{id}/balances` - Account balances
|
||||
- [x] `GET /{id}/transactions` - Account transactions
|
||||
- [x] Sync API (`/api/v1/sync/`)
|
||||
- [x] `POST /` - Trigger manual sync
|
||||
- [x] `GET /status` - Sync status
|
||||
- [x] Notifications API (`/api/v1/notifications/`)
|
||||
- [x] `GET/POST/PUT /settings` - Manage notification settings
|
||||
|
||||
#### 1.3 Background Jobs
|
||||
- [x] Implement APScheduler for sync scheduling
|
||||
- [x] Replace Ofelia with internal Python scheduler
|
||||
- [x] Migrate existing sync logic from CLI
|
||||
|
||||
### ⏳ Phase 2: SvelteKit Frontend (`leggen-web`)
|
||||
|
||||
#### 2.1 Project Setup
|
||||
- [ ] Create SvelteKit project structure
|
||||
- [ ] Set up API client for backend communication
|
||||
- [ ] Design component architecture
|
||||
|
||||
#### 2.2 UI Components
|
||||
- [ ] Dashboard with account overview
|
||||
- [ ] Bank connection wizard
|
||||
- [ ] Transaction history and filtering
|
||||
- [ ] Settings management
|
||||
- [ ] Real-time sync status
|
||||
|
||||
### ✅ Phase 3: CLI Refactoring
|
||||
|
||||
#### 3.1 API Client Integration
|
||||
- [x] Create HTTP client for FastAPI calls
|
||||
- [x] Refactor existing commands to use APIs
|
||||
- [x] Maintain CLI user experience
|
||||
- [x] Add API URL configuration option
|
||||
|
||||
### ✅ Phase 4: Docker & Deployment
|
||||
|
||||
#### 4.1 Container Setup
|
||||
- [x] Create Dockerfile for `leggend` service
|
||||
- [x] Update docker-compose.yml with `leggend` service
|
||||
- [x] Remove Ofelia dependency (scheduler now internal)
|
||||
- [ ] Create Dockerfile for `leggen-web` (deferred - not implementing web UI yet)
|
||||
|
||||
## Current Status
|
||||
**Active Phase**: Phase 2 - CLI Integration Complete
|
||||
**Last Updated**: 2025-09-01
|
||||
**Completion**: ~80% (FastAPI backend and CLI refactoring complete)
|
||||
|
||||
## Next Steps (Future Enhancements)
|
||||
1. Implement SvelteKit web frontend
|
||||
2. Add real-time WebSocket support for sync status
|
||||
3. Implement user authentication and multi-user support
|
||||
4. Add more comprehensive error handling and logging
|
||||
5. Implement database migrations for schema changes
|
||||
|
||||
## Recent Achievements
|
||||
- ✅ Complete FastAPI backend with all major endpoints
|
||||
- ✅ Configurable background job scheduler (replaces Ofelia)
|
||||
- ✅ CLI successfully refactored to use API endpoints
|
||||
- ✅ Docker configuration updated for new architecture
|
||||
- ✅ Maintained backward compatibility and user experience
|
||||
|
||||
## Architecture Decisions
|
||||
- **FastAPI**: For high-performance async API backend
|
||||
- **APScheduler**: For internal job scheduling (replacing Ofelia)
|
||||
- **SvelteKit**: For modern, reactive frontend
|
||||
- **Existing Logic**: Reuse all business logic from current CLI commands
|
||||
- **Configuration**: Centralize in `leggend` service, maintain TOML compatibility
|
||||
320
README.md
320
README.md
@@ -1,45 +1,100 @@
|
||||
# 💲 leggen
|
||||
|
||||
An Open Banking CLI.
|
||||
An Open Banking CLI and API service for managing bank connections and transactions.
|
||||
|
||||
This tool aims to provide a simple way to connect to banks using the GoCardless Open Banking API.
|
||||
This tool provides both a **FastAPI backend service** (`leggend`) and a **command-line interface** (`leggen`) to connect to banks using the GoCardless Open Banking API.
|
||||
|
||||
Having a simple CLI tool to connect to banks and list transactions can be very useful for developers and companies that need to access bank data.
|
||||
**New in v0.6.11**: Web-ready architecture with FastAPI backend, enhanced CLI, and background job scheduling.
|
||||
|
||||
Having your bank data in a database, gives you the power to backup, analyze and create reports with your data.
|
||||
Having your bank data accessible through both CLI and REST API gives you the power to backup, analyze, create reports, and integrate with other applications.
|
||||
|
||||
## 🛠️ Technologies
|
||||
|
||||
### 🔌 API & Backend
|
||||
- [FastAPI](https://fastapi.tiangolo.com/): High-performance async API backend (`leggend` service)
|
||||
- [GoCardless Open Banking API](https://developer.gocardless.com/bank-account-data/overview): for connecting to banks
|
||||
- [APScheduler](https://apscheduler.readthedocs.io/): Background job scheduling with configurable cron
|
||||
|
||||
### 📦 Storage
|
||||
- [SQLite](https://www.sqlite.org): for storing transactions, simple and easy to use
|
||||
- [MongoDB](https://www.mongodb.com/docs/): alternative store for transactions, good balance between performance and query capabilities
|
||||
|
||||
### ⏰ Scheduling
|
||||
- [Ofelia](https://github.com/mcuadros/ofelia): for scheduling regular syncs with the database when using Docker
|
||||
|
||||
### 📊 Visualization
|
||||
- [NocoDB](https://github.com/nocodb/nocodb): for visualizing and querying transactions, a simple and easy to use interface for SQLite
|
||||
|
||||
## ✨ Features
|
||||
- Connect to banks using GoCardless Open Banking API
|
||||
- List all connected banks and their statuses
|
||||
- List balances of all connected accounts
|
||||
- List transactions for all connected accounts
|
||||
- Sync all transactions with a SQLite and/or MongoDB database
|
||||
- Visualize and query transactions using NocoDB
|
||||
- Schedule regular syncs with the database using Ofelia
|
||||
- Send notifications to Discord and/or Telegram when transactions match certain filters
|
||||
|
||||
## 🚀 Installation and Configuration
|
||||
### 🎯 Core Banking Features
|
||||
- Connect to banks using GoCardless Open Banking API (30+ EU countries)
|
||||
- List all connected banks and their connection statuses
|
||||
- View balances of all connected accounts
|
||||
- List and filter transactions across all accounts
|
||||
- Support for both booked and pending transactions
|
||||
|
||||
In order to use `leggen`, you need to create a GoCardless account. GoCardless is a service that provides access to Open Banking APIs. You can create an account at https://gocardless.com/bank-account-data/.
|
||||
### 🔄 Data Management
|
||||
- Sync all transactions with SQLite database
|
||||
- Background sync scheduling with configurable cron expressions
|
||||
- Automatic transaction deduplication and status tracking
|
||||
- Real-time sync status monitoring
|
||||
|
||||
After creating an account and getting your API keys, the best way is to use the [compose file](compose.yml). Open the file and adapt it to your needs.
|
||||
### 📡 API & Integration
|
||||
- **REST API**: Complete FastAPI backend with comprehensive endpoints
|
||||
- **CLI Interface**: Enhanced command-line tools with new options
|
||||
- **Health Checks**: Service monitoring and dependency management
|
||||
- **Auto-reload**: Development mode with file watching
|
||||
|
||||
### Example Configuration
|
||||
### 🔔 Notifications & Monitoring
|
||||
- Discord and Telegram notifications for filtered transactions
|
||||
- Configurable transaction filters (case-sensitive/insensitive)
|
||||
- Account expiry notifications and status alerts
|
||||
- Comprehensive logging and error handling
|
||||
|
||||
Create a configuration file at with the following content:
|
||||
### 📊 Visualization & Analysis
|
||||
- NocoDB integration for visual data exploration
|
||||
- Transaction statistics and reporting
|
||||
- Account balance tracking over time
|
||||
- Export capabilities for further analysis
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### Prerequisites
|
||||
1. Create a GoCardless account at [https://gocardless.com/bank-account-data/](https://gocardless.com/bank-account-data/)
|
||||
2. Get your API credentials (key and secret)
|
||||
|
||||
### Installation Options
|
||||
|
||||
#### Option 1: Docker Compose (Recommended)
|
||||
The easiest way to get started is with Docker Compose:
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/elisiariocouto/leggen.git
|
||||
cd leggen
|
||||
|
||||
# Create your configuration
|
||||
mkdir -p leggen && cp config.example.toml leggen/config.toml
|
||||
# Edit leggen/config.toml with your GoCardless credentials
|
||||
|
||||
# Start all services
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
#### Option 2: Local Development
|
||||
For development or local installation:
|
||||
|
||||
```bash
|
||||
# Install with uv (recommended) or pip
|
||||
uv sync # or pip install -e .
|
||||
|
||||
# Start the API service
|
||||
uv run leggend --reload # Development mode with auto-reload
|
||||
|
||||
# Use the CLI (in another terminal)
|
||||
uv run leggen --help
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
Create a configuration file at `~/.config/leggen/config.toml`:
|
||||
|
||||
```toml
|
||||
[gocardless]
|
||||
@@ -49,70 +104,219 @@ url = "https://bankaccountdata.gocardless.com/api/v2"
|
||||
|
||||
[database]
|
||||
sqlite = true
|
||||
mongodb = true
|
||||
|
||||
[database.mongodb]
|
||||
uri = "mongodb://localhost:27017"
|
||||
# Optional: Background sync scheduling
|
||||
[scheduler.sync]
|
||||
enabled = true
|
||||
hour = 3 # 3 AM
|
||||
minute = 0
|
||||
# cron = "0 3 * * *" # Alternative: use cron expression
|
||||
|
||||
# Optional: Discord notifications
|
||||
[notifications.discord]
|
||||
webhook = "https://discord.com/api/webhooks/..."
|
||||
enabled = true
|
||||
|
||||
# Optional: Telegram notifications
|
||||
[notifications.telegram]
|
||||
# See gist for telegram instructions
|
||||
# https://gist.github.com/nafiesl/4ad622f344cd1dc3bb1ecbe468ff9f8a
|
||||
token = "12345:abcdefghijklmnopqrstuvxwyz"
|
||||
chat-id = 12345
|
||||
token = "your-bot-token"
|
||||
chat_id = 12345
|
||||
enabled = true
|
||||
|
||||
# Optional: Transaction filters for notifications
|
||||
[filters.case-insensitive]
|
||||
filter1 = "company-name"
|
||||
salary = "salary"
|
||||
bills = "utility"
|
||||
```
|
||||
|
||||
### Running Leggen with Docker
|
||||
## 📖 Usage
|
||||
|
||||
After adapting the compose file, run the following command:
|
||||
### API Service (`leggend`)
|
||||
|
||||
Start the FastAPI backend service:
|
||||
|
||||
```bash
|
||||
$ docker compose up -d
|
||||
# Production mode
|
||||
leggend
|
||||
|
||||
# Development mode with auto-reload
|
||||
leggend --reload
|
||||
|
||||
# Custom host and port
|
||||
leggend --host 127.0.0.1 --port 8080
|
||||
```
|
||||
|
||||
The leggen container will exit, this is expected since you didn't connect any bank accounts yet.
|
||||
**API Documentation**: Visit `http://localhost:8000/docs` for interactive API documentation.
|
||||
|
||||
Run the following command and follow the instructions:
|
||||
### CLI Commands (`leggen`)
|
||||
|
||||
#### Basic Commands
|
||||
```bash
|
||||
# Check connection status
|
||||
leggen status
|
||||
|
||||
# Connect to a new bank
|
||||
leggen bank add
|
||||
|
||||
# View account balances
|
||||
leggen balances
|
||||
|
||||
# List recent transactions
|
||||
leggen transactions --limit 20
|
||||
|
||||
# View detailed transactions
|
||||
leggen transactions --full
|
||||
```
|
||||
|
||||
#### Sync Operations
|
||||
```bash
|
||||
# Start background sync
|
||||
leggen sync
|
||||
|
||||
# Synchronous sync (wait for completion)
|
||||
leggen sync --wait
|
||||
|
||||
# Force sync (override running sync)
|
||||
leggen sync --force --wait
|
||||
```
|
||||
|
||||
#### API Integration
|
||||
```bash
|
||||
# Use custom API URL
|
||||
leggen --api-url http://localhost:8080 status
|
||||
|
||||
# Set via environment variable
|
||||
export LEGGEND_API_URL=http://localhost:8080
|
||||
leggen status
|
||||
```
|
||||
|
||||
### Docker Usage
|
||||
|
||||
```bash
|
||||
$ docker compose run leggen bank add
|
||||
# Start all services
|
||||
docker compose up -d
|
||||
|
||||
# Connect to a bank
|
||||
docker compose run leggen bank add
|
||||
|
||||
# Run a sync
|
||||
docker compose run leggen sync --wait
|
||||
|
||||
# Check logs
|
||||
docker compose logs leggend
|
||||
```
|
||||
|
||||
To sync all transactions with the database, run the following command:
|
||||
## 🔌 API Endpoints
|
||||
|
||||
The FastAPI backend provides comprehensive REST endpoints:
|
||||
|
||||
### Banks & Connections
|
||||
- `GET /api/v1/banks/institutions?country=PT` - List available banks
|
||||
- `POST /api/v1/banks/connect` - Create bank connection
|
||||
- `GET /api/v1/banks/status` - Connection status
|
||||
- `GET /api/v1/banks/countries` - Supported countries
|
||||
|
||||
### Accounts & Balances
|
||||
- `GET /api/v1/accounts` - List all accounts
|
||||
- `GET /api/v1/accounts/{id}` - Account details
|
||||
- `GET /api/v1/accounts/{id}/balances` - Account balances
|
||||
- `GET /api/v1/accounts/{id}/transactions` - Account transactions
|
||||
|
||||
### Transactions
|
||||
- `GET /api/v1/transactions` - All transactions with filtering
|
||||
- `GET /api/v1/transactions/stats` - Transaction statistics
|
||||
|
||||
### Sync & Scheduling
|
||||
- `POST /api/v1/sync` - Trigger background sync
|
||||
- `POST /api/v1/sync/now` - Synchronous sync
|
||||
- `GET /api/v1/sync/status` - Sync status
|
||||
- `GET/PUT /api/v1/sync/scheduler` - Scheduler configuration
|
||||
|
||||
### Notifications
|
||||
- `GET/PUT /api/v1/notifications/settings` - Manage notifications
|
||||
- `POST /api/v1/notifications/test` - Test notifications
|
||||
|
||||
## 🛠️ Development
|
||||
|
||||
### Local Development Setup
|
||||
```bash
|
||||
# Clone and setup
|
||||
git clone https://github.com/elisiariocouto/leggen.git
|
||||
cd leggen
|
||||
|
||||
# Install dependencies
|
||||
uv sync
|
||||
|
||||
# Start API service with auto-reload
|
||||
uv run leggend --reload
|
||||
|
||||
# Use CLI commands
|
||||
uv run leggen status
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Run the comprehensive test suite with:
|
||||
|
||||
```bash
|
||||
$ docker compose run leggen sync
|
||||
# Run all tests
|
||||
uv run pytest
|
||||
|
||||
# Run unit tests only
|
||||
uv run pytest tests/unit/
|
||||
|
||||
# Run with verbose output
|
||||
uv run pytest tests/unit/ -v
|
||||
|
||||
# Run specific test files
|
||||
uv run pytest tests/unit/test_config.py -v
|
||||
uv run pytest tests/unit/test_scheduler.py -v
|
||||
uv run pytest tests/unit/test_api_banks.py -v
|
||||
|
||||
# Run tests by markers
|
||||
uv run pytest -m unit # Unit tests
|
||||
uv run pytest -m api # API endpoint tests
|
||||
uv run pytest -m cli # CLI tests
|
||||
```
|
||||
|
||||
## 👩🏫 Usage
|
||||
The test suite includes:
|
||||
- **Configuration management tests** - TOML config loading/saving
|
||||
- **API endpoint tests** - FastAPI route testing with mocked dependencies
|
||||
- **CLI API client tests** - HTTP client integration testing
|
||||
- **Background scheduler tests** - APScheduler job management
|
||||
- **Mock data and fixtures** - Realistic test data for banks, accounts, transactions
|
||||
|
||||
### Code Structure
|
||||
```
|
||||
$ leggen --help
|
||||
Usage: leggen [OPTIONS] COMMAND [ARGS]...
|
||||
leggen/ # CLI application
|
||||
├── commands/ # CLI command implementations
|
||||
├── utils/ # Shared utilities
|
||||
└── api_client.py # API client for leggend service
|
||||
|
||||
Leggen: An Open Banking CLI
|
||||
leggend/ # FastAPI backend service
|
||||
├── api/ # API routes and models
|
||||
├── services/ # Business logic
|
||||
├── background/ # Background job scheduler
|
||||
└── main.py # FastAPI application
|
||||
|
||||
Options:
|
||||
--version Show the version and exit.
|
||||
-c, --config FILE Path to TOML configuration file
|
||||
[env var: LEGGEN_CONFIG_FILE;
|
||||
default: ~/.config/leggen/config.toml]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Command Groups:
|
||||
bank Manage banks connections
|
||||
|
||||
Commands:
|
||||
balances List balances of all connected accounts
|
||||
status List all connected banks and their status
|
||||
sync Sync all transactions with database
|
||||
transactions List transactions
|
||||
tests/ # Test suite
|
||||
├── conftest.py # Shared test fixtures
|
||||
└── unit/ # Unit tests
|
||||
├── test_config.py # Configuration tests
|
||||
├── test_scheduler.py # Background scheduler tests
|
||||
├── test_api_banks.py # Banks API tests
|
||||
├── test_api_accounts.py # Accounts API tests
|
||||
└── test_api_client.py # CLI API client tests
|
||||
```
|
||||
|
||||
## ⚠️ Caveats
|
||||
- This project is still in early development, breaking changes may occur.
|
||||
### Contributing
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Make your changes with tests
|
||||
4. Submit a pull request
|
||||
|
||||
## ⚠️ Notes
|
||||
- This project is in active development
|
||||
- Web frontend planned for future releases
|
||||
- GoCardless API rate limits apply
|
||||
- Some banks may require additional authorization steps
|
||||
|
||||
39
compose.yml
39
compose.yml
@@ -1,12 +1,21 @@
|
||||
services:
|
||||
# Defaults to `sync` command.
|
||||
leggen:
|
||||
image: elisiariocouto/leggen:latest
|
||||
command: sync
|
||||
restart: "no"
|
||||
# FastAPI backend service
|
||||
leggend:
|
||||
build:
|
||||
context: .
|
||||
restart: "unless-stopped"
|
||||
ports:
|
||||
- "127.0.0.1:8000:8000"
|
||||
volumes:
|
||||
- "./leggen:/root/.config/leggen" # Default configuration file should be in this directory, named `config.toml`
|
||||
- "./db:/app"
|
||||
- "./leggen:/root/.config/leggen" # Configuration file directory
|
||||
- "./db:/app" # Database storage
|
||||
environment:
|
||||
- LEGGEN_CONFIG_FILE=/root/.config/leggen/config.toml
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
|
||||
nocodb:
|
||||
image: nocodb/nocodb:latest
|
||||
@@ -17,20 +26,8 @@ services:
|
||||
ports:
|
||||
- "127.0.0.1:8080:8080"
|
||||
depends_on:
|
||||
- leggen
|
||||
|
||||
# Recommended: Run `leggen sync` every day.
|
||||
ofelia:
|
||||
image: mcuadros/ofelia:latest
|
||||
restart: "unless-stopped"
|
||||
depends_on:
|
||||
- leggen
|
||||
command: daemon --docker -f label=com.docker.compose.project=${COMPOSE_PROJECT_NAME}
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
labels:
|
||||
ofelia.job-run.leggen-sync.schedule: "0 0 3 * * *"
|
||||
ofelia.job-run.leggen-sync.container: ${COMPOSE_PROJECT_NAME}-leggen-1
|
||||
leggend:
|
||||
condition: service_healthy
|
||||
|
||||
# Optional: If you want to have a mongodb, uncomment the following lines
|
||||
# mongo:
|
||||
|
||||
24
frontend/.gitignore
vendored
Normal file
24
frontend/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
102
frontend/README.md
Normal file
102
frontend/README.md
Normal file
@@ -0,0 +1,102 @@
|
||||
# Leggen Frontend
|
||||
|
||||
A modern React dashboard for the Leggen Open Banking CLI tool. This frontend provides a user-friendly interface to view bank accounts, transactions, and balances.
|
||||
|
||||
## Features
|
||||
|
||||
- **Modern Dashboard**: Clean, responsive interface built with React and TypeScript
|
||||
- **Bank Accounts Overview**: View all connected bank accounts with real-time balances
|
||||
- **Transaction Management**: Browse, search, and filter transactions across all accounts
|
||||
- **Responsive Design**: Works seamlessly on desktop, tablet, and mobile devices
|
||||
- **Real-time Data**: Powered by React Query for efficient data fetching and caching
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js 18+ and npm
|
||||
- Leggen API server running on `localhost:8000`
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. **Install dependencies:**
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
2. **Start the development server:**
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
3. **Open your browser to:**
|
||||
```
|
||||
http://localhost:5173
|
||||
```
|
||||
|
||||
## Available Scripts
|
||||
|
||||
- `npm run dev` - Start development server
|
||||
- `npm run build` - Build for production
|
||||
- `npm run preview` - Preview production build
|
||||
- `npm run lint` - Run ESLint
|
||||
|
||||
## Architecture
|
||||
|
||||
### Key Technologies
|
||||
|
||||
- **React 18** - Modern React with hooks and concurrent features
|
||||
- **TypeScript** - Type-safe JavaScript development
|
||||
- **Vite** - Fast build tool and development server
|
||||
- **Tailwind CSS** - Utility-first CSS framework
|
||||
- **React Query** - Data fetching and caching
|
||||
- **Axios** - HTTP client for API calls
|
||||
- **Lucide React** - Modern icon library
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
|
||||
src/
|
||||
├── components/ # React components
|
||||
│ ├── Dashboard.tsx # Main dashboard layout
|
||||
│ ├── AccountsOverview.tsx
|
||||
│ └── TransactionsList.tsx
|
||||
├── lib/ # Utilities and API client
|
||||
│ ├── api.ts # API client and endpoints
|
||||
│ └── utils.ts # Helper functions
|
||||
├── types/ # TypeScript type definitions
|
||||
│ └── api.ts # API response types
|
||||
└── App.tsx # Main application component
|
||||
```
|
||||
|
||||
## API Integration
|
||||
|
||||
The frontend connects to the Leggen API server running on `localhost:8000`. The API client handles:
|
||||
|
||||
- Account retrieval and management
|
||||
- Transaction fetching with filtering
|
||||
- Balance information
|
||||
- Error handling and loading states
|
||||
|
||||
## Configuration
|
||||
|
||||
The API base URL is configured in `src/lib/api.ts`. Update the `API_BASE_URL` constant if your Leggen server runs on a different port or host.
|
||||
|
||||
## Development
|
||||
|
||||
The dashboard is designed to work with the Leggen CLI tool's API endpoints. Make sure your Leggen server is running before starting the frontend development server.
|
||||
|
||||
### Adding New Features
|
||||
|
||||
1. Define TypeScript types in `src/types/api.ts`
|
||||
2. Add API methods to `src/lib/api.ts`
|
||||
3. Create React components in `src/components/`
|
||||
4. Use React Query for data fetching and state management
|
||||
|
||||
## Deployment
|
||||
|
||||
Build the application for production:
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
|
||||
The built files will be in the `dist/` directory, ready to be served by any static web server.
|
||||
23
frontend/eslint.config.js
Normal file
23
frontend/eslint.config.js
Normal file
@@ -0,0 +1,23 @@
|
||||
import js from '@eslint/js'
|
||||
import globals from 'globals'
|
||||
import reactHooks from 'eslint-plugin-react-hooks'
|
||||
import reactRefresh from 'eslint-plugin-react-refresh'
|
||||
import tseslint from 'typescript-eslint'
|
||||
import { globalIgnores } from 'eslint/config'
|
||||
|
||||
export default tseslint.config([
|
||||
globalIgnores(['dist']),
|
||||
{
|
||||
files: ['**/*.{ts,tsx}'],
|
||||
extends: [
|
||||
js.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
reactHooks.configs['recommended-latest'],
|
||||
reactRefresh.configs.vite,
|
||||
],
|
||||
languageOptions: {
|
||||
ecmaVersion: 2020,
|
||||
globals: globals.browser,
|
||||
},
|
||||
},
|
||||
])
|
||||
13
frontend/index.html
Normal file
13
frontend/index.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Vite + React + TS</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
4904
frontend/package-lock.json
generated
Normal file
4904
frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
37
frontend/package.json
Normal file
37
frontend/package.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"lint": "eslint .",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@tailwindcss/forms": "^0.5.10",
|
||||
"@tanstack/react-query": "^5.87.1",
|
||||
"autoprefixer": "^10.4.21",
|
||||
"axios": "^1.11.0",
|
||||
"clsx": "^2.1.1",
|
||||
"lucide-react": "^0.542.0",
|
||||
"postcss": "^8.5.6",
|
||||
"react": "^19.1.1",
|
||||
"react-dom": "^19.1.1",
|
||||
"tailwindcss": "^3.4.17"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.33.0",
|
||||
"@types/react": "^19.1.10",
|
||||
"@types/react-dom": "^19.1.7",
|
||||
"@vitejs/plugin-react": "^5.0.0",
|
||||
"eslint": "^9.33.0",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.20",
|
||||
"globals": "^16.3.0",
|
||||
"typescript": "~5.8.3",
|
||||
"typescript-eslint": "^8.39.1",
|
||||
"vite": "^7.1.2"
|
||||
}
|
||||
}
|
||||
6
frontend/postcss.config.js
Normal file
6
frontend/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
export default {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
}
|
||||
1
frontend/public/vite.svg
Normal file
1
frontend/public/vite.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
1
frontend/src/App.css
Normal file
1
frontend/src/App.css
Normal file
@@ -0,0 +1 @@
|
||||
/* Additional styles if needed */
|
||||
23
frontend/src/App.tsx
Normal file
23
frontend/src/App.tsx
Normal file
@@ -0,0 +1,23 @@
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
||||
import Dashboard from './components/Dashboard';
|
||||
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
refetchOnWindowFocus: false,
|
||||
retry: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
function App() {
|
||||
return (
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<div className="min-h-screen bg-gray-50">
|
||||
<Dashboard />
|
||||
</div>
|
||||
</QueryClientProvider>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
1
frontend/src/assets/react.svg
Normal file
1
frontend/src/assets/react.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
181
frontend/src/components/AccountsOverview.tsx
Normal file
181
frontend/src/components/AccountsOverview.tsx
Normal file
@@ -0,0 +1,181 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import {
|
||||
CreditCard,
|
||||
TrendingUp,
|
||||
TrendingDown,
|
||||
Building2,
|
||||
RefreshCw,
|
||||
AlertCircle
|
||||
} from 'lucide-react';
|
||||
import { apiClient } from '../lib/api';
|
||||
import { formatCurrency, formatDate } from '../lib/utils';
|
||||
import LoadingSpinner from './LoadingSpinner';
|
||||
import type { Account, Balance } from '../types/api';
|
||||
|
||||
export default function AccountsOverview() {
|
||||
const {
|
||||
data: accounts,
|
||||
isLoading: accountsLoading,
|
||||
error: accountsError,
|
||||
refetch: refetchAccounts
|
||||
} = useQuery<Account[]>({
|
||||
queryKey: ['accounts'],
|
||||
queryFn: apiClient.getAccounts,
|
||||
});
|
||||
|
||||
const {
|
||||
data: balances
|
||||
} = useQuery<Balance[]>({
|
||||
queryKey: ['balances'],
|
||||
queryFn: () => apiClient.getBalances(),
|
||||
});
|
||||
|
||||
if (accountsLoading) {
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow">
|
||||
<LoadingSpinner message="Loading accounts..." />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (accountsError) {
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-center text-center">
|
||||
<div>
|
||||
<AlertCircle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-2">Failed to load accounts</h3>
|
||||
<p className="text-gray-600 mb-4">
|
||||
Unable to connect to the Leggen API. Make sure the server is running on localhost:8000.
|
||||
</p>
|
||||
<button
|
||||
onClick={() => refetchAccounts()}
|
||||
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const totalBalance = accounts?.reduce((sum, account) => sum + (account.balance || 0), 0) || 0;
|
||||
const totalAccounts = accounts?.length || 0;
|
||||
const uniqueBanks = new Set(accounts?.map(acc => acc.bank_name) || []).size;
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Summary Cards */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<p className="text-sm font-medium text-gray-600">Total Balance</p>
|
||||
<p className="text-2xl font-bold text-gray-900">
|
||||
{formatCurrency(totalBalance)}
|
||||
</p>
|
||||
</div>
|
||||
<div className="p-3 bg-green-100 rounded-full">
|
||||
<TrendingUp className="h-6 w-6 text-green-600" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<p className="text-sm font-medium text-gray-600">Total Accounts</p>
|
||||
<p className="text-2xl font-bold text-gray-900">{totalAccounts}</p>
|
||||
</div>
|
||||
<div className="p-3 bg-blue-100 rounded-full">
|
||||
<CreditCard className="h-6 w-6 text-blue-600" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<p className="text-sm font-medium text-gray-600">Connected Banks</p>
|
||||
<p className="text-2xl font-bold text-gray-900">{uniqueBanks}</p>
|
||||
</div>
|
||||
<div className="p-3 bg-purple-100 rounded-full">
|
||||
<Building2 className="h-6 w-6 text-purple-600" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Accounts List */}
|
||||
<div className="bg-white rounded-lg shadow">
|
||||
<div className="px-6 py-4 border-b border-gray-200">
|
||||
<h3 className="text-lg font-medium text-gray-900">Bank Accounts</h3>
|
||||
<p className="text-sm text-gray-600">Manage your connected bank accounts</p>
|
||||
</div>
|
||||
|
||||
{!accounts || accounts.length === 0 ? (
|
||||
<div className="p-6 text-center">
|
||||
<CreditCard className="h-12 w-12 text-gray-400 mx-auto mb-4" />
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-2">No accounts found</h3>
|
||||
<p className="text-gray-600">
|
||||
Connect your first bank account to get started with Leggen.
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="divide-y divide-gray-200">
|
||||
{accounts.map((account) => {
|
||||
const accountBalance = balances?.find(b => b.account_id === account.id);
|
||||
const balance = account.balance || accountBalance?.balance_amount || 0;
|
||||
const isPositive = balance >= 0;
|
||||
|
||||
return (
|
||||
<div key={account.id} className="p-6 hover:bg-gray-50 transition-colors">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center space-x-4">
|
||||
<div className="p-3 bg-gray-100 rounded-full">
|
||||
<Building2 className="h-6 w-6 text-gray-600" />
|
||||
</div>
|
||||
<div>
|
||||
<h4 className="text-lg font-medium text-gray-900">
|
||||
{account.name}
|
||||
</h4>
|
||||
<p className="text-sm text-gray-600">
|
||||
{account.bank_name} • {account.account_type}
|
||||
</p>
|
||||
{account.iban && (
|
||||
<p className="text-xs text-gray-500 mt-1">
|
||||
IBAN: {account.iban}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="text-right">
|
||||
<div className="flex items-center space-x-2">
|
||||
{isPositive ? (
|
||||
<TrendingUp className="h-4 w-4 text-green-500" />
|
||||
) : (
|
||||
<TrendingDown className="h-4 w-4 text-red-500" />
|
||||
)}
|
||||
<p className={`text-lg font-semibold ${
|
||||
isPositive ? 'text-green-600' : 'text-red-600'
|
||||
}`}>
|
||||
{formatCurrency(balance, account.currency)}
|
||||
</p>
|
||||
</div>
|
||||
<p className="text-sm text-gray-500">
|
||||
Updated {formatDate(account.updated_at)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
151
frontend/src/components/Dashboard.tsx
Normal file
151
frontend/src/components/Dashboard.tsx
Normal file
@@ -0,0 +1,151 @@
|
||||
import { useState } from 'react';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import {
|
||||
CreditCard,
|
||||
TrendingUp,
|
||||
Activity,
|
||||
Menu,
|
||||
X,
|
||||
Home,
|
||||
List,
|
||||
BarChart3
|
||||
} from 'lucide-react';
|
||||
import { apiClient } from '../lib/api';
|
||||
import AccountsOverview from './AccountsOverview';
|
||||
import TransactionsList from './TransactionsList';
|
||||
import ErrorBoundary from './ErrorBoundary';
|
||||
import { cn } from '../lib/utils';
|
||||
import type { Account } from '../types/api';
|
||||
|
||||
type TabType = 'overview' | 'transactions' | 'analytics';
|
||||
|
||||
export default function Dashboard() {
|
||||
const [activeTab, setActiveTab] = useState<TabType>('overview');
|
||||
const [sidebarOpen, setSidebarOpen] = useState(false);
|
||||
|
||||
const { data: accounts } = useQuery<Account[]>({
|
||||
queryKey: ['accounts'],
|
||||
queryFn: apiClient.getAccounts,
|
||||
});
|
||||
|
||||
const navigation = [
|
||||
{ name: 'Overview', icon: Home, id: 'overview' as TabType },
|
||||
{ name: 'Transactions', icon: List, id: 'transactions' as TabType },
|
||||
{ name: 'Analytics', icon: BarChart3, id: 'analytics' as TabType },
|
||||
];
|
||||
|
||||
const totalBalance = accounts?.reduce((sum, account) => sum + (account.balance || 0), 0) || 0;
|
||||
|
||||
return (
|
||||
<div className="flex h-screen bg-gray-100">
|
||||
{/* Sidebar */}
|
||||
<div className={cn(
|
||||
"fixed inset-y-0 left-0 z-50 w-64 bg-white shadow-lg transform transition-transform duration-300 ease-in-out lg:translate-x-0 lg:static lg:inset-0",
|
||||
sidebarOpen ? "translate-x-0" : "-translate-x-full"
|
||||
)}>
|
||||
<div className="flex items-center justify-between h-16 px-6 border-b border-gray-200">
|
||||
<div className="flex items-center space-x-2">
|
||||
<CreditCard className="h-8 w-8 text-blue-600" />
|
||||
<h1 className="text-xl font-bold text-gray-900">Leggen</h1>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => setSidebarOpen(false)}
|
||||
className="lg:hidden p-1 rounded-md text-gray-400 hover:text-gray-500"
|
||||
>
|
||||
<X className="h-6 w-6" />
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<nav className="px-6 py-4">
|
||||
<div className="space-y-1">
|
||||
{navigation.map((item) => (
|
||||
<button
|
||||
key={item.id}
|
||||
onClick={() => {
|
||||
setActiveTab(item.id);
|
||||
setSidebarOpen(false);
|
||||
}}
|
||||
className={cn(
|
||||
"flex items-center w-full px-3 py-2 text-sm font-medium rounded-md transition-colors",
|
||||
activeTab === item.id
|
||||
? "bg-blue-100 text-blue-700"
|
||||
: "text-gray-700 hover:text-gray-900 hover:bg-gray-100"
|
||||
)}
|
||||
>
|
||||
<item.icon className="mr-3 h-5 w-5" />
|
||||
{item.name}
|
||||
</button>
|
||||
))}
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
{/* Account Summary in Sidebar */}
|
||||
<div className="px-6 py-4 border-t border-gray-200 mt-auto">
|
||||
<div className="bg-gray-50 rounded-lg p-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm font-medium text-gray-600">Total Balance</span>
|
||||
<TrendingUp className="h-4 w-4 text-green-500" />
|
||||
</div>
|
||||
<p className="text-2xl font-bold text-gray-900 mt-1">
|
||||
{new Intl.NumberFormat('en-US', {
|
||||
style: 'currency',
|
||||
currency: 'EUR',
|
||||
}).format(totalBalance)}
|
||||
</p>
|
||||
<p className="text-sm text-gray-500 mt-1">
|
||||
{accounts?.length || 0} accounts
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Overlay for mobile */}
|
||||
{sidebarOpen && (
|
||||
<div
|
||||
className="fixed inset-0 z-40 bg-gray-600 bg-opacity-75 lg:hidden"
|
||||
onClick={() => setSidebarOpen(false)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Main content */}
|
||||
<div className="flex flex-col flex-1 overflow-hidden">
|
||||
{/* Header */}
|
||||
<header className="bg-white shadow-sm border-b border-gray-200">
|
||||
<div className="flex items-center justify-between h-16 px-6">
|
||||
<div className="flex items-center">
|
||||
<button
|
||||
onClick={() => setSidebarOpen(true)}
|
||||
className="lg:hidden p-1 rounded-md text-gray-400 hover:text-gray-500"
|
||||
>
|
||||
<Menu className="h-6 w-6" />
|
||||
</button>
|
||||
<h2 className="text-lg font-semibold text-gray-900 lg:ml-0 ml-4">
|
||||
{navigation.find(item => item.id === activeTab)?.name}
|
||||
</h2>
|
||||
</div>
|
||||
<div className="flex items-center space-x-2">
|
||||
<div className="flex items-center space-x-1">
|
||||
<Activity className="h-4 w-4 text-green-500" />
|
||||
<span className="text-sm text-gray-600">Connected</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
{/* Main content area */}
|
||||
<main className="flex-1 overflow-y-auto p-6">
|
||||
<ErrorBoundary>
|
||||
{activeTab === 'overview' && <AccountsOverview />}
|
||||
{activeTab === 'transactions' && <TransactionsList />}
|
||||
{activeTab === 'analytics' && (
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-4">Analytics</h3>
|
||||
<p className="text-gray-600">Analytics dashboard coming soon...</p>
|
||||
</div>
|
||||
)}
|
||||
</ErrorBoundary>
|
||||
</main>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
84
frontend/src/components/ErrorBoundary.tsx
Normal file
84
frontend/src/components/ErrorBoundary.tsx
Normal file
@@ -0,0 +1,84 @@
|
||||
import { Component } from 'react';
|
||||
import type { ErrorInfo, ReactNode } from 'react';
|
||||
import { AlertTriangle, RefreshCw } from 'lucide-react';
|
||||
|
||||
interface Props {
|
||||
children: ReactNode;
|
||||
fallback?: ReactNode;
|
||||
}
|
||||
|
||||
interface State {
|
||||
hasError: boolean;
|
||||
error?: Error;
|
||||
errorInfo?: ErrorInfo;
|
||||
}
|
||||
|
||||
class ErrorBoundary extends Component<Props, State> {
|
||||
constructor(props: Props) {
|
||||
super(props);
|
||||
this.state = { hasError: false };
|
||||
}
|
||||
|
||||
static getDerivedStateFromError(error: Error): State {
|
||||
return { hasError: true, error };
|
||||
}
|
||||
|
||||
componentDidCatch(error: Error, errorInfo: ErrorInfo) {
|
||||
console.error('ErrorBoundary caught an error:', error, errorInfo);
|
||||
this.setState({ error, errorInfo });
|
||||
}
|
||||
|
||||
handleReset = () => {
|
||||
this.setState({ hasError: false, error: undefined, errorInfo: undefined });
|
||||
};
|
||||
|
||||
render() {
|
||||
if (this.state.hasError) {
|
||||
if (this.props.fallback) {
|
||||
return this.props.fallback;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-center text-center">
|
||||
<div>
|
||||
<AlertTriangle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-2">Something went wrong</h3>
|
||||
<p className="text-gray-600 mb-4">
|
||||
An error occurred while rendering this component. Please try refreshing or check the console for more details.
|
||||
</p>
|
||||
|
||||
{this.state.error && (
|
||||
<div className="bg-red-50 border border-red-200 rounded-md p-3 mb-4 text-left">
|
||||
<p className="text-sm font-mono text-red-800">
|
||||
<strong>Error:</strong> {this.state.error.message}
|
||||
</p>
|
||||
{this.state.error.stack && (
|
||||
<details className="mt-2">
|
||||
<summary className="text-sm text-red-600 cursor-pointer">Stack trace</summary>
|
||||
<pre className="text-xs text-red-700 mt-1 whitespace-pre-wrap">
|
||||
{this.state.error.stack}
|
||||
</pre>
|
||||
</details>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<button
|
||||
onClick={this.handleReset}
|
||||
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Try Again
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return this.props.children;
|
||||
}
|
||||
}
|
||||
|
||||
export default ErrorBoundary;
|
||||
16
frontend/src/components/LoadingSpinner.tsx
Normal file
16
frontend/src/components/LoadingSpinner.tsx
Normal file
@@ -0,0 +1,16 @@
|
||||
import { RefreshCw } from 'lucide-react';
|
||||
|
||||
interface LoadingSpinnerProps {
|
||||
message?: string;
|
||||
}
|
||||
|
||||
export default function LoadingSpinner({ message = 'Loading...' }: LoadingSpinnerProps) {
|
||||
return (
|
||||
<div className="flex items-center justify-center p-8">
|
||||
<div className="text-center">
|
||||
<RefreshCw className="h-8 w-8 animate-spin text-blue-600 mx-auto mb-2" />
|
||||
<p className="text-gray-600 text-sm">{message}</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
309
frontend/src/components/TransactionsList.tsx
Normal file
309
frontend/src/components/TransactionsList.tsx
Normal file
@@ -0,0 +1,309 @@
|
||||
import { useState } from 'react';
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import {
|
||||
Filter,
|
||||
Search,
|
||||
TrendingUp,
|
||||
TrendingDown,
|
||||
Calendar,
|
||||
RefreshCw,
|
||||
AlertCircle,
|
||||
X
|
||||
} from 'lucide-react';
|
||||
import { apiClient } from '../lib/api';
|
||||
import { formatCurrency, formatDate } from '../lib/utils';
|
||||
import LoadingSpinner from './LoadingSpinner';
|
||||
import type { Account, Transaction } from '../types/api';
|
||||
|
||||
export default function TransactionsList() {
|
||||
const [searchTerm, setSearchTerm] = useState('');
|
||||
const [selectedAccount, setSelectedAccount] = useState<string>('');
|
||||
const [startDate, setStartDate] = useState('');
|
||||
const [endDate, setEndDate] = useState('');
|
||||
const [showFilters, setShowFilters] = useState(false);
|
||||
|
||||
const {
|
||||
data: accounts
|
||||
} = useQuery<Account[]>({
|
||||
queryKey: ['accounts'],
|
||||
queryFn: apiClient.getAccounts,
|
||||
});
|
||||
|
||||
const {
|
||||
data: transactions,
|
||||
isLoading: transactionsLoading,
|
||||
error: transactionsError,
|
||||
refetch: refetchTransactions
|
||||
} = useQuery<Transaction[]>({
|
||||
queryKey: ['transactions', selectedAccount, startDate, endDate],
|
||||
queryFn: () => apiClient.getTransactions({
|
||||
accountId: selectedAccount || undefined,
|
||||
startDate: startDate || undefined,
|
||||
endDate: endDate || undefined,
|
||||
}),
|
||||
});
|
||||
|
||||
const filteredTransactions = (transactions || []).filter(transaction => {
|
||||
// Additional validation (API client should have already filtered out invalid ones)
|
||||
if (!transaction || !transaction.account_id) {
|
||||
console.warn('Invalid transaction found after API filtering:', transaction);
|
||||
return false;
|
||||
}
|
||||
|
||||
const description = transaction.description || '';
|
||||
const creditorName = transaction.creditor_name || '';
|
||||
const debtorName = transaction.debtor_name || '';
|
||||
const reference = transaction.reference || '';
|
||||
|
||||
const matchesSearch = searchTerm === '' ||
|
||||
description.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||
creditorName.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||
debtorName.toLowerCase().includes(searchTerm.toLowerCase()) ||
|
||||
reference.toLowerCase().includes(searchTerm.toLowerCase());
|
||||
|
||||
return matchesSearch;
|
||||
});
|
||||
|
||||
const clearFilters = () => {
|
||||
setSearchTerm('');
|
||||
setSelectedAccount('');
|
||||
setStartDate('');
|
||||
setEndDate('');
|
||||
};
|
||||
|
||||
const hasActiveFilters = searchTerm || selectedAccount || startDate || endDate;
|
||||
|
||||
if (transactionsLoading) {
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow">
|
||||
<LoadingSpinner message="Loading transactions..." />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (transactionsError) {
|
||||
return (
|
||||
<div className="bg-white rounded-lg shadow p-6">
|
||||
<div className="flex items-center justify-center text-center">
|
||||
<div>
|
||||
<AlertCircle className="h-12 w-12 text-red-400 mx-auto mb-4" />
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-2">Failed to load transactions</h3>
|
||||
<p className="text-gray-600 mb-4">
|
||||
Unable to fetch transactions from the Leggen API.
|
||||
</p>
|
||||
<button
|
||||
onClick={() => refetchTransactions()}
|
||||
className="inline-flex items-center px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Retry
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Filters */}
|
||||
<div className="bg-white rounded-lg shadow">
|
||||
<div className="px-6 py-4 border-b border-gray-200">
|
||||
<div className="flex items-center justify-between">
|
||||
<h3 className="text-lg font-medium text-gray-900">Transactions</h3>
|
||||
<div className="flex items-center space-x-2">
|
||||
{hasActiveFilters && (
|
||||
<button
|
||||
onClick={clearFilters}
|
||||
className="inline-flex items-center px-3 py-1 text-sm bg-gray-100 text-gray-700 rounded-full hover:bg-gray-200 transition-colors"
|
||||
>
|
||||
<X className="h-3 w-3 mr-1" />
|
||||
Clear filters
|
||||
</button>
|
||||
)}
|
||||
<button
|
||||
onClick={() => setShowFilters(!showFilters)}
|
||||
className="inline-flex items-center px-3 py-2 bg-blue-100 text-blue-700 rounded-md hover:bg-blue-200 transition-colors"
|
||||
>
|
||||
<Filter className="h-4 w-4 mr-2" />
|
||||
Filters
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{showFilters && (
|
||||
<div className="px-6 py-4 border-b border-gray-200 bg-gray-50">
|
||||
<div className="grid grid-cols-1 md:grid-cols-4 gap-4">
|
||||
{/* Search */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Search
|
||||
</label>
|
||||
<div className="relative">
|
||||
<Search className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||
<input
|
||||
type="text"
|
||||
value={searchTerm}
|
||||
onChange={(e) => setSearchTerm(e.target.value)}
|
||||
placeholder="Description, name, reference..."
|
||||
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Account Filter */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Account
|
||||
</label>
|
||||
<select
|
||||
value={selectedAccount}
|
||||
onChange={(e) => setSelectedAccount(e.target.value)}
|
||||
className="w-full border border-gray-300 rounded-md px-3 py-2 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
>
|
||||
<option value="">All accounts</option>
|
||||
{accounts?.map((account) => (
|
||||
<option key={account.id} value={account.id}>
|
||||
{account.name} ({account.bank_name})
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{/* Start Date */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
Start Date
|
||||
</label>
|
||||
<div className="relative">
|
||||
<Calendar className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||
<input
|
||||
type="date"
|
||||
value={startDate}
|
||||
onChange={(e) => setStartDate(e.target.value)}
|
||||
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* End Date */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium text-gray-700 mb-1">
|
||||
End Date
|
||||
</label>
|
||||
<div className="relative">
|
||||
<Calendar className="absolute left-3 top-1/2 transform -translate-y-1/2 h-4 w-4 text-gray-400" />
|
||||
<input
|
||||
type="date"
|
||||
value={endDate}
|
||||
onChange={(e) => setEndDate(e.target.value)}
|
||||
className="pl-10 pr-3 py-2 w-full border border-gray-300 rounded-md focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Results Summary */}
|
||||
<div className="px-6 py-3 bg-gray-50 border-b border-gray-200">
|
||||
<p className="text-sm text-gray-600">
|
||||
Showing {filteredTransactions.length} transaction{filteredTransactions.length !== 1 ? 's' : ''}
|
||||
{selectedAccount && accounts && (
|
||||
<span className="ml-1">
|
||||
for {accounts.find(acc => acc.id === selectedAccount)?.name}
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Transactions List */}
|
||||
{filteredTransactions.length === 0 ? (
|
||||
<div className="bg-white rounded-lg shadow p-6 text-center">
|
||||
<div className="text-gray-400 mb-4">
|
||||
<TrendingUp className="h-12 w-12 mx-auto" />
|
||||
</div>
|
||||
<h3 className="text-lg font-medium text-gray-900 mb-2">No transactions found</h3>
|
||||
<p className="text-gray-600">
|
||||
{hasActiveFilters ?
|
||||
"Try adjusting your filters to see more results." :
|
||||
"No transactions are available for the selected criteria."
|
||||
}
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="bg-white rounded-lg shadow divide-y divide-gray-200">
|
||||
{filteredTransactions.map((transaction) => {
|
||||
const account = accounts?.find(acc => acc.id === transaction.account_id);
|
||||
const isPositive = transaction.amount > 0;
|
||||
|
||||
return (
|
||||
<div key={transaction.internal_transaction_id || `${transaction.account_id}-${transaction.date}-${transaction.amount}`} className="p-6 hover:bg-gray-50 transition-colors">
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1">
|
||||
<div className="flex items-start space-x-3">
|
||||
<div className={`p-2 rounded-full ${
|
||||
isPositive ? 'bg-green-100' : 'bg-red-100'
|
||||
}`}>
|
||||
{isPositive ? (
|
||||
<TrendingUp className="h-4 w-4 text-green-600" />
|
||||
) : (
|
||||
<TrendingDown className="h-4 w-4 text-red-600" />
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex-1">
|
||||
<h4 className="text-sm font-medium text-gray-900 mb-1">
|
||||
{transaction.description}
|
||||
</h4>
|
||||
|
||||
<div className="text-xs text-gray-500 space-y-1">
|
||||
{account && (
|
||||
<p>{account.name} • {account.bank_name}</p>
|
||||
)}
|
||||
|
||||
{(transaction.creditor_name || transaction.debtor_name) && (
|
||||
<p>
|
||||
{isPositive ? 'From: ' : 'To: '}
|
||||
{transaction.creditor_name || transaction.debtor_name}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{transaction.reference && (
|
||||
<p>Ref: {transaction.reference}</p>
|
||||
)}
|
||||
|
||||
{transaction.internal_transaction_id && (
|
||||
<p>ID: {transaction.internal_transaction_id}</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="text-right ml-4">
|
||||
<p className={`text-lg font-semibold ${
|
||||
isPositive ? 'text-green-600' : 'text-red-600'
|
||||
}`}>
|
||||
{isPositive ? '+' : ''}{formatCurrency(transaction.amount, transaction.currency)}
|
||||
</p>
|
||||
<p className="text-sm text-gray-500">
|
||||
{transaction.date ? formatDate(transaction.date) : 'No date'}
|
||||
</p>
|
||||
{transaction.booking_date && transaction.booking_date !== transaction.date && (
|
||||
<p className="text-xs text-gray-400">
|
||||
Booked: {formatDate(transaction.booking_date)}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
3
frontend/src/index.css
Normal file
3
frontend/src/index.css
Normal file
@@ -0,0 +1,3 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
67
frontend/src/lib/api.ts
Normal file
67
frontend/src/lib/api.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import axios from 'axios';
|
||||
import type { Account, Transaction, Balance, ApiResponse } from '../types/api';
|
||||
|
||||
const API_BASE_URL = import.meta.env.VITE_API_URL || 'http://localhost:8000/api/v1';
|
||||
|
||||
const api = axios.create({
|
||||
baseURL: API_BASE_URL,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
export const apiClient = {
|
||||
// Get all accounts
|
||||
getAccounts: async (): Promise<Account[]> => {
|
||||
const response = await api.get<ApiResponse<Account[]>>('/accounts');
|
||||
return response.data.data;
|
||||
},
|
||||
|
||||
// Get account by ID
|
||||
getAccount: async (id: string): Promise<Account> => {
|
||||
const response = await api.get<ApiResponse<Account>>(`/accounts/${id}`);
|
||||
return response.data.data;
|
||||
},
|
||||
|
||||
// Get all balances
|
||||
getBalances: async (): Promise<Balance[]> => {
|
||||
const response = await api.get<ApiResponse<Balance[]>>('/balances');
|
||||
return response.data.data;
|
||||
},
|
||||
|
||||
// Get balances for specific account
|
||||
getAccountBalances: async (accountId: string): Promise<Balance[]> => {
|
||||
const response = await api.get<ApiResponse<Balance[]>>(`/accounts/${accountId}/balances`);
|
||||
return response.data.data;
|
||||
},
|
||||
|
||||
// Get transactions with optional filters
|
||||
getTransactions: async (params?: {
|
||||
accountId?: string;
|
||||
startDate?: string;
|
||||
endDate?: string;
|
||||
page?: number;
|
||||
perPage?: number;
|
||||
search?: string;
|
||||
}): Promise<Transaction[]> => {
|
||||
const queryParams = new URLSearchParams();
|
||||
|
||||
if (params?.accountId) queryParams.append('account_id', params.accountId);
|
||||
if (params?.startDate) queryParams.append('start_date', params.startDate);
|
||||
if (params?.endDate) queryParams.append('end_date', params.endDate);
|
||||
if (params?.page) queryParams.append('page', params.page.toString());
|
||||
if (params?.perPage) queryParams.append('per_page', params.perPage.toString());
|
||||
if (params?.search) queryParams.append('search', params.search);
|
||||
|
||||
const response = await api.get<ApiResponse<Transaction[]>>(`/transactions?${queryParams.toString()}`);
|
||||
return response.data.data;
|
||||
},
|
||||
|
||||
// Get transaction by ID
|
||||
getTransaction: async (id: string): Promise<Transaction> => {
|
||||
const response = await api.get<ApiResponse<Transaction>>(`/transactions/${id}`);
|
||||
return response.data.data;
|
||||
},
|
||||
};
|
||||
|
||||
export default apiClient;
|
||||
46
frontend/src/lib/utils.ts
Normal file
46
frontend/src/lib/utils.ts
Normal file
@@ -0,0 +1,46 @@
|
||||
import { clsx, type ClassValue } from 'clsx';
|
||||
|
||||
export function cn(...inputs: ClassValue[]) {
|
||||
return clsx(inputs);
|
||||
}
|
||||
|
||||
export function formatCurrency(amount: number, currency: string = 'EUR'): string {
|
||||
return new Intl.NumberFormat('en-US', {
|
||||
style: 'currency',
|
||||
currency: currency,
|
||||
}).format(amount);
|
||||
}
|
||||
|
||||
export function formatDate(date: string): string {
|
||||
if (!date) return 'No date';
|
||||
|
||||
const parsedDate = new Date(date);
|
||||
if (isNaN(parsedDate.getTime())) {
|
||||
console.warn('Invalid date string:', date);
|
||||
return 'Invalid date';
|
||||
}
|
||||
|
||||
return new Intl.DateTimeFormat('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
}).format(parsedDate);
|
||||
}
|
||||
|
||||
export function formatDateTime(date: string): string {
|
||||
if (!date) return 'No date';
|
||||
|
||||
const parsedDate = new Date(date);
|
||||
if (isNaN(parsedDate.getTime())) {
|
||||
console.warn('Invalid date string:', date);
|
||||
return 'Invalid date';
|
||||
}
|
||||
|
||||
return new Intl.DateTimeFormat('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
}).format(parsedDate);
|
||||
}
|
||||
10
frontend/src/main.tsx
Normal file
10
frontend/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { StrictMode } from 'react'
|
||||
import { createRoot } from 'react-dom/client'
|
||||
import './index.css'
|
||||
import App from './App.tsx'
|
||||
|
||||
createRoot(document.getElementById('root')!).render(
|
||||
<StrictMode>
|
||||
<App />
|
||||
</StrictMode>,
|
||||
)
|
||||
81
frontend/src/types/api.ts
Normal file
81
frontend/src/types/api.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
export interface Account {
|
||||
id: string;
|
||||
name: string;
|
||||
bank_name: string;
|
||||
account_type: string;
|
||||
currency: string;
|
||||
balance?: number;
|
||||
iban?: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
}
|
||||
|
||||
export interface Transaction {
|
||||
internal_transaction_id: string | null;
|
||||
account_id: string;
|
||||
amount: number;
|
||||
currency: string;
|
||||
description: string;
|
||||
date: string;
|
||||
status: string;
|
||||
// Optional fields that may be present in some transactions
|
||||
booking_date?: string;
|
||||
value_date?: string;
|
||||
creditor_name?: string;
|
||||
debtor_name?: string;
|
||||
reference?: string;
|
||||
category?: string;
|
||||
created_at?: string;
|
||||
updated_at?: string;
|
||||
}
|
||||
|
||||
// Type for raw transaction data from API (before sanitization)
|
||||
export interface RawTransaction {
|
||||
id?: string;
|
||||
internal_id?: string;
|
||||
account_id?: string;
|
||||
amount?: number;
|
||||
currency?: string;
|
||||
description?: string;
|
||||
transaction_date?: string;
|
||||
booking_date?: string;
|
||||
value_date?: string;
|
||||
creditor_name?: string;
|
||||
debtor_name?: string;
|
||||
reference?: string;
|
||||
category?: string;
|
||||
created_at?: string;
|
||||
updated_at?: string;
|
||||
}
|
||||
|
||||
export interface Balance {
|
||||
id: string;
|
||||
account_id: string;
|
||||
balance_amount: number;
|
||||
balance_type: string;
|
||||
currency: string;
|
||||
reference_date: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
}
|
||||
|
||||
export interface Bank {
|
||||
id: string;
|
||||
name: string;
|
||||
country_code: string;
|
||||
logo_url?: string;
|
||||
}
|
||||
|
||||
export interface ApiResponse<T> {
|
||||
data: T;
|
||||
message?: string;
|
||||
success: boolean;
|
||||
}
|
||||
|
||||
export interface PaginatedResponse<T> {
|
||||
data: T[];
|
||||
total: number;
|
||||
page: number;
|
||||
per_page: number;
|
||||
total_pages: number;
|
||||
}
|
||||
1
frontend/src/vite-env.d.ts
vendored
Normal file
1
frontend/src/vite-env.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/// <reference types="vite/client" />
|
||||
13
frontend/tailwind.config.js
Normal file
13
frontend/tailwind.config.js
Normal file
@@ -0,0 +1,13 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
export default {
|
||||
content: [
|
||||
"./index.html",
|
||||
"./src/**/*.{js,ts,jsx,tsx}",
|
||||
],
|
||||
theme: {
|
||||
extend: {},
|
||||
},
|
||||
plugins: [
|
||||
require('@tailwindcss/forms'),
|
||||
],
|
||||
};
|
||||
27
frontend/tsconfig.app.json
Normal file
27
frontend/tsconfig.app.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
|
||||
"target": "ES2022",
|
||||
"useDefineForClassFields": true,
|
||||
"lib": ["ES2022", "DOM", "DOM.Iterable"],
|
||||
"module": "ESNext",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["src"]
|
||||
}
|
||||
7
frontend/tsconfig.json
Normal file
7
frontend/tsconfig.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"files": [],
|
||||
"references": [
|
||||
{ "path": "./tsconfig.app.json" },
|
||||
{ "path": "./tsconfig.node.json" }
|
||||
]
|
||||
}
|
||||
25
frontend/tsconfig.node.json
Normal file
25
frontend/tsconfig.node.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
|
||||
"target": "ES2023",
|
||||
"lib": ["ES2023"],
|
||||
"module": "ESNext",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Bundler mode */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"moduleDetection": "force",
|
||||
"noEmit": true,
|
||||
|
||||
/* Linting */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"erasableSyntaxOnly": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noUncheckedSideEffectImports": true
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
7
frontend/vite.config.ts
Normal file
7
frontend/vite.config.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { defineConfig } from 'vite'
|
||||
import react from '@vitejs/plugin-react'
|
||||
|
||||
// https://vite.dev/config/
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
})
|
||||
0
leggen/__init__.py
Normal file
0
leggen/__init__.py
Normal file
188
leggen/api_client.py
Normal file
188
leggen/api_client.py
Normal file
@@ -0,0 +1,188 @@
|
||||
import os
|
||||
import requests
|
||||
from typing import Dict, Any, Optional, List, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from leggen.utils.text import error
|
||||
|
||||
|
||||
class LeggendAPIClient:
|
||||
"""Client for communicating with the leggend FastAPI service"""
|
||||
|
||||
base_url: str
|
||||
|
||||
def __init__(self, base_url: Optional[str] = None):
|
||||
self.base_url = (
|
||||
base_url
|
||||
or os.environ.get("LEGGEND_API_URL", "http://localhost:8000")
|
||||
or "http://localhost:8000"
|
||||
)
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update(
|
||||
{"Content-Type": "application/json", "Accept": "application/json"}
|
||||
)
|
||||
|
||||
def _make_request(self, method: str, endpoint: str, **kwargs) -> Dict[str, Any]:
|
||||
"""Make HTTP request to the API"""
|
||||
url = urljoin(self.base_url, endpoint)
|
||||
|
||||
try:
|
||||
response = self.session.request(method, url, **kwargs)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
except requests.exceptions.ConnectionError:
|
||||
error("Could not connect to leggend service. Is it running?")
|
||||
error(f"Trying to connect to: {self.base_url}")
|
||||
raise
|
||||
except requests.exceptions.HTTPError as e:
|
||||
error(f"API request failed: {e}")
|
||||
if response.text:
|
||||
try:
|
||||
error_data = response.json()
|
||||
error(f"Error details: {error_data.get('detail', 'Unknown error')}")
|
||||
except Exception:
|
||||
error(f"Response: {response.text}")
|
||||
raise
|
||||
except Exception as e:
|
||||
error(f"Unexpected error: {e}")
|
||||
raise
|
||||
|
||||
def health_check(self) -> bool:
|
||||
"""Check if the leggend service is healthy"""
|
||||
try:
|
||||
response = self._make_request("GET", "/health")
|
||||
return response.get("status") == "healthy"
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
# Bank endpoints
|
||||
def get_institutions(self, country: str = "PT") -> List[Dict[str, Any]]:
|
||||
"""Get bank institutions for a country"""
|
||||
response = self._make_request(
|
||||
"GET", "/api/v1/banks/institutions", params={"country": country}
|
||||
)
|
||||
return response.get("data", [])
|
||||
|
||||
def connect_to_bank(
|
||||
self, institution_id: str, redirect_url: str = "http://localhost:8000/"
|
||||
) -> Dict[str, Any]:
|
||||
"""Connect to a bank"""
|
||||
response = self._make_request(
|
||||
"POST",
|
||||
"/api/v1/banks/connect",
|
||||
json={"institution_id": institution_id, "redirect_url": redirect_url},
|
||||
)
|
||||
return response.get("data", {})
|
||||
|
||||
def get_bank_status(self) -> List[Dict[str, Any]]:
|
||||
"""Get bank connection status"""
|
||||
response = self._make_request("GET", "/api/v1/banks/status")
|
||||
return response.get("data", [])
|
||||
|
||||
def get_supported_countries(self) -> List[Dict[str, Any]]:
|
||||
"""Get supported countries"""
|
||||
response = self._make_request("GET", "/api/v1/banks/countries")
|
||||
return response.get("data", [])
|
||||
|
||||
# Account endpoints
|
||||
def get_accounts(self) -> List[Dict[str, Any]]:
|
||||
"""Get all accounts"""
|
||||
response = self._make_request("GET", "/api/v1/accounts")
|
||||
return response.get("data", [])
|
||||
|
||||
def get_account_details(self, account_id: str) -> Dict[str, Any]:
|
||||
"""Get account details"""
|
||||
response = self._make_request("GET", f"/api/v1/accounts/{account_id}")
|
||||
return response.get("data", {})
|
||||
|
||||
def get_account_balances(self, account_id: str) -> List[Dict[str, Any]]:
|
||||
"""Get account balances"""
|
||||
response = self._make_request("GET", f"/api/v1/accounts/{account_id}/balances")
|
||||
return response.get("data", [])
|
||||
|
||||
def get_account_transactions(
|
||||
self, account_id: str, limit: int = 100, summary_only: bool = False
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get account transactions"""
|
||||
response = self._make_request(
|
||||
"GET",
|
||||
f"/api/v1/accounts/{account_id}/transactions",
|
||||
params={"limit": limit, "summary_only": summary_only},
|
||||
)
|
||||
return response.get("data", [])
|
||||
|
||||
# Transaction endpoints
|
||||
def get_all_transactions(
|
||||
self, limit: int = 100, summary_only: bool = True, **filters
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get all transactions with optional filters"""
|
||||
params = {"limit": limit, "summary_only": summary_only}
|
||||
params.update(filters)
|
||||
|
||||
response = self._make_request("GET", "/api/v1/transactions", params=params)
|
||||
return response.get("data", [])
|
||||
|
||||
def get_transaction_stats(
|
||||
self, days: int = 30, account_id: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Get transaction statistics"""
|
||||
params: Dict[str, Union[int, str]] = {"days": days}
|
||||
if account_id:
|
||||
params["account_id"] = account_id
|
||||
|
||||
response = self._make_request(
|
||||
"GET", "/api/v1/transactions/stats", params=params
|
||||
)
|
||||
return response.get("data", {})
|
||||
|
||||
# Sync endpoints
|
||||
def get_sync_status(self) -> Dict[str, Any]:
|
||||
"""Get sync status"""
|
||||
response = self._make_request("GET", "/api/v1/sync/status")
|
||||
return response.get("data", {})
|
||||
|
||||
def trigger_sync(
|
||||
self, account_ids: Optional[List[str]] = None, force: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
"""Trigger a sync"""
|
||||
data: Dict[str, Union[bool, List[str]]] = {"force": force}
|
||||
if account_ids:
|
||||
data["account_ids"] = account_ids
|
||||
|
||||
response = self._make_request("POST", "/api/v1/sync", json=data)
|
||||
return response.get("data", {})
|
||||
|
||||
def sync_now(
|
||||
self, account_ids: Optional[List[str]] = None, force: bool = False
|
||||
) -> Dict[str, Any]:
|
||||
"""Run sync synchronously"""
|
||||
data: Dict[str, Union[bool, List[str]]] = {"force": force}
|
||||
if account_ids:
|
||||
data["account_ids"] = account_ids
|
||||
|
||||
response = self._make_request("POST", "/api/v1/sync/now", json=data)
|
||||
return response.get("data", {})
|
||||
|
||||
def get_scheduler_config(self) -> Dict[str, Any]:
|
||||
"""Get scheduler configuration"""
|
||||
response = self._make_request("GET", "/api/v1/sync/scheduler")
|
||||
return response.get("data", {})
|
||||
|
||||
def update_scheduler_config(
|
||||
self,
|
||||
enabled: bool = True,
|
||||
hour: int = 3,
|
||||
minute: int = 0,
|
||||
cron: Optional[str] = None,
|
||||
) -> Dict[str, Any]:
|
||||
"""Update scheduler configuration"""
|
||||
data: Dict[str, Union[bool, int, str]] = {
|
||||
"enabled": enabled,
|
||||
"hour": hour,
|
||||
"minute": minute,
|
||||
}
|
||||
if cron:
|
||||
data["cron"] = cron
|
||||
|
||||
response = self._make_request("PUT", "/api/v1/sync/scheduler", json=data)
|
||||
return response.get("data", {})
|
||||
@@ -1,7 +1,7 @@
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.utils.network import get
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.utils.text import datefmt, print_table
|
||||
|
||||
|
||||
@@ -11,36 +11,33 @@ def balances(ctx: click.Context):
|
||||
"""
|
||||
List balances of all connected accounts
|
||||
"""
|
||||
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||
|
||||
res = get(ctx, "/requisitions/")
|
||||
accounts = set()
|
||||
for r in res.get("results", []):
|
||||
accounts.update(r.get("accounts", []))
|
||||
# Check if leggend service is available
|
||||
if not api_client.health_check():
|
||||
click.echo(
|
||||
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||
)
|
||||
return
|
||||
|
||||
accounts = api_client.get_accounts()
|
||||
|
||||
all_balances = []
|
||||
for account in accounts:
|
||||
account_ballances = get(ctx, f"/accounts/{account}/balances/").get(
|
||||
"balances", []
|
||||
)
|
||||
for balance in account_ballances:
|
||||
balance_amount = balance["balanceAmount"]
|
||||
amount = round(float(balance_amount["amount"]), 2)
|
||||
symbol = (
|
||||
"€"
|
||||
if balance_amount["currency"] == "EUR"
|
||||
else f" {balance_amount['currency']}"
|
||||
)
|
||||
for balance in account.get("balances", []):
|
||||
amount = round(float(balance["amount"]), 2)
|
||||
symbol = "€" if balance["currency"] == "EUR" else f" {balance['currency']}"
|
||||
amount_str = f"{amount}{symbol}"
|
||||
date = (
|
||||
datefmt(balance.get("lastChangeDateTime"))
|
||||
if balance.get("lastChangeDateTime")
|
||||
datefmt(balance.get("last_change_date"))
|
||||
if balance.get("last_change_date")
|
||||
else ""
|
||||
)
|
||||
all_balances.append(
|
||||
{
|
||||
"Account": account,
|
||||
"Account": account["id"],
|
||||
"Amount": amount_str,
|
||||
"Type": balance["balanceType"],
|
||||
"Type": balance["balance_type"],
|
||||
"Last change at": date,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
import os
|
||||
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
|
||||
cmd_folder = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
class BankGroup(click.Group):
|
||||
def list_commands(self, ctx):
|
||||
rv = []
|
||||
for filename in os.listdir(cmd_folder):
|
||||
if filename.endswith(".py") and not filename.startswith("__init__"):
|
||||
if filename == "list_banks.py":
|
||||
rv.append("list")
|
||||
else:
|
||||
rv.append(filename[:-3])
|
||||
rv.sort()
|
||||
return rv
|
||||
|
||||
def get_command(self, ctx, name):
|
||||
try:
|
||||
if name == "list":
|
||||
name = "list_banks"
|
||||
mod = __import__(f"leggen.commands.bank.{name}", None, None, [name])
|
||||
except ImportError:
|
||||
return
|
||||
return getattr(mod, name)
|
||||
|
||||
|
||||
@cli.group(cls=BankGroup)
|
||||
@click.pass_context
|
||||
def bank(ctx):
|
||||
"""Manage banks connections"""
|
||||
return
|
||||
@@ -1,9 +1,9 @@
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.utils.disk import save_file
|
||||
from leggen.utils.network import get, post
|
||||
from leggen.utils.text import info, print_table, warning
|
||||
from leggen.utils.text import info, print_table, warning, success
|
||||
|
||||
|
||||
@cli.command()
|
||||
@@ -12,69 +12,70 @@ def add(ctx):
|
||||
"""
|
||||
Connect to a bank
|
||||
"""
|
||||
country = click.prompt(
|
||||
"Bank Country",
|
||||
type=click.Choice(
|
||||
[
|
||||
"AT",
|
||||
"BE",
|
||||
"BG",
|
||||
"HR",
|
||||
"CY",
|
||||
"CZ",
|
||||
"DK",
|
||||
"EE",
|
||||
"FI",
|
||||
"FR",
|
||||
"DE",
|
||||
"GR",
|
||||
"HU",
|
||||
"IS",
|
||||
"IE",
|
||||
"IT",
|
||||
"LV",
|
||||
"LI",
|
||||
"LT",
|
||||
"LU",
|
||||
"MT",
|
||||
"NL",
|
||||
"NO",
|
||||
"PL",
|
||||
"PT",
|
||||
"RO",
|
||||
"SK",
|
||||
"SI",
|
||||
"ES",
|
||||
"SE",
|
||||
"GB",
|
||||
],
|
||||
case_sensitive=True,
|
||||
),
|
||||
default="PT",
|
||||
)
|
||||
info(f"Getting bank list for country: {country}")
|
||||
banks = get(ctx, "/institutions/", {"country": country})
|
||||
filtered_banks = [
|
||||
{
|
||||
"id": bank["id"],
|
||||
"name": bank["name"],
|
||||
"max_transaction_days": bank["transaction_total_days"],
|
||||
}
|
||||
for bank in banks
|
||||
]
|
||||
print_table(filtered_banks)
|
||||
allowed_ids = [str(bank["id"]) for bank in banks]
|
||||
bank_id = click.prompt("Bank ID", type=click.Choice(allowed_ids))
|
||||
click.confirm("Do you agree to connect to this bank?", abort=True)
|
||||
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||
|
||||
info(f"Connecting to bank with ID: {bank_id}")
|
||||
# Check if leggend service is available
|
||||
if not api_client.health_check():
|
||||
click.echo(
|
||||
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||
)
|
||||
return
|
||||
|
||||
res = post(
|
||||
ctx,
|
||||
"/requisitions/",
|
||||
{"institution_id": bank_id, "redirect": "http://localhost:8000/"},
|
||||
)
|
||||
try:
|
||||
# Get supported countries
|
||||
countries = api_client.get_supported_countries()
|
||||
country_codes = [c["code"] for c in countries]
|
||||
|
||||
save_file(f"req_{res['id']}.json", res)
|
||||
country = click.prompt(
|
||||
"Bank Country",
|
||||
type=click.Choice(country_codes, case_sensitive=True),
|
||||
default="PT",
|
||||
)
|
||||
|
||||
warning(f"Please open the following URL in your browser to accept: {res['link']}")
|
||||
info(f"Getting bank list for country: {country}")
|
||||
banks = api_client.get_institutions(country)
|
||||
|
||||
if not banks:
|
||||
warning(f"No banks available for country {country}")
|
||||
return
|
||||
|
||||
filtered_banks = [
|
||||
{
|
||||
"id": bank["id"],
|
||||
"name": bank["name"],
|
||||
"max_transaction_days": bank["transaction_total_days"],
|
||||
}
|
||||
for bank in banks
|
||||
]
|
||||
print_table(filtered_banks)
|
||||
|
||||
allowed_ids = [str(bank["id"]) for bank in banks]
|
||||
bank_id = click.prompt("Bank ID", type=click.Choice(allowed_ids))
|
||||
|
||||
# Show bank details
|
||||
selected_bank = next(bank for bank in banks if bank["id"] == bank_id)
|
||||
info(f"Selected bank: {selected_bank['name']}")
|
||||
|
||||
click.confirm("Do you agree to connect to this bank?", abort=True)
|
||||
|
||||
info(f"Connecting to bank with ID: {bank_id}")
|
||||
|
||||
# Connect to bank via API
|
||||
result = api_client.connect_to_bank(bank_id, "http://localhost:8000/")
|
||||
|
||||
# Save requisition details
|
||||
save_file(f"req_{result['id']}.json", result)
|
||||
|
||||
success("Bank connection request created successfully!")
|
||||
warning(
|
||||
"Please open the following URL in your browser to complete the authorization:"
|
||||
)
|
||||
click.echo(f"\n{result['link']}\n")
|
||||
|
||||
info(f"Requisition ID: {result['id']}")
|
||||
info(
|
||||
"After completing the authorization, you can check the connection status with 'leggen status'"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error: Failed to connect to bank: {str(e)}")
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.utils.network import delete as http_delete
|
||||
from leggen.utils.text import info, success
|
||||
|
||||
|
||||
@@ -16,11 +15,12 @@ def delete(ctx, requisition_id: str):
|
||||
|
||||
Check `leggen status` to get the REQUISITION_ID
|
||||
"""
|
||||
import requests
|
||||
|
||||
info(f"Deleting Bank Requisition: {requisition_id}")
|
||||
|
||||
_ = http_delete(
|
||||
ctx,
|
||||
f"/requisitions/{requisition_id}",
|
||||
)
|
||||
api_url = ctx.obj.get("api_url", "http://localhost:8000")
|
||||
res = requests.delete(f"{api_url}/requisitions/{requisition_id}")
|
||||
res.raise_for_status()
|
||||
|
||||
success(f"Bank Requisition {requisition_id} deleted")
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.utils.gocardless import REQUISITION_STATUS
|
||||
from leggen.utils.network import get
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.utils.text import datefmt, echo, info, print_table
|
||||
|
||||
|
||||
@@ -12,34 +11,46 @@ def status(ctx: click.Context):
|
||||
"""
|
||||
List all connected banks and their status
|
||||
"""
|
||||
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||
|
||||
res = get(ctx, "/requisitions/")
|
||||
# Check if leggend service is available
|
||||
if not api_client.health_check():
|
||||
click.echo(
|
||||
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||
)
|
||||
return
|
||||
|
||||
# Get bank connection status
|
||||
bank_connections = api_client.get_bank_status()
|
||||
requisitions = []
|
||||
accounts = set()
|
||||
for r in res["results"]:
|
||||
for conn in bank_connections:
|
||||
requisitions.append(
|
||||
{
|
||||
"Bank": r["institution_id"],
|
||||
"Status": REQUISITION_STATUS.get(r["status"], "UNKNOWN"),
|
||||
"Created at": datefmt(r["created"]),
|
||||
"Requisition ID": r["id"],
|
||||
"Bank": conn["bank_id"],
|
||||
"Status": conn["status_display"],
|
||||
"Created at": datefmt(conn["created_at"]),
|
||||
"Requisition ID": conn["requisition_id"],
|
||||
}
|
||||
)
|
||||
accounts.update(r.get("accounts", []))
|
||||
info("Banks")
|
||||
print_table(requisitions)
|
||||
|
||||
# Get account details
|
||||
accounts = api_client.get_accounts()
|
||||
account_details = []
|
||||
for account in accounts:
|
||||
details = get(ctx, f"/accounts/{account}")
|
||||
account_details.append(
|
||||
{
|
||||
"ID": details["id"],
|
||||
"Bank": details["institution_id"],
|
||||
"Status": details["status"],
|
||||
"IBAN": details.get("iban", "N/A"),
|
||||
"Created at": datefmt(details["created"]),
|
||||
"Last accessed at": datefmt(details["last_accessed"]),
|
||||
"ID": account["id"],
|
||||
"Bank": account["institution_id"],
|
||||
"Status": account["status"],
|
||||
"IBAN": account.get("iban", "N/A"),
|
||||
"Created at": datefmt(account["created"]),
|
||||
"Last accessed at": (
|
||||
datefmt(account["last_accessed"])
|
||||
if account.get("last_accessed")
|
||||
else "N/A"
|
||||
),
|
||||
}
|
||||
)
|
||||
echo()
|
||||
|
||||
@@ -1,80 +1,61 @@
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.utils.database import persist_balance, save_transactions
|
||||
from leggen.utils.gocardless import REQUISITION_STATUS
|
||||
from leggen.utils.network import get
|
||||
from leggen.utils.notifications import send_expire_notification, send_notification
|
||||
from leggen.utils.text import error, info
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.utils.text import error, info, success
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.option("--wait", is_flag=True, help="Wait for sync to complete (synchronous)")
|
||||
@click.option("--force", is_flag=True, help="Force sync even if already running")
|
||||
@click.pass_context
|
||||
def sync(ctx: click.Context):
|
||||
def sync(ctx: click.Context, wait: bool, force: bool):
|
||||
"""
|
||||
Sync all transactions with database
|
||||
"""
|
||||
info("Getting accounts details")
|
||||
res = get(ctx, "/requisitions/")
|
||||
accounts = set()
|
||||
for r in res.get("results", []):
|
||||
accounts.update(r.get("accounts", []))
|
||||
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||
|
||||
for r in res["results"]:
|
||||
account_status = REQUISITION_STATUS.get(r["status"], "UNKNOWN")
|
||||
if account_status != "LINKED":
|
||||
created_at = datetime.fromisoformat(r["createdAt"])
|
||||
now = datetime.now()
|
||||
if (created_at - now).days <= 15:
|
||||
n = {
|
||||
"bank": r["institution_id"],
|
||||
"status": REQUISITION_STATUS.get(r["status"], "UNKNOWN"),
|
||||
"created_at": created_at.timestamp(),
|
||||
"requisition_id": r["id"],
|
||||
"days_left": (created_at - now).days,
|
||||
}
|
||||
send_expire_notification(ctx, n)
|
||||
# Check if leggend service is available
|
||||
if not api_client.health_check():
|
||||
error("Cannot connect to leggend service. Please ensure it's running.")
|
||||
return
|
||||
|
||||
info(f"Syncing balances for {len(accounts)} accounts")
|
||||
try:
|
||||
if wait:
|
||||
# Run sync synchronously and wait for completion
|
||||
info("Starting synchronous sync...")
|
||||
result = api_client.sync_now(force=force)
|
||||
|
||||
for account in accounts:
|
||||
account_details = get(ctx, f"/accounts/{account}")
|
||||
account_balances = get(ctx, f"/accounts/{account}/balances/").get(
|
||||
"balances", []
|
||||
)
|
||||
for balance in account_balances:
|
||||
balance_amount = balance["balanceAmount"]
|
||||
amount = round(float(balance_amount["amount"]), 2)
|
||||
balance_document = {
|
||||
"account_id": account,
|
||||
"bank": account_details["institution_id"],
|
||||
"status": account_details["status"],
|
||||
"iban": account_details.get("iban", "N/A"),
|
||||
"amount": amount,
|
||||
"currency": balance_amount["currency"],
|
||||
"type": balance["balanceType"],
|
||||
"timestamp": datetime.now().timestamp(),
|
||||
}
|
||||
try:
|
||||
persist_balance(ctx, account, balance_document)
|
||||
except Exception as e:
|
||||
error(
|
||||
f"[{account}] Error: Sync failed, skipping account, exception: {e}"
|
||||
if result.get("success"):
|
||||
success("Sync completed successfully!")
|
||||
info(f"Accounts processed: {result.get('accounts_processed', 0)}")
|
||||
info(f"Transactions added: {result.get('transactions_added', 0)}")
|
||||
info(f"Balances updated: {result.get('balances_updated', 0)}")
|
||||
if result.get("duration_seconds"):
|
||||
info(f"Duration: {result['duration_seconds']:.2f} seconds")
|
||||
|
||||
if result.get("errors"):
|
||||
error(f"Errors encountered: {len(result['errors'])}")
|
||||
for err in result["errors"]:
|
||||
error(f" - {err}")
|
||||
else:
|
||||
error("Sync failed")
|
||||
if result.get("errors"):
|
||||
for err in result["errors"]:
|
||||
error(f" - {err}")
|
||||
else:
|
||||
# Trigger async sync
|
||||
info("Starting background sync...")
|
||||
result = api_client.trigger_sync(force=force)
|
||||
|
||||
if result.get("sync_started"):
|
||||
success("Sync started successfully in the background")
|
||||
info(
|
||||
"Use 'leggen sync --wait' to run synchronously or check status with API"
|
||||
)
|
||||
continue
|
||||
else:
|
||||
error("Failed to start sync")
|
||||
|
||||
info(f"Syncing transactions for {len(accounts)} accounts")
|
||||
|
||||
for account in accounts:
|
||||
try:
|
||||
new_transactions = save_transactions(ctx, account)
|
||||
except Exception as e:
|
||||
error(f"[{account}] Error: Sync failed, skipping account, exception: {e}")
|
||||
continue
|
||||
try:
|
||||
send_notification(ctx, new_transactions)
|
||||
except Exception as e:
|
||||
error(f"[{account}] Error: Notification failed, exception: {e}")
|
||||
continue
|
||||
except Exception as e:
|
||||
error(f"Sync failed: {str(e)}")
|
||||
return
|
||||
|
||||
@@ -1,31 +1,18 @@
|
||||
import click
|
||||
|
||||
from leggen.main import cli
|
||||
from leggen.utils.network import get
|
||||
from leggen.utils.text import info, print_table
|
||||
|
||||
|
||||
def print_transactions(
|
||||
ctx: click.Context, account_info: dict, account_transactions: dict
|
||||
):
|
||||
info(f"Bank: {account_info['institution_id']}")
|
||||
info(f"IBAN: {account_info.get('iban', 'N/A')}")
|
||||
all_transactions = []
|
||||
for transaction in account_transactions.get("booked", []):
|
||||
transaction["TYPE"] = "booked"
|
||||
all_transactions.append(transaction)
|
||||
|
||||
for transaction in account_transactions.get("pending", []):
|
||||
transaction["TYPE"] = "pending"
|
||||
all_transactions.append(transaction)
|
||||
|
||||
print_table(all_transactions)
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.utils.text import datefmt, info, print_table
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.option("-a", "--account", type=str, help="Account ID")
|
||||
@click.option(
|
||||
"-l", "--limit", type=int, default=50, help="Number of transactions to show"
|
||||
)
|
||||
@click.option("--full", is_flag=True, help="Show full transaction details")
|
||||
@click.pass_context
|
||||
def transactions(ctx: click.Context, account: str):
|
||||
def transactions(ctx: click.Context, account: str, limit: int, full: bool):
|
||||
"""
|
||||
List transactions
|
||||
|
||||
@@ -33,20 +20,76 @@ def transactions(ctx: click.Context, account: str):
|
||||
|
||||
If the --account option is used, it will only list transactions for that account.
|
||||
"""
|
||||
if account:
|
||||
account_info = get(ctx, f"/accounts/{account}")
|
||||
account_transactions = get(ctx, f"/accounts/{account}/transactions/").get(
|
||||
"transactions", []
|
||||
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
|
||||
|
||||
# Check if leggend service is available
|
||||
if not api_client.health_check():
|
||||
click.echo(
|
||||
"Error: Cannot connect to leggend service. Please ensure it's running."
|
||||
)
|
||||
print_transactions(ctx, account_info, account_transactions)
|
||||
else:
|
||||
res = get(ctx, "/requisitions/")
|
||||
accounts = set()
|
||||
for r in res["results"]:
|
||||
accounts.update(r.get("accounts", []))
|
||||
for account in accounts:
|
||||
account_details = get(ctx, f"/accounts/{account}")
|
||||
account_transactions = get(ctx, f"/accounts/{account}/transactions/").get(
|
||||
"transactions", []
|
||||
return
|
||||
|
||||
try:
|
||||
if account:
|
||||
# Get transactions for specific account
|
||||
account_details = api_client.get_account_details(account)
|
||||
transactions_data = api_client.get_account_transactions(
|
||||
account, limit=limit, summary_only=not full
|
||||
)
|
||||
print_transactions(ctx, account_details, account_transactions)
|
||||
|
||||
info(f"Bank: {account_details['institution_id']}")
|
||||
info(f"IBAN: {account_details.get('iban', 'N/A')}")
|
||||
|
||||
else:
|
||||
# Get all transactions
|
||||
transactions_data = api_client.get_all_transactions(
|
||||
limit=limit, summary_only=not full, account_id=account
|
||||
)
|
||||
|
||||
# Format transactions for display
|
||||
if full:
|
||||
# Full transaction details
|
||||
formatted_transactions = []
|
||||
for txn in transactions_data:
|
||||
# Handle optional internal_transaction_id
|
||||
txn_id = txn.get("internal_transaction_id")
|
||||
txn_id_display = txn_id[:12] + "..." if txn_id else "N/A"
|
||||
|
||||
formatted_transactions.append(
|
||||
{
|
||||
"ID": txn_id_display,
|
||||
"Date": datefmt(txn["transaction_date"]),
|
||||
"Description": txn["description"][:50] + "..."
|
||||
if len(txn["description"]) > 50
|
||||
else txn["description"],
|
||||
"Amount": f"{txn['transaction_value']:.2f} {txn['transaction_currency']}",
|
||||
"Status": txn["transaction_status"].upper(),
|
||||
"Account": txn["account_id"][:8] + "...",
|
||||
}
|
||||
)
|
||||
else:
|
||||
# Summary view
|
||||
formatted_transactions = []
|
||||
for txn in transactions_data:
|
||||
# Handle optional internal_transaction_id
|
||||
txn_id = txn.get("internal_transaction_id")
|
||||
|
||||
formatted_transactions.append(
|
||||
{
|
||||
"Date": datefmt(txn["date"]),
|
||||
"Description": txn["description"][:60] + "..."
|
||||
if len(txn["description"]) > 60
|
||||
else txn["description"],
|
||||
"Amount": f"{txn['amount']:.2f} {txn['currency']}",
|
||||
"Status": txn["status"].upper(),
|
||||
}
|
||||
)
|
||||
|
||||
if formatted_transactions:
|
||||
print_table(formatted_transactions)
|
||||
info(f"Showing {len(formatted_transactions)} transactions")
|
||||
else:
|
||||
info("No transactions found")
|
||||
|
||||
except Exception as e:
|
||||
click.echo(f"Error: Failed to get transactions: {str(e)}")
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
import click
|
||||
from pymongo import MongoClient
|
||||
from pymongo.errors import DuplicateKeyError
|
||||
|
||||
from leggen.utils.text import success, warning
|
||||
|
||||
|
||||
def persist_balances(ctx: click.Context, balance: dict) -> None:
|
||||
# Connect to MongoDB
|
||||
mongo_uri = ctx.obj.get("database", {}).get("mongodb", {}).get("uri")
|
||||
client = MongoClient(mongo_uri)
|
||||
db = client["leggen"]
|
||||
balances_collection = db["balances"]
|
||||
|
||||
# Insert balance into MongoDB
|
||||
try:
|
||||
balances_collection.insert_one(balance)
|
||||
success(
|
||||
f"[{balance['account_id']}] Inserted new balance if type {balance['type']}"
|
||||
)
|
||||
except DuplicateKeyError:
|
||||
warning(f"[{balance['account_id']}] Skipped duplicate balance")
|
||||
|
||||
client.close()
|
||||
|
||||
|
||||
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||
# Connect to MongoDB
|
||||
mongo_uri = ctx.obj.get("database", {}).get("mongodb", {}).get("uri")
|
||||
client = MongoClient(mongo_uri)
|
||||
db = client["leggen"]
|
||||
transactions_collection = db["transactions"]
|
||||
|
||||
# Create a unique index on internalTransactionId
|
||||
transactions_collection.create_index("internalTransactionId", unique=True)
|
||||
|
||||
# Insert transactions into MongoDB
|
||||
duplicates_count = 0
|
||||
|
||||
new_transactions = []
|
||||
|
||||
for transaction in transactions:
|
||||
try:
|
||||
transactions_collection.insert_one(transaction)
|
||||
new_transactions.append(transaction)
|
||||
except DuplicateKeyError:
|
||||
# A transaction with the same ID already exists, skip insertion
|
||||
duplicates_count += 1
|
||||
|
||||
success(f"[{account}] Inserted {len(new_transactions)} new transactions")
|
||||
if duplicates_count:
|
||||
warning(f"[{account}] Skipped {duplicates_count} duplicate transactions")
|
||||
|
||||
return new_transactions
|
||||
@@ -9,7 +9,11 @@ from leggen.utils.text import success, warning
|
||||
|
||||
def persist_balances(ctx: click.Context, balance: dict):
|
||||
# Connect to SQLite database
|
||||
conn = sqlite3.connect("./leggen.db")
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the balances table if it doesn't exist
|
||||
@@ -27,6 +31,20 @@ def persist_balances(ctx: click.Context, balance: dict):
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for better performance
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_account_id
|
||||
ON balances(account_id)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_timestamp
|
||||
ON balances(timestamp)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp
|
||||
ON balances(account_id, type, timestamp)"""
|
||||
)
|
||||
|
||||
# Insert balance into SQLite database
|
||||
try:
|
||||
cursor.execute(
|
||||
@@ -65,7 +83,11 @@ def persist_balances(ctx: click.Context, balance: dict):
|
||||
|
||||
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||
# Connect to SQLite database
|
||||
conn = sqlite3.connect("./leggen.db")
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the transactions table if it doesn't exist
|
||||
@@ -84,6 +106,24 @@ def persist_transactions(ctx: click.Context, account: str, transactions: list) -
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for better performance
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_id
|
||||
ON transactions(accountId)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_date
|
||||
ON transactions(transactionDate)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_date
|
||||
ON transactions(accountId, transactionDate)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_amount
|
||||
ON transactions(transactionValue)"""
|
||||
)
|
||||
|
||||
# Insert transactions into SQLite database
|
||||
duplicates_count = 0
|
||||
|
||||
@@ -134,3 +174,210 @@ def persist_transactions(ctx: click.Context, account: str, transactions: list) -
|
||||
warning(f"[{account}] Skipped {duplicates_count} duplicate transactions")
|
||||
|
||||
return new_transactions
|
||||
|
||||
|
||||
def get_transactions(
|
||||
account_id=None,
|
||||
limit=100,
|
||||
offset=0,
|
||||
date_from=None,
|
||||
date_to=None,
|
||||
min_amount=None,
|
||||
max_amount=None,
|
||||
search=None,
|
||||
):
|
||||
"""Get transactions from SQLite database with optional filtering"""
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row # Enable dict-like access
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Build query with filters
|
||||
query = "SELECT * FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
if date_from:
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(date_from)
|
||||
|
||||
if date_to:
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(date_to)
|
||||
|
||||
if min_amount is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(min_amount)
|
||||
|
||||
if max_amount is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(max_amount)
|
||||
|
||||
if search:
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{search}%")
|
||||
|
||||
# Add ordering and pagination
|
||||
query += " ORDER BY transactionDate DESC"
|
||||
|
||||
if limit:
|
||||
query += " LIMIT ?"
|
||||
params.append(limit)
|
||||
|
||||
if offset:
|
||||
query += " OFFSET ?"
|
||||
params.append(offset)
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Convert to list of dicts and parse JSON fields
|
||||
transactions = []
|
||||
for row in rows:
|
||||
transaction = dict(row)
|
||||
if transaction["rawTransaction"]:
|
||||
transaction["rawTransaction"] = json.loads(
|
||||
transaction["rawTransaction"]
|
||||
)
|
||||
transactions.append(transaction)
|
||||
|
||||
conn.close()
|
||||
return transactions
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_balances(account_id=None):
|
||||
"""Get latest balances from SQLite database"""
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Get latest balance for each account_id and type combination
|
||||
query = """
|
||||
SELECT * FROM balances b1
|
||||
WHERE b1.timestamp = (
|
||||
SELECT MAX(b2.timestamp)
|
||||
FROM balances b2
|
||||
WHERE b2.account_id = b1.account_id AND b2.type = b1.type
|
||||
)
|
||||
"""
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND b1.account_id = ?"
|
||||
params.append(account_id)
|
||||
|
||||
query += " ORDER BY b1.account_id, b1.type"
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
balances = [dict(row) for row in rows]
|
||||
conn.close()
|
||||
return balances
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_account_summary(account_id):
|
||||
"""Get basic account info from transactions table (avoids GoCardless API call)"""
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
if not db_path.exists():
|
||||
return None
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
# Get account info from most recent transaction
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT DISTINCT accountId, institutionId, iban
|
||||
FROM transactions
|
||||
WHERE accountId = ?
|
||||
ORDER BY transactionDate DESC
|
||||
LIMIT 1
|
||||
""",
|
||||
(account_id,),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return dict(row)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_transaction_count(account_id=None, **filters):
|
||||
"""Get total count of transactions matching filters"""
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT COUNT(*) FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
# Add same filters as get_transactions
|
||||
if filters.get("date_from"):
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(filters["date_from"])
|
||||
|
||||
if filters.get("date_to"):
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(filters["date_to"])
|
||||
|
||||
if filters.get("min_amount") is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(filters["min_amount"])
|
||||
|
||||
if filters.get("max_amount") is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(filters["max_amount"])
|
||||
|
||||
if filters.get("search"):
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{filters['search']}%")
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
count = cursor.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
@@ -5,7 +5,6 @@ from pathlib import Path
|
||||
|
||||
import click
|
||||
|
||||
from leggen.utils.auth import get_token
|
||||
from leggen.utils.config import load_config
|
||||
from leggen.utils.text import error
|
||||
|
||||
@@ -78,7 +77,7 @@ class Group(click.Group):
|
||||
"-c",
|
||||
"--config",
|
||||
type=click.Path(dir_okay=False),
|
||||
default=click.get_app_dir("leggen") / Path("config.toml"),
|
||||
default=Path.home() / ".config" / "leggen" / "config.toml",
|
||||
show_default=True,
|
||||
callback=load_config,
|
||||
is_eager=True,
|
||||
@@ -87,13 +86,21 @@ class Group(click.Group):
|
||||
show_envvar=True,
|
||||
help="Path to TOML configuration file",
|
||||
)
|
||||
@click.option(
|
||||
"--api-url",
|
||||
type=str,
|
||||
default="http://localhost:8000",
|
||||
envvar="LEGGEND_API_URL",
|
||||
show_envvar=True,
|
||||
help="URL of the leggend API service",
|
||||
)
|
||||
@click.group(
|
||||
cls=Group,
|
||||
context_settings={"help_option_names": ["-h", "--help"]},
|
||||
)
|
||||
@click.version_option(package_name="leggen")
|
||||
@click.pass_context
|
||||
def cli(ctx: click.Context):
|
||||
def cli(ctx: click.Context, api_url: str):
|
||||
"""
|
||||
Leggen: An Open Banking CLI
|
||||
"""
|
||||
@@ -102,5 +109,5 @@ def cli(ctx: click.Context):
|
||||
if "--help" in sys.argv[1:] or "-h" in sys.argv[1:]:
|
||||
return
|
||||
|
||||
token = get_token(ctx)
|
||||
ctx.obj["headers"] = {"Authorization": f"Bearer {token}"}
|
||||
# Store API URL in context for commands to use
|
||||
ctx.obj["api_url"] = api_url
|
||||
|
||||
@@ -7,11 +7,24 @@ from leggen.utils.text import info
|
||||
def escape_markdown(text: str) -> str:
|
||||
return (
|
||||
str(text)
|
||||
.replace("-", "\\-")
|
||||
.replace("_", "\\_")
|
||||
.replace("*", "\\*")
|
||||
.replace("[", "\\[")
|
||||
.replace("]", "\\]")
|
||||
.replace("(", "\\(")
|
||||
.replace(")", "\\)")
|
||||
.replace("~", "\\~")
|
||||
.replace("`", "\\`")
|
||||
.replace(">", "\\>")
|
||||
.replace("#", "\\#")
|
||||
.replace(".", "\\.")
|
||||
.replace("$", "\\$")
|
||||
.replace("+", "\\+")
|
||||
.replace("-", "\\-")
|
||||
.replace("=", "\\=")
|
||||
.replace("|", "\\|")
|
||||
.replace("{", "\\{")
|
||||
.replace("}", "\\}")
|
||||
.replace(".", "\\.")
|
||||
.replace("!", "\\!")
|
||||
)
|
||||
|
||||
|
||||
@@ -21,13 +34,15 @@ def send_expire_notification(ctx: click.Context, notification: dict):
|
||||
bot_url = f"https://api.telegram.org/bot{token}/sendMessage"
|
||||
info("Sending expiration notification to Telegram")
|
||||
message = "*💲 [Leggen](https://github.com/elisiariocouto/leggen)*\n"
|
||||
message += f"Your account {notification['bank']} ({notification['requisition_id']}) is in {notification['status']} status. Days left: {notification['days_left']}\n"
|
||||
message += escape_markdown(
|
||||
f"Your account {notification['bank']} ({notification['requisition_id']}) is in {notification['status']} status. Days left: {notification['days_left']}\n"
|
||||
)
|
||||
|
||||
res = requests.post(
|
||||
bot_url,
|
||||
json={
|
||||
"chat_id": chat_id,
|
||||
"text": escape_markdown(message),
|
||||
"text": message,
|
||||
"parse_mode": "MarkdownV2",
|
||||
},
|
||||
)
|
||||
@@ -47,15 +62,15 @@ def send_transaction_message(ctx: click.Context, transactions: list):
|
||||
message += f"{len(transactions)} new transaction matches\n\n"
|
||||
|
||||
for transaction in transactions:
|
||||
message += f"*Name*: {transaction['name']}\n"
|
||||
message += f"*Value*: {transaction['value']}{transaction['currency']}\n"
|
||||
message += f"*Date*: {transaction['date']}\n\n"
|
||||
message += f"*Name*: {escape_markdown(transaction['name'])}\n"
|
||||
message += f"*Value*: {escape_markdown(transaction['value'])}{escape_markdown(transaction['currency'])}\n"
|
||||
message += f"*Date*: {escape_markdown(transaction['date'])}\n\n"
|
||||
|
||||
res = requests.post(
|
||||
bot_url,
|
||||
json={
|
||||
"chat_id": chat_id,
|
||||
"text": escape_markdown(message),
|
||||
"text": message,
|
||||
"parse_mode": "MarkdownV2",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -1,61 +0,0 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import click
|
||||
import requests
|
||||
|
||||
from leggen.utils.text import warning
|
||||
|
||||
|
||||
def create_token(ctx: click.Context) -> str:
|
||||
"""
|
||||
Create a new token
|
||||
"""
|
||||
res = requests.post(
|
||||
f"{ctx.obj['gocardless']['url']}/token/new/",
|
||||
json={
|
||||
"secret_id": ctx.obj["gocardless"]["key"],
|
||||
"secret_key": ctx.obj["gocardless"]["secret"],
|
||||
},
|
||||
)
|
||||
res.raise_for_status()
|
||||
auth = res.json()
|
||||
save_auth(auth)
|
||||
return auth["access"]
|
||||
|
||||
|
||||
def get_token(ctx: click.Context) -> str:
|
||||
"""
|
||||
Get the token from the auth file or request a new one
|
||||
"""
|
||||
auth_file = click.get_app_dir("leggen") / Path("auth.json")
|
||||
if auth_file.exists():
|
||||
with click.open_file(str(auth_file), "r") as f:
|
||||
auth = json.load(f)
|
||||
if not auth.get("access"):
|
||||
return create_token(ctx)
|
||||
|
||||
res = requests.post(
|
||||
f"{ctx.obj['gocardless']['url']}/token/refresh/",
|
||||
json={"refresh": auth["refresh"]},
|
||||
)
|
||||
try:
|
||||
res.raise_for_status()
|
||||
auth.update(res.json())
|
||||
save_auth(auth)
|
||||
return auth["access"]
|
||||
except requests.exceptions.HTTPError:
|
||||
warning(
|
||||
f"Token probably expired, requesting a new one.\nResponse: {res.status_code}\n{res.text}"
|
||||
)
|
||||
return create_token(ctx)
|
||||
else:
|
||||
return create_token(ctx)
|
||||
|
||||
|
||||
def save_auth(d: dict):
|
||||
Path.mkdir(Path(click.get_app_dir("leggen")), exist_ok=True)
|
||||
auth_file = click.get_app_dir("leggen") / Path("auth.json")
|
||||
|
||||
with click.open_file(str(auth_file), "w") as f:
|
||||
json.dump(d, f)
|
||||
@@ -1,7 +1,7 @@
|
||||
import sys
|
||||
import tomllib
|
||||
|
||||
import click
|
||||
import tomllib
|
||||
|
||||
from leggen.utils.text import error
|
||||
|
||||
|
||||
@@ -2,55 +2,50 @@ from datetime import datetime
|
||||
|
||||
import click
|
||||
|
||||
import leggen.database.mongo as mongodb_engine
|
||||
import leggen.database.sqlite as sqlite_engine
|
||||
from leggen.utils.network import get
|
||||
from leggen.utils.text import info, warning
|
||||
|
||||
|
||||
def persist_balance(ctx: click.Context, account: str, balance: dict) -> None:
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", False)
|
||||
mongodb = ctx.obj.get("database", {}).get("mongodb", False)
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||
|
||||
if not sqlite and not mongodb:
|
||||
warning("No database engine is enabled, skipping balance saving")
|
||||
if not sqlite:
|
||||
warning("SQLite database is disabled, skipping balance saving")
|
||||
return
|
||||
|
||||
if sqlite:
|
||||
info(f"[{account}] Fetched balances, saving to SQLite")
|
||||
sqlite_engine.persist_balances(ctx, balance)
|
||||
else:
|
||||
info(f"[{account}] Fetched balances, saving to MongoDB")
|
||||
mongodb_engine.persist_balances(ctx, balance)
|
||||
info(f"[{account}] Fetched balances, saving to SQLite")
|
||||
sqlite_engine.persist_balances(ctx, balance)
|
||||
|
||||
|
||||
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", False)
|
||||
mongodb = ctx.obj.get("database", {}).get("mongodb", False)
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||
|
||||
if not sqlite and not mongodb:
|
||||
warning("No database engine is enabled, skipping transaction saving")
|
||||
if not sqlite:
|
||||
warning("SQLite database is disabled, skipping transaction saving")
|
||||
# WARNING: This will return the transactions list as is, without saving it to any database
|
||||
# Possible duplicate notifications will be sent if the filters are enabled
|
||||
return transactions
|
||||
|
||||
if sqlite:
|
||||
info(f"[{account}] Fetched {len(transactions)} transactions, saving to SQLite")
|
||||
return sqlite_engine.persist_transactions(ctx, account, transactions)
|
||||
else:
|
||||
info(f"[{account}] Fetched {len(transactions)} transactions, saving to MongoDB")
|
||||
return mongodb_engine.persist_transactions(ctx, account, transactions)
|
||||
info(f"[{account}] Fetched {len(transactions)} transactions, saving to SQLite")
|
||||
return sqlite_engine.persist_transactions(ctx, account, transactions)
|
||||
|
||||
|
||||
def save_transactions(ctx: click.Context, account: str) -> list:
|
||||
import requests
|
||||
|
||||
api_url = ctx.obj.get("api_url", "http://localhost:8000")
|
||||
|
||||
info(f"[{account}] Getting account details")
|
||||
account_info = get(ctx, f"/accounts/{account}")
|
||||
res = requests.get(f"{api_url}/accounts/{account}")
|
||||
res.raise_for_status()
|
||||
account_info = res.json()
|
||||
|
||||
info(f"[{account}] Getting transactions")
|
||||
transactions = []
|
||||
|
||||
account_transactions = get(ctx, f"/accounts/{account}/transactions/").get(
|
||||
"transactions", []
|
||||
)
|
||||
res = requests.get(f"{api_url}/accounts/{account}/transactions/")
|
||||
res.raise_for_status()
|
||||
account_transactions = res.json().get("transactions", [])
|
||||
|
||||
for transaction in account_transactions.get("booked", []):
|
||||
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
import click
|
||||
import requests
|
||||
|
||||
from leggen.utils.text import error
|
||||
|
||||
|
||||
def get(ctx: click.Context, path: str, params: dict = {}):
|
||||
"""
|
||||
GET request to the GoCardless API
|
||||
"""
|
||||
|
||||
url = f"{ctx.obj['gocardless']['url']}{path}"
|
||||
res = requests.get(url, headers=ctx.obj["headers"], params=params)
|
||||
try:
|
||||
res.raise_for_status()
|
||||
except Exception as e:
|
||||
error(f"Error: {e}\n{res.text}")
|
||||
ctx.abort()
|
||||
return res.json()
|
||||
|
||||
|
||||
def post(ctx: click.Context, path: str, data: dict = {}):
|
||||
"""
|
||||
POST request to the GoCardless API
|
||||
"""
|
||||
|
||||
url = f"{ctx.obj['gocardless']['url']}{path}"
|
||||
res = requests.post(url, headers=ctx.obj["headers"], json=data)
|
||||
try:
|
||||
res.raise_for_status()
|
||||
except Exception as e:
|
||||
error(f"Error: {e}\n{res.text}")
|
||||
ctx.abort()
|
||||
return res.json()
|
||||
|
||||
|
||||
def put(ctx: click.Context, path: str, data: dict = {}):
|
||||
"""
|
||||
PUT request to the GoCardless API
|
||||
"""
|
||||
|
||||
url = f"{ctx.obj['gocardless']['url']}{path}"
|
||||
res = requests.put(url, headers=ctx.obj["headers"], json=data)
|
||||
try:
|
||||
res.raise_for_status()
|
||||
except Exception as e:
|
||||
error(f"Error: {e}\n{res.text}")
|
||||
ctx.abort()
|
||||
return res.json()
|
||||
|
||||
|
||||
def delete(ctx: click.Context, path: str):
|
||||
"""
|
||||
DELETE request to the GoCardless API
|
||||
"""
|
||||
|
||||
url = f"{ctx.obj['gocardless']['url']}{path}"
|
||||
res = requests.delete(url, headers=ctx.obj["headers"])
|
||||
try:
|
||||
res.raise_for_status()
|
||||
except Exception as e:
|
||||
error(f"Error: {e}\n{res.text}")
|
||||
ctx.abort()
|
||||
return res.json()
|
||||
0
leggend/__init__.py
Normal file
0
leggend/__init__.py
Normal file
66
leggend/api/models/accounts.py
Normal file
66
leggend/api/models/accounts.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional, Dict, Any
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class AccountBalance(BaseModel):
|
||||
"""Account balance model"""
|
||||
|
||||
amount: float
|
||||
currency: str
|
||||
balance_type: str
|
||||
last_change_date: Optional[datetime] = None
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||
|
||||
|
||||
class AccountDetails(BaseModel):
|
||||
"""Account details model"""
|
||||
|
||||
id: str
|
||||
institution_id: str
|
||||
status: str
|
||||
iban: Optional[str] = None
|
||||
name: Optional[str] = None
|
||||
currency: Optional[str] = None
|
||||
created: datetime
|
||||
last_accessed: Optional[datetime] = None
|
||||
balances: List[AccountBalance] = []
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||
|
||||
|
||||
class Transaction(BaseModel):
|
||||
"""Transaction model"""
|
||||
|
||||
internal_transaction_id: Optional[str] = None
|
||||
institution_id: str
|
||||
iban: Optional[str] = None
|
||||
account_id: str
|
||||
transaction_date: datetime
|
||||
description: str
|
||||
transaction_value: float
|
||||
transaction_currency: str
|
||||
transaction_status: str # "booked" or "pending"
|
||||
raw_transaction: Dict[str, Any]
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
|
||||
|
||||
class TransactionSummary(BaseModel):
|
||||
"""Transaction summary for lists"""
|
||||
|
||||
internal_transaction_id: Optional[str] = None
|
||||
date: datetime
|
||||
description: str
|
||||
amount: float
|
||||
currency: str
|
||||
status: str
|
||||
account_id: str
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
52
leggend/api/models/banks.py
Normal file
52
leggend/api/models/banks.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class BankInstitution(BaseModel):
|
||||
"""Bank institution model"""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
bic: Optional[str] = None
|
||||
transaction_total_days: int
|
||||
countries: List[str]
|
||||
logo: Optional[str] = None
|
||||
|
||||
|
||||
class BankConnectionRequest(BaseModel):
|
||||
"""Request to connect to a bank"""
|
||||
|
||||
institution_id: str
|
||||
redirect_url: Optional[str] = "http://localhost:8000/"
|
||||
|
||||
|
||||
class BankRequisition(BaseModel):
|
||||
"""Bank requisition/connection model"""
|
||||
|
||||
id: str
|
||||
institution_id: str
|
||||
status: str
|
||||
status_display: Optional[str] = None
|
||||
created: datetime
|
||||
link: str
|
||||
accounts: List[str] = []
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
|
||||
|
||||
class BankConnectionStatus(BaseModel):
|
||||
"""Bank connection status response"""
|
||||
|
||||
bank_id: str
|
||||
bank_name: str
|
||||
status: str
|
||||
status_display: str
|
||||
created_at: datetime
|
||||
requisition_id: str
|
||||
accounts_count: int
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
29
leggend/api/models/common.py
Normal file
29
leggend/api/models/common.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class APIResponse(BaseModel):
|
||||
"""Base API response model"""
|
||||
|
||||
success: bool = True
|
||||
message: Optional[str] = None
|
||||
data: Optional[Any] = None
|
||||
|
||||
|
||||
class ErrorResponse(BaseModel):
|
||||
"""Error response model"""
|
||||
|
||||
success: bool = False
|
||||
message: str
|
||||
error_code: Optional[str] = None
|
||||
details: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class PaginatedResponse(BaseModel):
|
||||
"""Paginated response model"""
|
||||
|
||||
success: bool = True
|
||||
data: list
|
||||
pagination: Dict[str, Any]
|
||||
message: Optional[str] = None
|
||||
53
leggend/api/models/notifications.py
Normal file
53
leggend/api/models/notifications.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from typing import Dict, Optional, List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class DiscordConfig(BaseModel):
|
||||
"""Discord notification configuration"""
|
||||
|
||||
webhook: str
|
||||
enabled: bool = True
|
||||
|
||||
|
||||
class TelegramConfig(BaseModel):
|
||||
"""Telegram notification configuration"""
|
||||
|
||||
token: str
|
||||
chat_id: int
|
||||
enabled: bool = True
|
||||
|
||||
|
||||
class NotificationFilters(BaseModel):
|
||||
"""Notification filters configuration"""
|
||||
|
||||
case_insensitive: Dict[str, str] = {}
|
||||
case_sensitive: Optional[Dict[str, str]] = None
|
||||
amount_threshold: Optional[float] = None
|
||||
keywords: List[str] = []
|
||||
|
||||
|
||||
class NotificationSettings(BaseModel):
|
||||
"""Complete notification settings"""
|
||||
|
||||
discord: Optional[DiscordConfig] = None
|
||||
telegram: Optional[TelegramConfig] = None
|
||||
filters: NotificationFilters = NotificationFilters()
|
||||
|
||||
|
||||
class NotificationTest(BaseModel):
|
||||
"""Test notification request"""
|
||||
|
||||
service: str # "discord" or "telegram"
|
||||
message: str = "Test notification from Leggen"
|
||||
|
||||
|
||||
class NotificationHistory(BaseModel):
|
||||
"""Notification history entry"""
|
||||
|
||||
id: str
|
||||
service: str
|
||||
message: str
|
||||
status: str # "sent", "failed"
|
||||
sent_at: str
|
||||
error: Optional[str] = None
|
||||
55
leggend/api/models/sync.py
Normal file
55
leggend/api/models/sync.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class SyncRequest(BaseModel):
|
||||
"""Request to trigger a sync"""
|
||||
|
||||
account_ids: Optional[list[str]] = None # If None, sync all accounts
|
||||
force: bool = False # Force sync even if recently synced
|
||||
|
||||
|
||||
class SyncStatus(BaseModel):
|
||||
"""Sync operation status"""
|
||||
|
||||
is_running: bool
|
||||
last_sync: Optional[datetime] = None
|
||||
next_sync: Optional[datetime] = None
|
||||
accounts_synced: int = 0
|
||||
total_accounts: int = 0
|
||||
transactions_added: int = 0
|
||||
errors: list[str] = []
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat() if v else None}
|
||||
|
||||
|
||||
class SyncResult(BaseModel):
|
||||
"""Result of a sync operation"""
|
||||
|
||||
success: bool
|
||||
accounts_processed: int
|
||||
transactions_added: int
|
||||
transactions_updated: int
|
||||
balances_updated: int
|
||||
duration_seconds: float
|
||||
errors: list[str] = []
|
||||
started_at: datetime
|
||||
completed_at: datetime
|
||||
|
||||
class Config:
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
|
||||
|
||||
class SchedulerConfig(BaseModel):
|
||||
"""Scheduler configuration model"""
|
||||
|
||||
enabled: bool = True
|
||||
hour: Optional[int] = 3
|
||||
minute: Optional[int] = 0
|
||||
cron: Optional[str] = None # Custom cron expression
|
||||
|
||||
class Config:
|
||||
extra = "forbid"
|
||||
230
leggend/api/routes/accounts.py
Normal file
230
leggend/api/routes/accounts.py
Normal file
@@ -0,0 +1,230 @@
|
||||
from typing import Optional, List, Union
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.common import APIResponse
|
||||
from leggend.api.models.accounts import (
|
||||
AccountDetails,
|
||||
AccountBalance,
|
||||
Transaction,
|
||||
TransactionSummary,
|
||||
)
|
||||
from leggend.services.gocardless_service import GoCardlessService
|
||||
from leggend.services.database_service import DatabaseService
|
||||
|
||||
router = APIRouter()
|
||||
gocardless_service = GoCardlessService()
|
||||
database_service = DatabaseService()
|
||||
|
||||
|
||||
@router.get("/accounts", response_model=APIResponse)
|
||||
async def get_all_accounts() -> APIResponse:
|
||||
"""Get all connected accounts"""
|
||||
try:
|
||||
requisitions_data = await gocardless_service.get_requisitions()
|
||||
|
||||
all_accounts = set()
|
||||
for req in requisitions_data.get("results", []):
|
||||
all_accounts.update(req.get("accounts", []))
|
||||
|
||||
accounts = []
|
||||
for account_id in all_accounts:
|
||||
try:
|
||||
account_details = await gocardless_service.get_account_details(
|
||||
account_id
|
||||
)
|
||||
balances_data = await gocardless_service.get_account_balances(
|
||||
account_id
|
||||
)
|
||||
|
||||
# Process balances
|
||||
balances = []
|
||||
for balance in balances_data.get("balances", []):
|
||||
balance_amount = balance["balanceAmount"]
|
||||
balances.append(
|
||||
AccountBalance(
|
||||
amount=float(balance_amount["amount"]),
|
||||
currency=balance_amount["currency"],
|
||||
balance_type=balance["balanceType"],
|
||||
last_change_date=balance.get("lastChangeDateTime"),
|
||||
)
|
||||
)
|
||||
|
||||
accounts.append(
|
||||
AccountDetails(
|
||||
id=account_details["id"],
|
||||
institution_id=account_details["institution_id"],
|
||||
status=account_details["status"],
|
||||
iban=account_details.get("iban"),
|
||||
name=account_details.get("name"),
|
||||
currency=account_details.get("currency"),
|
||||
created=account_details["created"],
|
||||
last_accessed=account_details.get("last_accessed"),
|
||||
balances=balances,
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get details for account {account_id}: {e}")
|
||||
continue
|
||||
|
||||
return APIResponse(
|
||||
success=True, data=accounts, message=f"Retrieved {len(accounts)} accounts"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get accounts: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to get accounts: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/accounts/{account_id}", response_model=APIResponse)
|
||||
async def get_account_details(account_id: str) -> APIResponse:
|
||||
"""Get details for a specific account"""
|
||||
try:
|
||||
account_details = await gocardless_service.get_account_details(account_id)
|
||||
balances_data = await gocardless_service.get_account_balances(account_id)
|
||||
|
||||
# Process balances
|
||||
balances = []
|
||||
for balance in balances_data.get("balances", []):
|
||||
balance_amount = balance["balanceAmount"]
|
||||
balances.append(
|
||||
AccountBalance(
|
||||
amount=float(balance_amount["amount"]),
|
||||
currency=balance_amount["currency"],
|
||||
balance_type=balance["balanceType"],
|
||||
last_change_date=balance.get("lastChangeDateTime"),
|
||||
)
|
||||
)
|
||||
|
||||
account = AccountDetails(
|
||||
id=account_details["id"],
|
||||
institution_id=account_details["institution_id"],
|
||||
status=account_details["status"],
|
||||
iban=account_details.get("iban"),
|
||||
name=account_details.get("name"),
|
||||
currency=account_details.get("currency"),
|
||||
created=account_details["created"],
|
||||
last_accessed=account_details.get("last_accessed"),
|
||||
balances=balances,
|
||||
)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=account,
|
||||
message=f"Account details retrieved for {account_id}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get account details for {account_id}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Account not found: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/accounts/{account_id}/balances", response_model=APIResponse)
|
||||
async def get_account_balances(account_id: str) -> APIResponse:
|
||||
"""Get balances for a specific account from database"""
|
||||
try:
|
||||
# Get balances from database instead of GoCardless API
|
||||
db_balances = await database_service.get_balances_from_db(account_id=account_id)
|
||||
|
||||
balances = []
|
||||
for balance in db_balances:
|
||||
balances.append(
|
||||
AccountBalance(
|
||||
amount=balance["amount"],
|
||||
currency=balance["currency"],
|
||||
balance_type=balance["type"],
|
||||
last_change_date=balance.get("timestamp"),
|
||||
)
|
||||
)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=balances,
|
||||
message=f"Retrieved {len(balances)} balances for account {account_id}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to get balances from database for account {account_id}: {e}"
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Failed to get balances: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/accounts/{account_id}/transactions", response_model=APIResponse)
|
||||
async def get_account_transactions(
|
||||
account_id: str,
|
||||
limit: Optional[int] = Query(default=100, le=500),
|
||||
offset: Optional[int] = Query(default=0, ge=0),
|
||||
summary_only: bool = Query(
|
||||
default=False, description="Return transaction summaries only"
|
||||
),
|
||||
) -> APIResponse:
|
||||
"""Get transactions for a specific account from database"""
|
||||
try:
|
||||
# Get transactions from database instead of GoCardless API
|
||||
db_transactions = await database_service.get_transactions_from_db(
|
||||
account_id=account_id,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
|
||||
# Get total count for pagination info
|
||||
total_transactions = await database_service.get_transaction_count_from_db(
|
||||
account_id=account_id,
|
||||
)
|
||||
|
||||
data: Union[List[TransactionSummary], List[Transaction]]
|
||||
|
||||
if summary_only:
|
||||
# Return simplified transaction summaries
|
||||
data = [
|
||||
TransactionSummary(
|
||||
internal_transaction_id=txn["internalTransactionId"],
|
||||
date=txn["transactionDate"],
|
||||
description=txn["description"],
|
||||
amount=txn["transactionValue"],
|
||||
currency=txn["transactionCurrency"],
|
||||
status=txn["transactionStatus"],
|
||||
account_id=txn["accountId"],
|
||||
)
|
||||
for txn in db_transactions
|
||||
]
|
||||
else:
|
||||
# Return full transaction details
|
||||
data = [
|
||||
Transaction(
|
||||
internal_transaction_id=txn["internalTransactionId"],
|
||||
institution_id=txn["institutionId"],
|
||||
iban=txn["iban"],
|
||||
account_id=txn["accountId"],
|
||||
transaction_date=txn["transactionDate"],
|
||||
description=txn["description"],
|
||||
transaction_value=txn["transactionValue"],
|
||||
transaction_currency=txn["transactionCurrency"],
|
||||
transaction_status=txn["transactionStatus"],
|
||||
raw_transaction=txn["rawTransaction"],
|
||||
)
|
||||
for txn in db_transactions
|
||||
]
|
||||
|
||||
actual_offset = offset or 0
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=data,
|
||||
message=f"Retrieved {len(data)} transactions (showing {actual_offset + 1}-{actual_offset + len(data)} of {total_transactions})",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to get transactions from database for account {account_id}: {e}"
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Failed to get transactions: {str(e)}"
|
||||
) from e
|
||||
179
leggend/api/routes/banks.py
Normal file
179
leggend/api/routes/banks.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.common import APIResponse
|
||||
from leggend.api.models.banks import (
|
||||
BankInstitution,
|
||||
BankConnectionRequest,
|
||||
BankRequisition,
|
||||
BankConnectionStatus,
|
||||
)
|
||||
from leggend.services.gocardless_service import GoCardlessService
|
||||
from leggend.utils.gocardless import REQUISITION_STATUS
|
||||
|
||||
router = APIRouter()
|
||||
gocardless_service = GoCardlessService()
|
||||
|
||||
|
||||
@router.get("/banks/institutions", response_model=APIResponse)
|
||||
async def get_bank_institutions(
|
||||
country: str = Query(default="PT", description="Country code (e.g., PT, ES, FR)"),
|
||||
) -> APIResponse:
|
||||
"""Get available bank institutions for a country"""
|
||||
try:
|
||||
institutions_data = await gocardless_service.get_institutions(country)
|
||||
|
||||
institutions = [
|
||||
BankInstitution(
|
||||
id=inst["id"],
|
||||
name=inst["name"],
|
||||
bic=inst.get("bic"),
|
||||
transaction_total_days=inst["transaction_total_days"],
|
||||
countries=inst["countries"],
|
||||
logo=inst.get("logo"),
|
||||
)
|
||||
for inst in institutions_data
|
||||
]
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=institutions,
|
||||
message=f"Found {len(institutions)} institutions for {country}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get institutions for {country}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to get institutions: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.post("/banks/connect", response_model=APIResponse)
|
||||
async def connect_to_bank(request: BankConnectionRequest) -> APIResponse:
|
||||
"""Create a connection to a bank (requisition)"""
|
||||
try:
|
||||
redirect_url = request.redirect_url or "http://localhost:8000/"
|
||||
requisition_data = await gocardless_service.create_requisition(
|
||||
request.institution_id, redirect_url
|
||||
)
|
||||
|
||||
requisition = BankRequisition(
|
||||
id=requisition_data["id"],
|
||||
institution_id=requisition_data["institution_id"],
|
||||
status=requisition_data["status"],
|
||||
created=requisition_data["created"],
|
||||
link=requisition_data["link"],
|
||||
accounts=requisition_data.get("accounts", []),
|
||||
)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=requisition,
|
||||
message="Bank connection created. Please visit the link to authorize.",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to connect to bank {request.institution_id}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to bank: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/banks/status", response_model=APIResponse)
|
||||
async def get_bank_connections_status() -> APIResponse:
|
||||
"""Get status of all bank connections"""
|
||||
try:
|
||||
requisitions_data = await gocardless_service.get_requisitions()
|
||||
|
||||
connections = []
|
||||
for req in requisitions_data.get("results", []):
|
||||
status = req["status"]
|
||||
status_display = REQUISITION_STATUS.get(status, "UNKNOWN")
|
||||
|
||||
connections.append(
|
||||
BankConnectionStatus(
|
||||
bank_id=req["institution_id"],
|
||||
bank_name=req[
|
||||
"institution_id"
|
||||
], # Could be enhanced with actual bank names
|
||||
status=status,
|
||||
status_display=status_display,
|
||||
created_at=req["created"],
|
||||
requisition_id=req["id"],
|
||||
accounts_count=len(req.get("accounts", [])),
|
||||
)
|
||||
)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=connections,
|
||||
message=f"Found {len(connections)} bank connections",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get bank connection status: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to get bank status: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.delete("/banks/connections/{requisition_id}", response_model=APIResponse)
|
||||
async def delete_bank_connection(requisition_id: str) -> APIResponse:
|
||||
"""Delete a bank connection"""
|
||||
try:
|
||||
# This would need to be implemented in GoCardlessService
|
||||
# For now, return success
|
||||
return APIResponse(
|
||||
success=True,
|
||||
message=f"Bank connection {requisition_id} deleted successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete bank connection {requisition_id}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to delete connection: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/banks/countries", response_model=APIResponse)
|
||||
async def get_supported_countries() -> APIResponse:
|
||||
"""Get list of supported countries"""
|
||||
countries = [
|
||||
{"code": "AT", "name": "Austria"},
|
||||
{"code": "BE", "name": "Belgium"},
|
||||
{"code": "BG", "name": "Bulgaria"},
|
||||
{"code": "HR", "name": "Croatia"},
|
||||
{"code": "CY", "name": "Cyprus"},
|
||||
{"code": "CZ", "name": "Czech Republic"},
|
||||
{"code": "DK", "name": "Denmark"},
|
||||
{"code": "EE", "name": "Estonia"},
|
||||
{"code": "FI", "name": "Finland"},
|
||||
{"code": "FR", "name": "France"},
|
||||
{"code": "DE", "name": "Germany"},
|
||||
{"code": "GR", "name": "Greece"},
|
||||
{"code": "HU", "name": "Hungary"},
|
||||
{"code": "IS", "name": "Iceland"},
|
||||
{"code": "IE", "name": "Ireland"},
|
||||
{"code": "IT", "name": "Italy"},
|
||||
{"code": "LV", "name": "Latvia"},
|
||||
{"code": "LI", "name": "Liechtenstein"},
|
||||
{"code": "LT", "name": "Lithuania"},
|
||||
{"code": "LU", "name": "Luxembourg"},
|
||||
{"code": "MT", "name": "Malta"},
|
||||
{"code": "NL", "name": "Netherlands"},
|
||||
{"code": "NO", "name": "Norway"},
|
||||
{"code": "PL", "name": "Poland"},
|
||||
{"code": "PT", "name": "Portugal"},
|
||||
{"code": "RO", "name": "Romania"},
|
||||
{"code": "SK", "name": "Slovakia"},
|
||||
{"code": "SI", "name": "Slovenia"},
|
||||
{"code": "ES", "name": "Spain"},
|
||||
{"code": "SE", "name": "Sweden"},
|
||||
{"code": "GB", "name": "United Kingdom"},
|
||||
]
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=countries,
|
||||
message="Supported countries retrieved successfully",
|
||||
)
|
||||
209
leggend/api/routes/notifications.py
Normal file
209
leggend/api/routes/notifications.py
Normal file
@@ -0,0 +1,209 @@
|
||||
from typing import Dict, Any
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.common import APIResponse
|
||||
from leggend.api.models.notifications import (
|
||||
NotificationSettings,
|
||||
NotificationTest,
|
||||
DiscordConfig,
|
||||
TelegramConfig,
|
||||
NotificationFilters,
|
||||
)
|
||||
from leggend.services.notification_service import NotificationService
|
||||
from leggend.config import config
|
||||
|
||||
router = APIRouter()
|
||||
notification_service = NotificationService()
|
||||
|
||||
|
||||
@router.get("/notifications/settings", response_model=APIResponse)
|
||||
async def get_notification_settings() -> APIResponse:
|
||||
"""Get current notification settings"""
|
||||
try:
|
||||
notifications_config = config.notifications_config
|
||||
filters_config = config.filters_config
|
||||
|
||||
# Build response safely without exposing secrets
|
||||
discord_config = notifications_config.get("discord", {})
|
||||
telegram_config = notifications_config.get("telegram", {})
|
||||
|
||||
settings = NotificationSettings(
|
||||
discord=DiscordConfig(
|
||||
webhook="***" if discord_config.get("webhook") else "",
|
||||
enabled=discord_config.get("enabled", True),
|
||||
)
|
||||
if discord_config.get("webhook")
|
||||
else None,
|
||||
telegram=TelegramConfig(
|
||||
token="***" if telegram_config.get("token") else "",
|
||||
chat_id=telegram_config.get("chat_id", 0),
|
||||
enabled=telegram_config.get("enabled", True),
|
||||
)
|
||||
if telegram_config.get("token")
|
||||
else None,
|
||||
filters=NotificationFilters(
|
||||
case_insensitive=filters_config.get("case-insensitive", {}),
|
||||
case_sensitive=filters_config.get("case-sensitive"),
|
||||
amount_threshold=filters_config.get("amount_threshold"),
|
||||
keywords=filters_config.get("keywords", []),
|
||||
),
|
||||
)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=settings,
|
||||
message="Notification settings retrieved successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get notification settings: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to get notification settings: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.put("/notifications/settings", response_model=APIResponse)
|
||||
async def update_notification_settings(settings: NotificationSettings) -> APIResponse:
|
||||
"""Update notification settings"""
|
||||
try:
|
||||
# Update notifications config
|
||||
notifications_config = {}
|
||||
|
||||
if settings.discord:
|
||||
notifications_config["discord"] = {
|
||||
"webhook": settings.discord.webhook,
|
||||
"enabled": settings.discord.enabled,
|
||||
}
|
||||
|
||||
if settings.telegram:
|
||||
notifications_config["telegram"] = {
|
||||
"token": settings.telegram.token,
|
||||
"chat_id": settings.telegram.chat_id,
|
||||
"enabled": settings.telegram.enabled,
|
||||
}
|
||||
|
||||
# Update filters config
|
||||
filters_config: Dict[str, Any] = {}
|
||||
if settings.filters.case_insensitive:
|
||||
filters_config["case-insensitive"] = settings.filters.case_insensitive
|
||||
if settings.filters.case_sensitive:
|
||||
filters_config["case-sensitive"] = settings.filters.case_sensitive
|
||||
if settings.filters.amount_threshold:
|
||||
filters_config["amount_threshold"] = settings.filters.amount_threshold
|
||||
if settings.filters.keywords:
|
||||
filters_config["keywords"] = settings.filters.keywords
|
||||
|
||||
# Save to config
|
||||
if notifications_config:
|
||||
config.update_section("notifications", notifications_config)
|
||||
if filters_config:
|
||||
config.update_section("filters", filters_config)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data={"updated": True},
|
||||
message="Notification settings updated successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update notification settings: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to update notification settings: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.post("/notifications/test", response_model=APIResponse)
|
||||
async def test_notification(test_request: NotificationTest) -> APIResponse:
|
||||
"""Send a test notification"""
|
||||
try:
|
||||
success = await notification_service.send_test_notification(
|
||||
test_request.service, test_request.message
|
||||
)
|
||||
|
||||
if success:
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data={"sent": True},
|
||||
message=f"Test notification sent to {test_request.service} successfully",
|
||||
)
|
||||
else:
|
||||
return APIResponse(
|
||||
success=False,
|
||||
message=f"Failed to send test notification to {test_request.service}",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send test notification: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to send test notification: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/notifications/services", response_model=APIResponse)
|
||||
async def get_notification_services() -> APIResponse:
|
||||
"""Get available notification services and their status"""
|
||||
try:
|
||||
notifications_config = config.notifications_config
|
||||
|
||||
services = {
|
||||
"discord": {
|
||||
"name": "Discord",
|
||||
"enabled": bool(notifications_config.get("discord", {}).get("webhook")),
|
||||
"configured": bool(
|
||||
notifications_config.get("discord", {}).get("webhook")
|
||||
),
|
||||
"active": notifications_config.get("discord", {}).get("enabled", True),
|
||||
},
|
||||
"telegram": {
|
||||
"name": "Telegram",
|
||||
"enabled": bool(
|
||||
notifications_config.get("telegram", {}).get("token")
|
||||
and notifications_config.get("telegram", {}).get("chat_id")
|
||||
),
|
||||
"configured": bool(
|
||||
notifications_config.get("telegram", {}).get("token")
|
||||
and notifications_config.get("telegram", {}).get("chat_id")
|
||||
),
|
||||
"active": notifications_config.get("telegram", {}).get("enabled", True),
|
||||
},
|
||||
}
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data=services,
|
||||
message="Notification services status retrieved successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get notification services: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to get notification services: {str(e)}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.delete("/notifications/settings/{service}", response_model=APIResponse)
|
||||
async def delete_notification_service(service: str) -> APIResponse:
|
||||
"""Delete/disable a notification service"""
|
||||
try:
|
||||
if service not in ["discord", "telegram"]:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Service must be 'discord' or 'telegram'"
|
||||
)
|
||||
|
||||
notifications_config = config.notifications_config.copy()
|
||||
if service in notifications_config:
|
||||
del notifications_config[service]
|
||||
config.update_section("notifications", notifications_config)
|
||||
|
||||
return APIResponse(
|
||||
success=True,
|
||||
data={"deleted": service},
|
||||
message=f"{service.capitalize()} notification service deleted successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete notification service {service}: {e}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to delete notification service: {str(e)}"
|
||||
) from e
|
||||
212
leggend/api/routes/sync.py
Normal file
212
leggend/api/routes/sync.py
Normal file
@@ -0,0 +1,212 @@
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, HTTPException, BackgroundTasks
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.common import APIResponse
|
||||
from leggend.api.models.sync import SyncRequest, SchedulerConfig
|
||||
from leggend.services.sync_service import SyncService
|
||||
from leggend.background.scheduler import scheduler
|
||||
from leggend.config import config
|
||||
|
||||
router = APIRouter()
|
||||
sync_service = SyncService()
|
||||
|
||||
|
||||
@router.get("/sync/status", response_model=APIResponse)
async def get_sync_status() -> APIResponse:
    """Return the current sync status, enriched with the next scheduled run."""
    try:
        sync_status = await sync_service.get_sync_status()

        # Attach the scheduler's next planned run when one exists.
        upcoming = scheduler.get_next_sync_time()
        if upcoming:
            sync_status.next_sync = upcoming

        return APIResponse(
            success=True, data=sync_status, message="Sync status retrieved successfully"
        )

    except Exception as e:
        logger.error(f"Failed to get sync status: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get sync status: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.post("/sync", response_model=APIResponse)
async def trigger_sync(
    background_tasks: BackgroundTasks, sync_request: Optional[SyncRequest] = None
) -> APIResponse:
    """Trigger a manual sync operation that runs in the background.

    If a sync is already running, refuses unless ``force`` is set. With
    ``account_ids`` in the request, only those accounts are synced;
    otherwise all accounts are.

    Raises:
        HTTPException: 500 when scheduling the sync fails.
    """
    try:
        # Improvement: compute the force flag once instead of re-evaluating
        # "sync_request.force if sync_request else False" in each branch.
        force = bool(sync_request and sync_request.force)

        # Refuse to start a second sync unless explicitly forced.
        status = await sync_service.get_sync_status()
        if status.is_running and not force:
            return APIResponse(
                success=False,
                message="Sync is already running. Use 'force: true' to override.",
            )

        if sync_request and sync_request.account_ids:
            # Sync specific accounts in background.
            background_tasks.add_task(
                sync_service.sync_specific_accounts,
                sync_request.account_ids,
                force,
            )
            message = (
                f"Started sync for {len(sync_request.account_ids)} specific accounts"
            )
        else:
            # Sync all accounts in background.
            background_tasks.add_task(sync_service.sync_all_accounts, force)
            message = "Started sync for all accounts"

        return APIResponse(
            success=True,
            data={
                "sync_started": True,
                "force": force,
            },
            message=message,
        )

    except Exception as e:
        logger.error(f"Failed to trigger sync: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to trigger sync: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.post("/sync/now", response_model=APIResponse)
async def sync_now(sync_request: Optional[SyncRequest] = None) -> APIResponse:
    """Run a sync synchronously and return its result (slower; for testing)."""
    try:
        if sync_request and sync_request.account_ids:
            outcome = await sync_service.sync_specific_accounts(
                sync_request.account_ids, sync_request.force
            )
        else:
            outcome = await sync_service.sync_all_accounts(
                sync_request.force if sync_request else False
            )

        if outcome.success:
            summary = "Sync completed"
        else:
            summary = f"Sync failed with {len(outcome.errors)} errors"

        return APIResponse(success=outcome.success, data=outcome, message=summary)

    except Exception as e:
        logger.error(f"Failed to run sync: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to run sync: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.get("/sync/scheduler", response_model=APIResponse)
async def get_scheduler_config() -> APIResponse:
    """Return the scheduler configuration plus live scheduler state."""
    try:
        current = config.scheduler_config
        upcoming = scheduler.get_next_sync_time()
        running = (
            scheduler.scheduler.running if hasattr(scheduler, "scheduler") else False
        )

        payload = dict(current)
        payload["next_scheduled_sync"] = upcoming.isoformat() if upcoming else None
        payload["is_running"] = running

        return APIResponse(
            success=True,
            data=payload,
            message="Scheduler configuration retrieved successfully",
        )

    except Exception as e:
        logger.error(f"Failed to get scheduler config: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get scheduler config: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.put("/sync/scheduler", response_model=APIResponse)
async def update_scheduler_config(scheduler_config: SchedulerConfig) -> APIResponse:
    """Update the scheduler configuration and reschedule the sync job.

    Validates a custom cron expression (5 fields), persists the new
    schedule, and tells the background scheduler to apply it.

    Raises:
        HTTPException: 400 for an invalid cron expression, 500 on internal failure.
    """
    try:
        # Validate cron expression if provided. The previous inner
        # try/except raised a ValueError only to catch it again; a direct
        # check is equivalent and clearer.
        if scheduler_config.cron and len(scheduler_config.cron.split()) != 5:
            raise HTTPException(
                status_code=400,
                detail=(
                    "Invalid cron expression: Cron expression must have 5 parts: "
                    "minute hour day month day_of_week"
                ),
            )

        # Persist the new schedule and apply it.
        # NOTE(review): .dict() is pydantic v1 API; switch to model_dump()
        # if the project is on pydantic v2 — confirm.
        schedule_data = scheduler_config.dict(exclude_none=True)
        config.update_section("scheduler", {"sync": schedule_data})
        scheduler.reschedule_sync(schedule_data)

        return APIResponse(
            success=True,
            data=schedule_data,
            message="Scheduler configuration updated successfully",
        )

    except HTTPException:
        # Bug fix: the 400 raised above was previously swallowed by the
        # generic handler below and re-surfaced as a 500.
        raise
    except Exception as e:
        logger.error(f"Failed to update scheduler config: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to update scheduler config: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.post("/sync/scheduler/start", response_model=APIResponse)
async def start_scheduler() -> APIResponse:
    """Start the background scheduler (no-op if it is already running)."""
    try:
        # Guard-clause style: bail out early when nothing needs doing.
        if scheduler.scheduler.running:
            return APIResponse(success=True, message="Scheduler is already running")

        scheduler.start()
        return APIResponse(success=True, message="Scheduler started successfully")

    except Exception as e:
        logger.error(f"Failed to start scheduler: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to start scheduler: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.post("/sync/scheduler/stop", response_model=APIResponse)
async def stop_scheduler() -> APIResponse:
    """Stop the background scheduler (no-op if it is already stopped)."""
    try:
        # Guard-clause style: bail out early when nothing needs doing.
        if not scheduler.scheduler.running:
            return APIResponse(success=True, message="Scheduler is already stopped")

        scheduler.shutdown()
        return APIResponse(success=True, message="Scheduler stopped successfully")

    except Exception as e:
        logger.error(f"Failed to stop scheduler: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to stop scheduler: {str(e)}"
        ) from e
|
||||
192
leggend/api/routes/transactions.py
Normal file
192
leggend/api/routes/transactions.py
Normal file
@@ -0,0 +1,192 @@
|
||||
from typing import Optional, List, Union
|
||||
from datetime import datetime, timedelta
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.common import APIResponse
|
||||
from leggend.api.models.accounts import Transaction, TransactionSummary
|
||||
from leggend.services.gocardless_service import GoCardlessService
|
||||
from leggend.services.database_service import DatabaseService
|
||||
|
||||
router = APIRouter()
|
||||
gocardless_service = GoCardlessService()
|
||||
database_service = DatabaseService()
|
||||
|
||||
|
||||
@router.get("/transactions", response_model=APIResponse)
async def get_all_transactions(
    limit: Optional[int] = Query(default=100, le=500),
    offset: Optional[int] = Query(default=0, ge=0),
    summary_only: bool = Query(
        default=True, description="Return transaction summaries only"
    ),
    date_from: Optional[str] = Query(
        default=None, description="Filter from date (YYYY-MM-DD)"
    ),
    date_to: Optional[str] = Query(
        default=None, description="Filter to date (YYYY-MM-DD)"
    ),
    min_amount: Optional[float] = Query(
        default=None, description="Minimum transaction amount"
    ),
    max_amount: Optional[float] = Query(
        default=None, description="Maximum transaction amount"
    ),
    search: Optional[str] = Query(
        default=None, description="Search in transaction descriptions"
    ),
    account_id: Optional[str] = Query(default=None, description="Filter by account ID"),
) -> APIResponse:
    """Get all transactions from the local database with filtering options.

    Reads from the database (not the GoCardless API) and returns either
    compact summaries (default) or full transaction records; pagination
    details are included in the response message.

    Raises:
        HTTPException: 500 when the database read fails.
    """
    try:
        # Page of rows matching the filters.
        db_transactions = await database_service.get_transactions_from_db(
            account_id=account_id,
            limit=limit,
            offset=offset,
            date_from=date_from,
            date_to=date_to,
            min_amount=min_amount,
            max_amount=max_amount,
            search=search,
        )

        # Total matching rows (ignoring limit/offset) for pagination info.
        total_transactions = await database_service.get_transaction_count_from_db(
            account_id=account_id,
            date_from=date_from,
            date_to=date_to,
            min_amount=min_amount,
            max_amount=max_amount,
            search=search,
        )

        data: Union[List[TransactionSummary], List[Transaction]]

        if summary_only:
            # Simplified transaction summaries.
            data = [
                TransactionSummary(
                    internal_transaction_id=txn["internalTransactionId"],
                    date=txn["transactionDate"],
                    description=txn["description"],
                    amount=txn["transactionValue"],
                    currency=txn["transactionCurrency"],
                    status=txn["transactionStatus"],
                    account_id=txn["accountId"],
                )
                for txn in db_transactions
            ]
        else:
            # Full transaction details.
            data = [
                Transaction(
                    internal_transaction_id=txn["internalTransactionId"],
                    institution_id=txn["institutionId"],
                    iban=txn["iban"],
                    account_id=txn["accountId"],
                    transaction_date=txn["transactionDate"],
                    description=txn["description"],
                    transaction_value=txn["transactionValue"],
                    transaction_currency=txn["transactionCurrency"],
                    transaction_status=txn["transactionStatus"],
                    raw_transaction=txn["rawTransaction"],
                )
                for txn in db_transactions
            ]

        actual_offset = offset or 0
        # Bug fix: an empty page previously rendered as "showing 1-0 of N".
        if data:
            window = (
                f"showing {actual_offset + 1}-{actual_offset + len(data)} "
                f"of {total_transactions}"
            )
        else:
            window = f"showing 0 of {total_transactions}"

        return APIResponse(
            success=True,
            data=data,
            message=f"Retrieved {len(data)} transactions ({window})",
        )

    except Exception as e:
        logger.error(f"Failed to get transactions from database: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get transactions: {str(e)}"
        ) from e
|
||||
|
||||
|
||||
@router.get("/transactions/stats", response_model=APIResponse)
async def get_transaction_stats(
    days: int = Query(default=30, description="Number of days to include in stats"),
    account_id: Optional[str] = Query(default=None, description="Filter by account ID"),
) -> APIResponse:
    """Compute transaction statistics for the last N days from the database."""
    try:
        # Window: [now - days, now], passed as ISO strings to the DB layer.
        window_end = datetime.now()
        window_start = window_end - timedelta(days=days)

        recent = await database_service.get_transactions_from_db(
            account_id=account_id,
            date_from=window_start.isoformat(),
            date_to=window_end.isoformat(),
            limit=None,  # stats need every matching transaction
        )

        # Single pass over the rows, accumulating everything at once.
        total_count = len(recent)
        income = 0
        expenses = 0
        booked = 0
        pending = 0
        net_sum = 0
        seen_accounts = set()
        for txn in recent:
            value = txn["transactionValue"]
            net_sum += value
            if value > 0:
                income += value
            elif value < 0:
                expenses += abs(value)
            state = txn["transactionStatus"]
            if state == "booked":
                booked += 1
            elif state == "pending":
                pending += 1
            seen_accounts.add(txn["accountId"])

        stats = {
            "period_days": days,
            "total_transactions": total_count,
            "booked_transactions": booked,
            "pending_transactions": pending,
            "total_income": round(income, 2),
            "total_expenses": round(expenses, 2),
            "net_change": round(income - expenses, 2),
            "average_transaction": round(net_sum / total_count, 2)
            if total_count > 0
            else 0,
            "accounts_included": len(seen_accounts),
        }

        return APIResponse(
            success=True,
            data=stats,
            message=f"Transaction statistics for last {days} days",
        )

    except Exception as e:
        logger.error(f"Failed to get transaction stats from database: {e}")
        raise HTTPException(
            status_code=500, detail=f"Failed to get transaction stats: {str(e)}"
        ) from e
|
||||
168
leggend/background/scheduler.py
Normal file
168
leggend/background/scheduler.py
Normal file
@@ -0,0 +1,168 @@
|
||||
from typing import Optional

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from loguru import logger

from leggend.config import config
from leggend.services.sync_service import SyncService
from leggend.services.notification_service import NotificationService
|
||||
|
||||
|
||||
class BackgroundScheduler:
    """Runs the periodic transaction sync on an APScheduler AsyncIO scheduler.

    A single job with id "daily_sync" performs the scheduled sync. Failed
    runs are retried up to ``max_retries`` times, ``retry_delay`` seconds
    apart, with failures reported through the notification service.
    """

    def __init__(self):
        self.scheduler = AsyncIOScheduler()
        self.sync_service = SyncService()
        self.notification_service = NotificationService()
        # Retry policy for failed scheduled syncs.
        self.max_retries = 3
        self.retry_delay = 300  # seconds (5 minutes)

    def start(self):
        """Start the scheduler and register the sync job from configuration."""
        schedule_config = config.scheduler_config.get("sync", {})

        if not schedule_config.get("enabled", True):
            logger.info("Sync scheduling is disabled in configuration")
            self.scheduler.start()
            return

        trigger = self._parse_cron_config(schedule_config)
        if not trigger:
            # Bug fix: previously the scheduler was never started when the
            # cron expression was invalid, which also broke manual
            # start/stop control and retry jobs. Start it without a sync
            # job instead, matching the "disabled" path above.
            logger.warning(
                "Invalid sync schedule; starting scheduler without a sync job"
            )
            self.scheduler.start()
            return

        self.scheduler.add_job(
            self._run_sync,
            trigger,
            id="daily_sync",
            name="Scheduled sync of all transactions",
            max_instances=1,  # never run two scheduled syncs concurrently
        )

        self.scheduler.start()
        logger.info(f"Background scheduler started with sync job: {trigger}")

    def shutdown(self):
        """Stop the scheduler if it is running."""
        if self.scheduler.running:
            self.scheduler.shutdown()
            logger.info("Background scheduler shutdown")

    def reschedule_sync(self, schedule_config: dict):
        """Replace the sync job's schedule with ``schedule_config``."""
        if self.scheduler.running:
            try:
                self.scheduler.remove_job("daily_sync")
                logger.info("Removed existing sync job")
            except Exception:
                pass  # job might not exist yet

        if not schedule_config.get("enabled", True):
            logger.info("Sync scheduling disabled")
            return

        trigger = self._parse_cron_config(schedule_config)
        if not trigger:
            return

        self.scheduler.add_job(
            self._run_sync,
            trigger,
            id="daily_sync",
            name="Scheduled sync of all transactions",
            max_instances=1,
        )
        logger.info(f"Rescheduled sync job with: {trigger}")

    def _parse_cron_config(self, schedule_config: dict) -> Optional[CronTrigger]:
        """Build a CronTrigger from the schedule configuration.

        Accepts either a 5-field ``cron`` expression
        ("minute hour day month day_of_week") or ``hour``/``minute`` keys
        (default: 03:00 daily). Returns None when the cron expression is
        invalid (bug fix: the annotation previously claimed a non-optional
        CronTrigger despite the None returns).
        """
        if schedule_config.get("cron"):
            try:
                cron_parts = schedule_config["cron"].split()
                if len(cron_parts) == 5:
                    minute, hour, day, month, day_of_week = cron_parts
                    # "*" fields are passed as None so APScheduler applies
                    # its own defaults for them.
                    return CronTrigger(
                        minute=minute,
                        hour=hour,
                        day=day if day != "*" else None,
                        month=month if month != "*" else None,
                        day_of_week=day_of_week if day_of_week != "*" else None,
                    )
                logger.error(f"Invalid cron expression: {schedule_config['cron']}")
                return None
            except Exception as e:
                logger.error(f"Error parsing cron expression: {e}")
                return None

        # Fallback: simple hour/minute schedule (default 3:00 AM daily).
        hour = schedule_config.get("hour", 3)
        minute = schedule_config.get("minute", 0)
        return CronTrigger(hour=hour, minute=minute)

    async def _run_sync(self, retry_count: int = 0):
        """Run one sync attempt; on failure notify and schedule a retry."""
        try:
            logger.info("Starting scheduled sync job")
            await self.sync_service.sync_all_accounts()
            logger.info("Scheduled sync job completed successfully")
        except Exception as e:
            logger.error(
                f"Scheduled sync job failed (attempt {retry_count + 1}/{self.max_retries}): {e}"
            )

            # NOTE(review): failure reports reuse the expiry-notification
            # channel of the notification service — confirm intended.
            try:
                await self.notification_service.send_expiry_notification(
                    {
                        "type": "sync_failure",
                        "error": str(e),
                        "retry_count": retry_count + 1,
                        "max_retries": self.max_retries,
                    }
                )
            except Exception as notification_error:
                logger.error(
                    f"Failed to send failure notification: {notification_error}"
                )

            # Retry transient failures with a one-shot "date" job.
            if retry_count < self.max_retries - 1:
                import datetime

                logger.info(f"Retrying sync job in {self.retry_delay} seconds...")
                retry_time = datetime.datetime.now() + datetime.timedelta(
                    seconds=self.retry_delay
                )
                self.scheduler.add_job(
                    self._run_sync,
                    "date",
                    args=[retry_count + 1],
                    id=f"sync_retry_{retry_count + 1}",
                    run_date=retry_time,
                )
            else:
                logger.error("Maximum retries exceeded for sync job")
                try:
                    await self.notification_service.send_expiry_notification(
                        {
                            "type": "sync_final_failure",
                            "error": str(e),
                            "retry_count": retry_count + 1,
                        }
                    )
                except Exception as notification_error:
                    logger.error(
                        f"Failed to send final failure notification: {notification_error}"
                    )

    def get_next_sync_time(self):
        """Return the next run time of the sync job, or None if unscheduled."""
        job = self.scheduler.get_job("daily_sync")
        return job.next_run_time if job else None
|
||||
|
||||
|
||||
scheduler = BackgroundScheduler()
|
||||
143
leggend/config.py
Normal file
143
leggend/config.py
Normal file
@@ -0,0 +1,143 @@
|
||||
import os
|
||||
import tomllib
|
||||
import tomli_w
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from loguru import logger
|
||||
|
||||
|
||||
class Config:
    """Process-wide singleton wrapper around the leggen TOML configuration.

    The first successful load caches the parsed config in memory; all
    accessors and updates operate on that cache, and writes are persisted
    back to the TOML file.
    """

    _instance = None  # the one shared Config object
    _config = None  # cached parsed configuration dict
    _config_path = None  # path the cache was loaded from / saved to

    def __new__(cls):
        # Classic singleton: every Config() call yields the same instance.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def load_config(self, config_path: Optional[str] = None) -> Dict[str, Any]:
        """Load the TOML config, caching it after the first successful call.

        Path resolution order: explicit argument, then the
        LEGGEN_CONFIG_FILE environment variable, then
        ~/.config/leggen/config.toml.
        """
        if self._config is not None:
            return self._config

        if config_path is None:
            config_path = os.environ.get(
                "LEGGEN_CONFIG_FILE",
                str(Path.home() / ".config" / "leggen" / "config.toml"),
            )

        self._config_path = config_path

        try:
            with open(config_path, "rb") as fh:
                self._config = tomllib.load(fh)
            logger.info(f"Configuration loaded from {config_path}")
        except FileNotFoundError:
            logger.error(f"Configuration file not found: {config_path}")
            raise
        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            raise

        return self._config

    def save_config(
        self,
        config_data: Optional[Dict[str, Any]] = None,
        config_path: Optional[str] = None,
    ) -> None:
        """Write configuration to the TOML file and refresh the cache.

        Defaults to the cached data and the path used at load time.
        """
        if config_data is None:
            config_data = self._config

        if config_path is None:
            config_path = self._config_path or os.environ.get(
                "LEGGEN_CONFIG_FILE",
                str(Path.home() / ".config" / "leggen" / "config.toml"),
            )

        if config_path is None:
            raise ValueError("No config path specified")
        if config_data is None:
            raise ValueError("No config data to save")

        # Make sure the parent directory exists before writing.
        Path(config_path).parent.mkdir(parents=True, exist_ok=True)

        try:
            with open(config_path, "wb") as fh:
                tomli_w.dump(config_data, fh)

            # Keep the in-memory cache in step with what was just written.
            self._config = config_data
            self._config_path = config_path
            logger.info(f"Configuration saved to {config_path}")
        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            raise

    def update_config(self, section: str, key: str, value: Any) -> None:
        """Set a single value under ``section`` and persist the change."""
        if self._config is None:
            self.load_config()

        if self._config is None:
            raise RuntimeError("Failed to load config")

        self._config.setdefault(section, {})
        self._config[section][key] = value
        self.save_config()

    def update_section(self, section: str, data: Dict[str, Any]) -> None:
        """Replace an entire configuration section and persist the change."""
        if self._config is None:
            self.load_config()

        if self._config is None:
            raise RuntimeError("Failed to load config")

        self._config[section] = data
        self.save_config()

    @property
    def config(self) -> Dict[str, Any]:
        # Lazily load on first access.
        if self._config is None:
            self.load_config()
        if self._config is None:
            raise RuntimeError("Failed to load config")
        return self._config

    @property
    def gocardless_config(self) -> Dict[str, str]:
        return self.config.get("gocardless", {})

    @property
    def database_config(self) -> Dict[str, Any]:
        return self.config.get("database", {})

    @property
    def notifications_config(self) -> Dict[str, Any]:
        return self.config.get("notifications", {})

    @property
    def filters_config(self) -> Dict[str, Any]:
        return self.config.get("filters", {})

    @property
    def scheduler_config(self) -> Dict[str, Any]:
        """Scheduler configuration, falling back to daily sync at 03:00."""
        default_schedule = {
            "sync": {
                "enabled": True,
                "hour": 3,
                "minute": 0,
                "cron": None,  # optional custom cron expression
            }
        }
        return self.config.get("scheduler", default_schedule)
|
||||
|
||||
|
||||
config = Config()
|
||||
126
leggend/main.py
Normal file
126
leggend/main.py
Normal file
@@ -0,0 +1,126 @@
|
||||
from contextlib import asynccontextmanager
|
||||
from importlib import metadata
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.routes import banks, accounts, sync, notifications, transactions
|
||||
from leggend.background.scheduler import scheduler
|
||||
from leggend.config import config
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: load config and run the scheduler for the app's lifetime."""
    # --- startup ---
    logger.info("Starting leggend service...")

    try:
        config.load_config()
        logger.info("Configuration loaded successfully")
    except Exception as e:
        # Without configuration the service cannot run; abort startup.
        logger.error(f"Failed to load configuration: {e}")
        raise

    scheduler.start()
    logger.info("Background scheduler started")

    yield

    # --- shutdown ---
    logger.info("Shutting down leggend service...")
    scheduler.shutdown()
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build and configure the Leggend FastAPI application.

    Sets up CORS for the SvelteKit dev servers, mounts all v1 API routers,
    and exposes root and health endpoints.
    """
    # Improvement: resolve the installed package version once and reuse it;
    # previously the root endpoint re-queried package metadata on every
    # request.
    try:
        version = metadata.version("leggen")
    except metadata.PackageNotFoundError:
        version = "unknown"

    app = FastAPI(
        title="Leggend API",
        description="Open Banking API for Leggen",
        version=version,
        lifespan=lifespan,
    )

    # Add CORS middleware.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=[
            "http://localhost:3000",
            "http://localhost:5173",
        ],  # SvelteKit dev servers
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Include API routes.
    app.include_router(banks.router, prefix="/api/v1", tags=["banks"])
    app.include_router(accounts.router, prefix="/api/v1", tags=["accounts"])
    app.include_router(transactions.router, prefix="/api/v1", tags=["transactions"])
    app.include_router(sync.router, prefix="/api/v1", tags=["sync"])
    app.include_router(notifications.router, prefix="/api/v1", tags=["notifications"])

    @app.get("/")
    async def root():
        # Reuses the version computed above via closure.
        return {"message": "Leggend API is running", "version": version}

    @app.get("/health")
    async def health():
        return {"status": "healthy", "config_loaded": config._config is not None}

    return app
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse flags and launch the API server with uvicorn."""
    import argparse

    parser = argparse.ArgumentParser(description="Start the Leggend API service")
    parser.add_argument(
        "--reload", action="store_true", help="Enable auto-reload for development"
    )
    parser.add_argument(
        "--host", default="0.0.0.0", help="Host to bind to (default: 0.0.0.0)"
    )
    parser.add_argument(
        "--port", type=int, default=8000, help="Port to bind to (default: 8000)"
    )
    options = parser.parse_args()

    # Settings shared by both launch modes.
    common = dict(
        host=options.host,
        port=options.port,
        log_level="info",
        access_log=True,
    )

    if options.reload:
        # Reload mode needs a string import path plus factory=True so
        # uvicorn can re-import the app after code changes.
        uvicorn.run(
            "leggend.main:create_app",
            factory=True,
            reload=True,
            reload_dirs=["leggend", "leggen"],  # watch both packages
            **common,
        )
    else:
        uvicorn.run(create_app(), **common)


if __name__ == "__main__":
    main()
|
||||
383
leggend/services/database_service.py
Normal file
383
leggend/services/database_service.py
Normal file
@@ -0,0 +1,383 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from leggend.config import config
|
||||
import leggen.database.sqlite as sqlite_db
|
||||
|
||||
|
||||
class DatabaseService:
|
||||
def __init__(self):
|
||||
self.db_config = config.database_config
|
||||
self.sqlite_enabled = self.db_config.get("sqlite", True)
|
||||
|
||||
async def persist_balance(
|
||||
self, account_id: str, balance_data: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Persist account balance data"""
|
||||
if not self.sqlite_enabled:
|
||||
logger.warning("SQLite database disabled, skipping balance persistence")
|
||||
return
|
||||
|
||||
await self._persist_balance_sqlite(account_id, balance_data)
|
||||
|
||||
async def persist_transactions(
|
||||
self, account_id: str, transactions: List[Dict[str, Any]]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Persist transactions and return new transactions"""
|
||||
if not self.sqlite_enabled:
|
||||
logger.warning("SQLite database disabled, skipping transaction persistence")
|
||||
return transactions
|
||||
|
||||
return await self._persist_transactions_sqlite(account_id, transactions)
|
||||
|
||||
def process_transactions(
|
||||
self,
|
||||
account_id: str,
|
||||
account_info: Dict[str, Any],
|
||||
transaction_data: Dict[str, Any],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Process raw transaction data into standardized format"""
|
||||
transactions = []
|
||||
|
||||
# Process booked transactions
|
||||
for transaction in transaction_data.get("transactions", {}).get("booked", []):
|
||||
processed = self._process_single_transaction(
|
||||
account_id, account_info, transaction, "booked"
|
||||
)
|
||||
transactions.append(processed)
|
||||
|
||||
# Process pending transactions
|
||||
for transaction in transaction_data.get("transactions", {}).get("pending", []):
|
||||
processed = self._process_single_transaction(
|
||||
account_id, account_info, transaction, "pending"
|
||||
)
|
||||
transactions.append(processed)
|
||||
|
||||
return transactions
|
||||
|
||||
def _process_single_transaction(
|
||||
self,
|
||||
account_id: str,
|
||||
account_info: Dict[str, Any],
|
||||
transaction: Dict[str, Any],
|
||||
status: str,
|
||||
) -> Dict[str, Any]:
|
||||
"""Process a single transaction into standardized format"""
|
||||
# Extract dates
|
||||
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||
"bookingDate"
|
||||
)
|
||||
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||
|
||||
if booked_date and value_date:
|
||||
min_date = min(
|
||||
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||
)
|
||||
else:
|
||||
date_str = booked_date or value_date
|
||||
if not date_str:
|
||||
raise ValueError("No valid date found in transaction")
|
||||
min_date = datetime.fromisoformat(date_str)
|
||||
|
||||
# Extract amount and currency
|
||||
transaction_amount = transaction.get("transactionAmount", {})
|
||||
amount = float(transaction_amount.get("amount", 0))
|
||||
currency = transaction_amount.get("currency", "")
|
||||
|
||||
# Extract description
|
||||
description = transaction.get(
|
||||
"remittanceInformationUnstructured",
|
||||
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||
)
|
||||
|
||||
return {
|
||||
"internalTransactionId": transaction.get("internalTransactionId"),
|
||||
"institutionId": account_info["institution_id"],
|
||||
"iban": account_info.get("iban", "N/A"),
|
||||
"transactionDate": min_date,
|
||||
"description": description,
|
||||
"transactionValue": amount,
|
||||
"transactionCurrency": currency,
|
||||
"transactionStatus": status,
|
||||
"accountId": account_id,
|
||||
"rawTransaction": transaction,
|
||||
}
|
||||
|
||||
async def get_transactions_from_db(
|
||||
self,
|
||||
account_id: Optional[str] = None,
|
||||
limit: Optional[int] = 100,
|
||||
offset: Optional[int] = 0,
|
||||
date_from: Optional[str] = None,
|
||||
date_to: Optional[str] = None,
|
||||
min_amount: Optional[float] = None,
|
||||
max_amount: Optional[float] = None,
|
||||
search: Optional[str] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get transactions from SQLite database"""
|
||||
if not self.sqlite_enabled:
|
||||
logger.warning("SQLite database disabled, cannot read transactions")
|
||||
return []
|
||||
|
||||
try:
|
||||
transactions = sqlite_db.get_transactions(
|
||||
account_id=account_id,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
date_from=date_from,
|
||||
date_to=date_to,
|
||||
min_amount=min_amount,
|
||||
max_amount=max_amount,
|
||||
search=search,
|
||||
)
|
||||
logger.debug(f"Retrieved {len(transactions)} transactions from database")
|
||||
return transactions
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get transactions from database: {e}")
|
||||
return []
|
||||
|
||||
async def get_transaction_count_from_db(
|
||||
self,
|
||||
account_id: Optional[str] = None,
|
||||
date_from: Optional[str] = None,
|
||||
date_to: Optional[str] = None,
|
||||
min_amount: Optional[float] = None,
|
||||
max_amount: Optional[float] = None,
|
||||
search: Optional[str] = None,
|
||||
) -> int:
|
||||
"""Get total count of transactions from SQLite database"""
|
||||
if not self.sqlite_enabled:
|
||||
return 0
|
||||
|
||||
try:
|
||||
filters = {
|
||||
"date_from": date_from,
|
||||
"date_to": date_to,
|
||||
"min_amount": min_amount,
|
||||
"max_amount": max_amount,
|
||||
"search": search,
|
||||
}
|
||||
# Remove None values
|
||||
filters = {k: v for k, v in filters.items() if v is not None}
|
||||
|
||||
count = sqlite_db.get_transaction_count(account_id=account_id, **filters)
|
||||
logger.debug(f"Total transaction count: {count}")
|
||||
return count
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get transaction count from database: {e}")
|
||||
return 0
|
||||
|
||||
async def get_balances_from_db(
|
||||
self, account_id: Optional[str] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get balances from SQLite database"""
|
||||
if not self.sqlite_enabled:
|
||||
logger.warning("SQLite database disabled, cannot read balances")
|
||||
return []
|
||||
|
||||
try:
|
||||
balances = sqlite_db.get_balances(account_id=account_id)
|
||||
logger.debug(f"Retrieved {len(balances)} balances from database")
|
||||
return balances
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get balances from database: {e}")
|
||||
return []
|
||||
|
||||
async def get_account_summary_from_db(
|
||||
self, account_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""Get basic account info from SQLite database (avoids GoCardless call)"""
|
||||
if not self.sqlite_enabled:
|
||||
return None
|
||||
|
||||
try:
|
||||
summary = sqlite_db.get_account_summary(account_id)
|
||||
if summary:
|
||||
logger.debug(
|
||||
f"Retrieved account summary from database for {account_id}"
|
||||
)
|
||||
return summary
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get account summary from database: {e}")
|
||||
return None
|
||||
|
||||
async def _persist_balance_sqlite(
|
||||
self, account_id: str, balance_data: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Persist balance to SQLite"""
|
||||
try:
|
||||
import sqlite3
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the balances table if it doesn't exist
|
||||
cursor.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS balances (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
account_id TEXT,
|
||||
bank TEXT,
|
||||
status TEXT,
|
||||
iban TEXT,
|
||||
amount REAL,
|
||||
currency TEXT,
|
||||
type TEXT,
|
||||
timestamp DATETIME
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for better performance
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_account_id
|
||||
ON balances(account_id)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_timestamp
|
||||
ON balances(timestamp)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp
|
||||
ON balances(account_id, type, timestamp)"""
|
||||
)
|
||||
|
||||
# Convert GoCardless balance format to our format and persist
|
||||
for balance in balance_data.get("balances", []):
|
||||
balance_amount = balance["balanceAmount"]
|
||||
|
||||
try:
|
||||
cursor.execute(
|
||||
"""INSERT INTO balances (
|
||||
account_id,
|
||||
bank,
|
||||
status,
|
||||
iban,
|
||||
amount,
|
||||
currency,
|
||||
type,
|
||||
timestamp
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
account_id,
|
||||
balance_data.get("institution_id", "unknown"),
|
||||
"active",
|
||||
balance_data.get("iban", "N/A"),
|
||||
float(balance_amount["amount"]),
|
||||
balance_amount["currency"],
|
||||
balance["balanceType"],
|
||||
datetime.now(),
|
||||
),
|
||||
)
|
||||
except sqlite3.IntegrityError:
|
||||
logger.warning(f"Skipped duplicate balance for {account_id}")
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
logger.info(f"Persisted balances to SQLite for account {account_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to persist balances to SQLite: {e}")
|
||||
raise
|
||||
|
||||
async def _persist_transactions_sqlite(
|
||||
self, account_id: str, transactions: List[Dict[str, Any]]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Persist transactions to SQLite"""
|
||||
try:
|
||||
import sqlite3
|
||||
import json
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
db_path = Path.home() / ".config" / "leggen" / "leggen.db"
|
||||
db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the transactions table if it doesn't exist
|
||||
cursor.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS transactions (
|
||||
internalTransactionId TEXT PRIMARY KEY,
|
||||
institutionId TEXT,
|
||||
iban TEXT,
|
||||
transactionDate DATETIME,
|
||||
description TEXT,
|
||||
transactionValue REAL,
|
||||
transactionCurrency TEXT,
|
||||
transactionStatus TEXT,
|
||||
accountId TEXT,
|
||||
rawTransaction JSON
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for better performance
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_id
|
||||
ON transactions(accountId)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_date
|
||||
ON transactions(transactionDate)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_date
|
||||
ON transactions(accountId, transactionDate)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_transactions_amount
|
||||
ON transactions(transactionValue)"""
|
||||
)
|
||||
|
||||
# Prepare an SQL statement for inserting data
|
||||
insert_sql = """INSERT INTO transactions (
|
||||
internalTransactionId,
|
||||
institutionId,
|
||||
iban,
|
||||
transactionDate,
|
||||
description,
|
||||
transactionValue,
|
||||
transactionCurrency,
|
||||
transactionStatus,
|
||||
accountId,
|
||||
rawTransaction
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
|
||||
|
||||
new_transactions = []
|
||||
|
||||
for transaction in transactions:
|
||||
try:
|
||||
cursor.execute(
|
||||
insert_sql,
|
||||
(
|
||||
transaction["internalTransactionId"],
|
||||
transaction["institutionId"],
|
||||
transaction["iban"],
|
||||
transaction["transactionDate"],
|
||||
transaction["description"],
|
||||
transaction["transactionValue"],
|
||||
transaction["transactionCurrency"],
|
||||
transaction["transactionStatus"],
|
||||
transaction["accountId"],
|
||||
json.dumps(transaction["rawTransaction"]),
|
||||
),
|
||||
)
|
||||
new_transactions.append(transaction)
|
||||
except sqlite3.IntegrityError:
|
||||
# Transaction already exists
|
||||
continue
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
logger.info(
|
||||
f"Persisted {len(new_transactions)} new transactions to SQLite for account {account_id}"
|
||||
)
|
||||
return new_transactions
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to persist transactions to SQLite: {e}")
|
||||
raise
|
||||
174
leggend/services/gocardless_service.py
Normal file
174
leggend/services/gocardless_service.py
Normal file
@@ -0,0 +1,174 @@
|
||||
import json
|
||||
import httpx
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from leggend.config import config
|
||||
|
||||
|
||||
def _log_rate_limits(response):
|
||||
"""Log GoCardless API rate limit headers"""
|
||||
limit = response.headers.get("X-RateLimit-Limit")
|
||||
remaining = response.headers.get("X-RateLimit-Remaining")
|
||||
reset = response.headers.get("X-RateLimit-Reset")
|
||||
account_success_reset = response.headers.get("X-RateLimit-Account-Success-Reset")
|
||||
|
||||
if limit or remaining or reset or account_success_reset:
|
||||
logger.info(
|
||||
f"GoCardless rate limits - Limit: {limit}, Remaining: {remaining}, Reset: {reset}s, Account Success Reset: {account_success_reset}"
|
||||
)
|
||||
|
||||
|
||||
class GoCardlessService:
|
||||
def __init__(self):
|
||||
self.config = config.gocardless_config
|
||||
self.base_url = self.config.get(
|
||||
"url", "https://bankaccountdata.gocardless.com/api/v2"
|
||||
)
|
||||
self._token = None
|
||||
|
||||
async def _get_auth_headers(self) -> Dict[str, str]:
|
||||
"""Get authentication headers for GoCardless API"""
|
||||
token = await self._get_token()
|
||||
return {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
|
||||
|
||||
async def _get_token(self) -> str:
|
||||
"""Get access token for GoCardless API"""
|
||||
if self._token:
|
||||
return self._token
|
||||
|
||||
# Use ~/.config/leggen for consistency with main config
|
||||
auth_file = Path.home() / ".config" / "leggen" / "auth.json"
|
||||
|
||||
if auth_file.exists():
|
||||
try:
|
||||
with open(auth_file, "r") as f:
|
||||
auth = json.load(f)
|
||||
|
||||
if auth.get("access"):
|
||||
# Try to refresh the token
|
||||
async with httpx.AsyncClient() as client:
|
||||
try:
|
||||
response = await client.post(
|
||||
f"{self.base_url}/token/refresh/",
|
||||
json={"refresh": auth["refresh"]},
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
auth.update(response.json())
|
||||
self._save_auth(auth)
|
||||
self._token = auth["access"]
|
||||
return self._token
|
||||
except httpx.HTTPStatusError:
|
||||
logger.warning("Token refresh failed, creating new token")
|
||||
return await self._create_token()
|
||||
else:
|
||||
return await self._create_token()
|
||||
except Exception as e:
|
||||
logger.error(f"Error reading auth file: {e}")
|
||||
return await self._create_token()
|
||||
else:
|
||||
return await self._create_token()
|
||||
|
||||
async def _create_token(self) -> str:
|
||||
"""Create a new GoCardless access token"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
f"{self.base_url}/token/new/",
|
||||
json={
|
||||
"secret_id": self.config["key"],
|
||||
"secret_key": self.config["secret"],
|
||||
},
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
auth = response.json()
|
||||
self._save_auth(auth)
|
||||
self._token = auth["access"]
|
||||
return self._token
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create GoCardless token: {e}")
|
||||
raise
|
||||
|
||||
def _save_auth(self, auth_data: dict):
|
||||
"""Save authentication data to file"""
|
||||
auth_file = Path.home() / ".config" / "leggen" / "auth.json"
|
||||
auth_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
with open(auth_file, "w") as f:
|
||||
json.dump(auth_data, f)
|
||||
|
||||
async def get_institutions(self, country: str = "PT") -> List[Dict[str, Any]]:
|
||||
"""Get available bank institutions for a country"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{self.base_url}/institutions/",
|
||||
headers=headers,
|
||||
params={"country": country},
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
async def create_requisition(
|
||||
self, institution_id: str, redirect_url: str
|
||||
) -> Dict[str, Any]:
|
||||
"""Create a bank connection requisition"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(
|
||||
f"{self.base_url}/requisitions/",
|
||||
headers=headers,
|
||||
json={"institution_id": institution_id, "redirect": redirect_url},
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
async def get_requisitions(self) -> Dict[str, Any]:
|
||||
"""Get all requisitions"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{self.base_url}/requisitions/", headers=headers
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
async def get_account_details(self, account_id: str) -> Dict[str, Any]:
|
||||
"""Get account details"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{self.base_url}/accounts/{account_id}/", headers=headers
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
async def get_account_balances(self, account_id: str) -> Dict[str, Any]:
|
||||
"""Get account balances"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{self.base_url}/accounts/{account_id}/balances/", headers=headers
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
async def get_account_transactions(self, account_id: str) -> Dict[str, Any]:
|
||||
"""Get account transactions"""
|
||||
headers = await self._get_auth_headers()
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{self.base_url}/accounts/{account_id}/transactions/", headers=headers
|
||||
)
|
||||
_log_rate_limits(response)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
132
leggend/services/notification_service.py
Normal file
132
leggend/services/notification_service.py
Normal file
@@ -0,0 +1,132 @@
|
||||
from typing import List, Dict, Any
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from leggend.config import config
|
||||
|
||||
|
||||
class NotificationService:
|
||||
def __init__(self):
|
||||
self.notifications_config = config.notifications_config
|
||||
self.filters_config = config.filters_config
|
||||
|
||||
async def send_transaction_notifications(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> None:
|
||||
"""Send notifications for new transactions that match filters"""
|
||||
if not self.filters_config:
|
||||
logger.info("No notification filters configured, skipping notifications")
|
||||
return
|
||||
|
||||
# Filter transactions that match notification criteria
|
||||
matching_transactions = self._filter_transactions(transactions)
|
||||
|
||||
if not matching_transactions:
|
||||
logger.info("No transactions matched notification filters")
|
||||
return
|
||||
|
||||
# Send to enabled notification services
|
||||
if self._is_discord_enabled():
|
||||
await self._send_discord_notifications(matching_transactions)
|
||||
|
||||
if self._is_telegram_enabled():
|
||||
await self._send_telegram_notifications(matching_transactions)
|
||||
|
||||
async def send_test_notification(self, service: str, message: str) -> bool:
|
||||
"""Send a test notification"""
|
||||
try:
|
||||
if service == "discord" and self._is_discord_enabled():
|
||||
await self._send_discord_test(message)
|
||||
return True
|
||||
elif service == "telegram" and self._is_telegram_enabled():
|
||||
await self._send_telegram_test(message)
|
||||
return True
|
||||
else:
|
||||
logger.error(
|
||||
f"Notification service '{service}' not enabled or not found"
|
||||
)
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send test notification to {service}: {e}")
|
||||
return False
|
||||
|
||||
async def send_expiry_notification(self, notification_data: Dict[str, Any]) -> None:
|
||||
"""Send notification about account expiry"""
|
||||
if self._is_discord_enabled():
|
||||
await self._send_discord_expiry(notification_data)
|
||||
|
||||
if self._is_telegram_enabled():
|
||||
await self._send_telegram_expiry(notification_data)
|
||||
|
||||
def _filter_transactions(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Filter transactions based on notification criteria"""
|
||||
matching = []
|
||||
filters_case_insensitive = self.filters_config.get("case-insensitive", {})
|
||||
|
||||
for transaction in transactions:
|
||||
description = transaction.get("description", "").lower()
|
||||
|
||||
# Check case-insensitive filters
|
||||
for _filter_name, filter_value in filters_case_insensitive.items():
|
||||
if filter_value.lower() in description:
|
||||
matching.append(
|
||||
{
|
||||
"name": transaction["description"],
|
||||
"value": transaction["transactionValue"],
|
||||
"currency": transaction["transactionCurrency"],
|
||||
"date": transaction["transactionDate"],
|
||||
}
|
||||
)
|
||||
break
|
||||
|
||||
return matching
|
||||
|
||||
def _is_discord_enabled(self) -> bool:
|
||||
"""Check if Discord notifications are enabled"""
|
||||
discord_config = self.notifications_config.get("discord", {})
|
||||
return bool(
|
||||
discord_config.get("webhook") and discord_config.get("enabled", True)
|
||||
)
|
||||
|
||||
def _is_telegram_enabled(self) -> bool:
|
||||
"""Check if Telegram notifications are enabled"""
|
||||
telegram_config = self.notifications_config.get("telegram", {})
|
||||
return bool(
|
||||
telegram_config.get("token")
|
||||
and telegram_config.get("chat_id")
|
||||
and telegram_config.get("enabled", True)
|
||||
)
|
||||
|
||||
async def _send_discord_notifications(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> None:
|
||||
"""Send Discord notifications - placeholder implementation"""
|
||||
# Would import and use leggen.notifications.discord
|
||||
logger.info(f"Sending {len(transactions)} transaction notifications to Discord")
|
||||
|
||||
async def _send_telegram_notifications(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> None:
|
||||
"""Send Telegram notifications - placeholder implementation"""
|
||||
# Would import and use leggen.notifications.telegram
|
||||
logger.info(
|
||||
f"Sending {len(transactions)} transaction notifications to Telegram"
|
||||
)
|
||||
|
||||
async def _send_discord_test(self, message: str) -> None:
|
||||
"""Send Discord test notification"""
|
||||
logger.info(f"Sending Discord test: {message}")
|
||||
|
||||
async def _send_telegram_test(self, message: str) -> None:
|
||||
"""Send Telegram test notification"""
|
||||
logger.info(f"Sending Telegram test: {message}")
|
||||
|
||||
async def _send_discord_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||
"""Send Discord expiry notification"""
|
||||
logger.info(f"Sending Discord expiry notification: {notification_data}")
|
||||
|
||||
async def _send_telegram_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||
"""Send Telegram expiry notification"""
|
||||
logger.info(f"Sending Telegram expiry notification: {notification_data}")
|
||||
153
leggend/services/sync_service.py
Normal file
153
leggend/services/sync_service.py
Normal file
@@ -0,0 +1,153 @@
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from leggend.api.models.sync import SyncResult, SyncStatus
|
||||
from leggend.services.gocardless_service import GoCardlessService
|
||||
from leggend.services.database_service import DatabaseService
|
||||
from leggend.services.notification_service import NotificationService
|
||||
|
||||
|
||||
class SyncService:
|
||||
def __init__(self):
|
||||
self.gocardless = GoCardlessService()
|
||||
self.database = DatabaseService()
|
||||
self.notifications = NotificationService()
|
||||
self._sync_status = SyncStatus(is_running=False)
|
||||
|
||||
async def get_sync_status(self) -> SyncStatus:
|
||||
"""Get current sync status"""
|
||||
return self._sync_status
|
||||
|
||||
async def sync_all_accounts(self, force: bool = False) -> SyncResult:
|
||||
"""Sync all connected accounts"""
|
||||
if self._sync_status.is_running and not force:
|
||||
raise Exception("Sync is already running")
|
||||
|
||||
start_time = datetime.now()
|
||||
self._sync_status.is_running = True
|
||||
self._sync_status.errors = []
|
||||
|
||||
accounts_processed = 0
|
||||
transactions_added = 0
|
||||
transactions_updated = 0
|
||||
balances_updated = 0
|
||||
errors = []
|
||||
|
||||
try:
|
||||
logger.info("Starting sync of all accounts")
|
||||
|
||||
# Get all requisitions and accounts
|
||||
requisitions = await self.gocardless.get_requisitions()
|
||||
all_accounts = set()
|
||||
|
||||
for req in requisitions.get("results", []):
|
||||
all_accounts.update(req.get("accounts", []))
|
||||
|
||||
self._sync_status.total_accounts = len(all_accounts)
|
||||
|
||||
# Process each account
|
||||
for account_id in all_accounts:
|
||||
try:
|
||||
# Get account details
|
||||
account_details = await self.gocardless.get_account_details(
|
||||
account_id
|
||||
)
|
||||
|
||||
# Get and save balances
|
||||
balances = await self.gocardless.get_account_balances(account_id)
|
||||
if balances:
|
||||
await self.database.persist_balance(account_id, balances)
|
||||
balances_updated += len(balances.get("balances", []))
|
||||
|
||||
# Get and save transactions
|
||||
transactions = await self.gocardless.get_account_transactions(
|
||||
account_id
|
||||
)
|
||||
if transactions:
|
||||
processed_transactions = self.database.process_transactions(
|
||||
account_id, account_details, transactions
|
||||
)
|
||||
new_transactions = await self.database.persist_transactions(
|
||||
account_id, processed_transactions
|
||||
)
|
||||
transactions_added += len(new_transactions)
|
||||
|
||||
# Send notifications for new transactions
|
||||
if new_transactions:
|
||||
await self.notifications.send_transaction_notifications(
|
||||
new_transactions
|
||||
)
|
||||
|
||||
accounts_processed += 1
|
||||
self._sync_status.accounts_synced = accounts_processed
|
||||
|
||||
logger.info(f"Synced account {account_id} successfully")
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to sync account {account_id}: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
logger.error(error_msg)
|
||||
|
||||
end_time = datetime.now()
|
||||
duration = (end_time - start_time).total_seconds()
|
||||
|
||||
self._sync_status.last_sync = end_time
|
||||
|
||||
result = SyncResult(
|
||||
success=len(errors) == 0,
|
||||
accounts_processed=accounts_processed,
|
||||
transactions_added=transactions_added,
|
||||
transactions_updated=transactions_updated,
|
||||
balances_updated=balances_updated,
|
||||
duration_seconds=duration,
|
||||
errors=errors,
|
||||
started_at=start_time,
|
||||
completed_at=end_time,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Sync completed: {accounts_processed} accounts, {transactions_added} new transactions"
|
||||
)
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"Sync failed: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
logger.error(error_msg)
|
||||
raise
|
||||
finally:
|
||||
self._sync_status.is_running = False
|
||||
|
||||
async def sync_specific_accounts(
|
||||
self, account_ids: List[str], force: bool = False
|
||||
) -> SyncResult:
|
||||
"""Sync specific accounts"""
|
||||
if self._sync_status.is_running and not force:
|
||||
raise Exception("Sync is already running")
|
||||
|
||||
# Similar implementation but only for specified accounts
|
||||
# For brevity, implementing a simplified version
|
||||
start_time = datetime.now()
|
||||
self._sync_status.is_running = True
|
||||
|
||||
try:
|
||||
# Process only specified accounts
|
||||
# Implementation would be similar to sync_all_accounts
|
||||
# but filtered to only the specified account_ids
|
||||
|
||||
end_time = datetime.now()
|
||||
return SyncResult(
|
||||
success=True,
|
||||
accounts_processed=len(account_ids),
|
||||
transactions_added=0,
|
||||
transactions_updated=0,
|
||||
balances_updated=0,
|
||||
duration_seconds=(end_time - start_time).total_seconds(),
|
||||
errors=[],
|
||||
started_at=start_time,
|
||||
completed_at=end_time,
|
||||
)
|
||||
finally:
|
||||
self._sync_status.is_running = False
|
||||
10
leggend/utils/gocardless.py
Normal file
10
leggend/utils/gocardless.py
Normal file
@@ -0,0 +1,10 @@
|
||||
REQUISITION_STATUS = {
|
||||
"CR": "CREATED",
|
||||
"GC": "GIVING_CONSENT",
|
||||
"UA": "UNDERGOING_AUTHENTICATION",
|
||||
"RJ": "REJECTED",
|
||||
"SA": "SELECTING_ACCOUNTS",
|
||||
"GA": "GRANTING_ACCESS",
|
||||
"LN": "LINKED",
|
||||
"EX": "EXPIRED",
|
||||
}
|
||||
623
poetry.lock
generated
623
poetry.lock
generated
@@ -1,623 +0,0 @@
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "24.4.2"
|
||||
description = "The uncompromising code formatter."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
|
||||
{file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
|
||||
{file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
|
||||
{file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
|
||||
{file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
|
||||
{file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
|
||||
{file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
|
||||
{file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
|
||||
{file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
|
||||
{file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
|
||||
{file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
|
||||
{file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
|
||||
{file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
|
||||
{file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
|
||||
{file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
|
||||
{file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
|
||||
{file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
|
||||
{file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
|
||||
{file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
|
||||
{file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
|
||||
{file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
|
||||
{file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=8.0.0"
|
||||
mypy-extensions = ">=0.4.3"
|
||||
packaging = ">=22.0"
|
||||
pathspec = ">=0.9.0"
|
||||
platformdirs = ">=2"
|
||||
|
||||
[package.extras]
|
||||
colorama = ["colorama (>=0.4.3)"]
|
||||
d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
|
||||
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
|
||||
uvloop = ["uvloop (>=0.15.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2024.6.2"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
|
||||
{file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfgv"
|
||||
version = "3.4.0"
|
||||
description = "Validate configuration and produce human readable error messages."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
|
||||
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.3.2"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
|
||||
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
|
||||
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
|
||||
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
|
||||
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
|
||||
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
|
||||
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
|
||||
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "click"
|
||||
version = "8.1.7"
|
||||
description = "Composable command line interface toolkit"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
|
||||
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "discord-webhook"
|
||||
version = "1.3.1"
|
||||
description = "Easily send Discord webhooks with Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10,<4.0"
|
||||
files = [
|
||||
{file = "discord_webhook-1.3.1-py3-none-any.whl", hash = "sha256:ede07028316de76d24eb811836e2b818b2017510da786777adcb0d5970e7af79"},
|
||||
{file = "discord_webhook-1.3.1.tar.gz", hash = "sha256:ee3e0f3ea4f3dc8dc42be91f75b894a01624c6c13fea28e23ebcf9a6c9a304f7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
requests = ">=2.28.1,<3.0.0"
|
||||
|
||||
[package.extras]
|
||||
async = ["httpx (>=0.23.0,<0.24.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "distlib"
|
||||
version = "0.3.8"
|
||||
description = "Distribution utilities"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
|
||||
{file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dnspython"
|
||||
version = "2.6.1"
|
||||
description = "DNS toolkit"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
|
||||
{file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
|
||||
dnssec = ["cryptography (>=41)"]
|
||||
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
|
||||
doq = ["aioquic (>=0.9.25)"]
|
||||
idna = ["idna (>=3.6)"]
|
||||
trio = ["trio (>=0.23)"]
|
||||
wmi = ["wmi (>=1.5.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.14.0"
|
||||
description = "A platform independent file lock."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"},
|
||||
{file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
|
||||
typing = ["typing-extensions (>=4.8)"]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.5.36"
|
||||
description = "File identification library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
|
||||
{file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
license = ["ukkonen"]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.7"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
|
||||
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "loguru"
|
||||
version = "0.7.2"
|
||||
description = "Python logging made (stupidly) simple"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
|
||||
{file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
|
||||
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
|
||||
|
||||
[package.extras]
|
||||
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.0.0"
|
||||
description = "Type system extensions for programs checked with the mypy type checker."
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
|
||||
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodeenv"
|
||||
version = "1.9.1"
|
||||
description = "Node.js virtual environment builder"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
|
||||
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "24.0"
|
||||
description = "Core utilities for Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
|
||||
{file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pathspec"
|
||||
version = "0.12.1"
|
||||
description = "Utility library for gitignore style pattern matching of file paths."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
|
||||
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "platformdirs"
|
||||
version = "4.2.2"
|
||||
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
|
||||
{file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
|
||||
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
|
||||
type = ["mypy (>=1.8)"]
|
||||
|
||||
[[package]]
|
||||
name = "pre-commit"
|
||||
version = "3.7.1"
|
||||
description = "A framework for managing and maintaining multi-language pre-commit hooks."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
|
||||
{file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cfgv = ">=2.0.0"
|
||||
identify = ">=1.0.0"
|
||||
nodeenv = ">=0.11.1"
|
||||
pyyaml = ">=5.1"
|
||||
virtualenv = ">=20.10.0"
|
||||
|
||||
[[package]]
|
||||
name = "pymongo"
|
||||
version = "4.7.3"
|
||||
description = "Python driver for MongoDB <http://www.mongodb.org>"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pymongo-4.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e9580b4537b3cc5d412070caabd1dabdf73fdce249793598792bac5782ecf2eb"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:517243b2b189c98004570dd8fc0e89b1a48363d5578b3b99212fa2098b2ea4b8"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23b1e9dabd61da1c7deb54d888f952f030e9e35046cebe89309b28223345b3d9"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03e0f9901ad66c6fb7da0d303461377524d61dab93a4e4e5af44164c5bb4db76"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a870824aa54453aee030bac08c77ebcf2fe8999400f0c2a065bebcbcd46b7f8"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd7b3d3f4261bddbb74a332d87581bc523353e62bb9da4027cc7340f6fcbebc"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d719a643ea6da46d215a3ba51dac805a773b611c641319558d8576cbe31cef8"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-win32.whl", hash = "sha256:d8b1e06f361f3c66ee694cb44326e1a2e4f93bc9c3a4849ae8547889fca71154"},
|
||||
{file = "pymongo-4.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:c450ab2f9397e2d5caa7fddeb4feb30bf719c47c13ae02c0bbb3b71bf4099c1c"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79cc6459209e885ba097779eaa0fe7f2fa049db39ab43b1731cf8d065a4650e8"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e2287f1e2cc35e73cd74a4867e398a97962c5578a3991c730ef78d276ca8e46"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413506bd48d8c31ee100645192171e4773550d7cb940b594d5175ac29e329ea1"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cc1febf17646d52b7561caa762f60bdfe2cbdf3f3e70772f62eb624269f9c05"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8dfcf18a49955d50a16c92b39230bd0668ffc9c164ccdfe9d28805182b48fa72"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89872041196c008caddf905eb59d3dc2d292ae6b0282f1138418e76f3abd3ad6"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3ed97b89de62ea927b672ad524de0d23f3a6b4a01c8d10e3d224abec973fbc3"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-win32.whl", hash = "sha256:d2f52b38151e946011d888a8441d3d75715c663fc5b41a7ade595e924e12a90a"},
|
||||
{file = "pymongo-4.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:4a4cc91c28e81c0ce03d3c278e399311b0af44665668a91828aec16527082676"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cb30c8a78f5ebaca98640943447b6a0afcb146f40b415757c9047bf4a40d07b4"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9cf2069f5d37c398186453589486ea98bb0312214c439f7d320593b61880dc05"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3564f423958fced8a8c90940fd2f543c27adbcd6c7c6ed6715d847053f6200a0"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a8af8a38fa6951fff73e6ff955a6188f829b29fed7c5a1b739a306b4aa56fe8"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a0e81c8dba6d825272867d487f18764cfed3c736d71d7d4ff5b79642acbed42"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88fc1d146feabac4385ea8ddb1323e584922922641303c8bf392fe1c36803463"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4225100b2c5d1f7393d7c5d256ceb8b20766830eecf869f8ae232776347625a6"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-win32.whl", hash = "sha256:5f3569ed119bf99c0f39ac9962fb5591eff02ca210fe80bb5178d7a1171c1b1e"},
|
||||
{file = "pymongo-4.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:eb383c54c0c8ba27e7712b954fcf2a0905fee82a929d277e2e94ad3a5ba3c7db"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a46cffe91912570151617d866a25d07b9539433a32231ca7e7cf809b6ba1745f"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c3cba427dac50944c050c96d958c5e643c33a457acee03bae27c8990c5b9c16"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a5fd893edbeb7fa982f8d44b6dd0186b6cd86c89e23f6ef95049ff72bffe46"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c168a2fadc8b19071d0a9a4f85fe38f3029fe22163db04b4d5c046041c0b14bd"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c59c2c9e70f63a7f18a31e367898248c39c068c639b0579623776f637e8f482"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08165fd82c89d372e82904c3268bd8fe5de44f92a00e97bb1db1785154397d9"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:397fed21afec4fdaecf72f9c4344b692e489756030a9c6d864393e00c7e80491"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f903075f8625e2d228f1b9b9a0cf1385f1c41e93c03fd7536c91780a0fb2e98f"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-win32.whl", hash = "sha256:8ed1132f58c38add6b6138b771d0477a3833023c015c455d9a6e26f367f9eb5c"},
|
||||
{file = "pymongo-4.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8d00a5d8fc1043a4f641cbb321da766699393f1b6f87c70fae8089d61c9c9c54"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9377b868c38700c7557aac1bc4baae29f47f1d279cc76b60436e547fd643318c"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:da4a6a7b4f45329bb135aa5096823637bd5f760b44d6224f98190ee367b6b5dd"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e2f9277f8a63ac89335ec4f1699ae0d96ebd06d239480d69ed25473a71b2c"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db3d608d541a444c84f0bfc7bad80b0b897e0f4afa580a53f9a944065d9b633"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e90af2ad3a8a7c295f4d09a2fbcb9a350c76d6865f787c07fe843b79c6e821d1"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e28feb18dc559d50ededba27f9054c79f80c4edd70a826cecfe68f3266807b3"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f21ecddcba2d9132d5aebd8e959de8d318c29892d0718420447baf2b9bccbb19"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:26140fbb3f6a9a74bd73ed46d0b1f43d5702e87a6e453a31b24fad9c19df9358"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:94baa5fc7f7d22c3ce2ac7bd92f7e03ba7a6875f2480e3b97a400163d6eaafc9"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-win32.whl", hash = "sha256:92dd247727dd83d1903e495acc743ebd757f030177df289e3ba4ef8a8c561fad"},
|
||||
{file = "pymongo-4.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:1c90c848a5e45475731c35097f43026b88ef14a771dfd08f20b67adc160a3f79"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f598be401b416319a535c386ac84f51df38663f7a9d1071922bda4d491564422"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35ba90477fae61c65def6e7d09e8040edfdd3b7fd47c3c258b4edded60c4d625"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aa8735955c70892634d7e61b0ede9b1eefffd3cd09ccabee0ffcf1bdfe62254"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:82a97d8f7f138586d9d0a0cff804a045cdbbfcfc1cd6bba542b151e284fbbec5"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de3b9db558930efab5eaef4db46dcad8bf61ac3ddfd5751b3e5ac6084a25e366"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0e149217ef62812d3c2401cf0e2852b0c57fd155297ecc4dcd67172c4eca402"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3a8a1ef4a824f5feb793b3231526d0045eadb5eb01080e38435dfc40a26c3e5"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d14e5e89a4be1f10efc3d9dcb13eb7a3b2334599cb6bb5d06c6a9281b79c8e22"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6bfa29f032fd4fd7b129520f8cdb51ab71d88c2ba0567cccd05d325f963acb5"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-win32.whl", hash = "sha256:1421d0bd2ce629405f5157bd1aaa9b83f12d53a207cf68a43334f4e4ee312b66"},
|
||||
{file = "pymongo-4.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:f7ee974f8b9370a998919c55b1050889f43815ab588890212023fecbc0402a6d"},
|
||||
{file = "pymongo-4.7.3.tar.gz", hash = "sha256:6354a66b228f2cd399be7429685fb68e07f19110a3679782ecb4fdb68da03831"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
dnspython = ">=1.16.0,<3.0.0"
|
||||
|
||||
[package.extras]
|
||||
aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"]
|
||||
encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"]
|
||||
gssapi = ["pykerberos", "winkerberos (>=0.5.0)"]
|
||||
ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"]
|
||||
snappy = ["python-snappy"]
|
||||
test = ["pytest (>=7)"]
|
||||
zstd = ["zstandard"]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.1"
|
||||
description = "YAML parser and emitter for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
|
||||
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
|
||||
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.3"
|
||||
description = "Python HTTP for Humans."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
|
||||
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<3"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.4.8"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"},
|
||||
{file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"},
|
||||
{file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"},
|
||||
{file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"},
|
||||
{file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"},
|
||||
{file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"},
|
||||
{file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"},
|
||||
{file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tabulate"
|
||||
version = "0.9.0"
|
||||
description = "Pretty-print tabular data"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
|
||||
{file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
widechars = ["wcwidth"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.2.1"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
|
||||
{file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
|
||||
h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "virtualenv"
|
||||
version = "20.26.2"
|
||||
description = "Virtual Python Environment builder"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"},
|
||||
{file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
distlib = ">=0.3.7,<1"
|
||||
filelock = ">=3.12.2,<4"
|
||||
platformdirs = ">=3.9.1,<5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
|
||||
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
|
||||
|
||||
[[package]]
|
||||
name = "win32-setctime"
|
||||
version = "1.1.0"
|
||||
description = "A small Python utility to set file creation time on Windows"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
|
||||
{file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.12"
|
||||
content-hash = "8feb4a1b6f346671e88cc8090be2eaa8f8492acdaf9acbf4388a0b6be9f920e6"
|
||||
101
pyproject.toml
101
pyproject.toml
@@ -1,11 +1,20 @@
|
||||
[tool.poetry]
|
||||
[project]
|
||||
name = "leggen"
|
||||
version = "0.6.0"
|
||||
version = "0.6.11"
|
||||
description = "An Open Banking CLI"
|
||||
authors = ["Elisiário Couto <elisiario@couto.io>"]
|
||||
authors = [{ name = "Elisiário Couto", email = "elisiario@couto.io" }]
|
||||
requires-python = "~=3.12.0"
|
||||
readme = "README.md"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/elisiariocouto/leggen"
|
||||
keywords = [
|
||||
"openbanking",
|
||||
"cli",
|
||||
"psd2",
|
||||
"gocardless",
|
||||
"bank",
|
||||
"transactions",
|
||||
"finance",
|
||||
]
|
||||
classifiers = [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Environment :: Console",
|
||||
@@ -14,40 +23,70 @@ classifiers = [
|
||||
"Topic :: Utilities",
|
||||
"Topic :: Office/Business :: Financial",
|
||||
]
|
||||
keywords = [
|
||||
"openbanking",
|
||||
"cli",
|
||||
"psd2",
|
||||
"gocardless",
|
||||
"mongodb",
|
||||
"bank",
|
||||
"transactions",
|
||||
"finance",
|
||||
dependencies = [
|
||||
"click>=8.1.7,<9",
|
||||
"requests>=2.31.0,<3",
|
||||
"loguru>=0.7.2,<0.8",
|
||||
"tabulate>=0.9.0,<0.10",
|
||||
"discord-webhook>=1.3.1,<2",
|
||||
"fastapi>=0.104.0,<1",
|
||||
"uvicorn[standard]>=0.24.0,<1",
|
||||
"apscheduler>=3.10.0,<4",
|
||||
"tomli-w>=1.0.0,<2",
|
||||
"httpx>=0.28.1",
|
||||
]
|
||||
|
||||
packages = [{ "include" = "leggen" }]
|
||||
[project.urls]
|
||||
Repository = "https://github.com/elisiariocouto/leggen"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.12"
|
||||
click = "^8.1.7"
|
||||
requests = "^2.31.0"
|
||||
loguru = "^0.7.2"
|
||||
tabulate = "^0.9.0"
|
||||
pymongo = "^4.6.1"
|
||||
discord-webhook = "^1.3.1"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
ruff = "^0.4.8"
|
||||
pre-commit = "^3.6.0"
|
||||
black = "^24.4.2"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
[project.scripts]
|
||||
leggen = "leggen.main:cli"
|
||||
leggend = "leggend.main:main"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"ruff>=0.6.1",
|
||||
"pre-commit>=3.6.0",
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.23.0",
|
||||
"pytest-mock>=3.12.0",
|
||||
"respx>=0.21.0",
|
||||
"requests-mock>=1.12.0",
|
||||
"mypy>=1.17.1",
|
||||
"types-tabulate>=0.9.0.20241207",
|
||||
"types-requests>=2.32.4.20250809",
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.sdist]
|
||||
include = ["leggen", "leggend"]
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
include = ["leggen", "leggend"]
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.ruff]
|
||||
lint.ignore = ["E501", "B008", "B006"]
|
||||
lint.extend-select = ["B", "C4", "PIE", "T20", "SIM", "TCH"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = "test_*.py"
|
||||
python_classes = "Test*"
|
||||
python_functions = "test_*"
|
||||
addopts = ["-v", "--tb=short", "--strict-markers", "--disable-warnings"]
|
||||
asyncio_mode = "auto"
|
||||
asyncio_default_fixture_loop_scope = "function"
|
||||
markers = [
|
||||
"unit: Unit tests",
|
||||
"integration: Integration tests",
|
||||
"slow: Slow running tests",
|
||||
"api: API endpoint tests",
|
||||
"cli: CLI command tests",
|
||||
]
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = ["apscheduler.*"]
|
||||
ignore_missing_imports = true
|
||||
|
||||
@@ -11,18 +11,18 @@ function check_command {
|
||||
|
||||
check_command git
|
||||
check_command git-cliff
|
||||
check_command poetry
|
||||
check_command uv
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo " > No semver verb specified, run release with <major|minor|patch> parameter."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CURRENT_VERSION=$(poetry version -s)
|
||||
CURRENT_VERSION=$(uvx poetry version -s)
|
||||
echo " > Current version is $CURRENT_VERSION"
|
||||
|
||||
poetry version "$1"
|
||||
NEXT_VERSION=$(poetry version -s)
|
||||
uvx poetry version "$1"
|
||||
NEXT_VERSION=$(uvx poetry version -s)
|
||||
|
||||
echo " > leggen bumped to $NEXT_VERSION"
|
||||
echo "Updating CHANGELOG.md"
|
||||
|
||||
125
tests/conftest.py
Normal file
125
tests/conftest.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""Pytest configuration and shared fixtures."""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
import json
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from leggend.main import create_app
|
||||
from leggend.config import Config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_config_dir():
|
||||
"""Create a temporary config directory for testing."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
config_dir = Path(tmpdir) / ".config" / "leggen"
|
||||
config_dir.mkdir(parents=True, exist_ok=True)
|
||||
yield config_dir
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config(temp_config_dir):
|
||||
"""Mock configuration for testing."""
|
||||
config_data = {
|
||||
"gocardless": {
|
||||
"key": "test-key",
|
||||
"secret": "test-secret",
|
||||
"url": "https://bankaccountdata.gocardless.com/api/v2",
|
||||
},
|
||||
"database": {"sqlite": True},
|
||||
"scheduler": {"sync": {"enabled": True, "hour": 3, "minute": 0}},
|
||||
}
|
||||
|
||||
config_file = temp_config_dir / "config.toml"
|
||||
with open(config_file, "wb") as f:
|
||||
import tomli_w
|
||||
|
||||
tomli_w.dump(config_data, f)
|
||||
|
||||
# Mock the config path
|
||||
with patch.object(Config, "load_config") as mock_load:
|
||||
mock_load.return_value = config_data
|
||||
config = Config()
|
||||
config._config = config_data
|
||||
config._config_path = str(config_file)
|
||||
yield config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_auth_token(temp_config_dir):
|
||||
"""Mock GoCardless authentication token."""
|
||||
auth_data = {"access": "mock-access-token", "refresh": "mock-refresh-token"}
|
||||
|
||||
auth_file = temp_config_dir / "auth.json"
|
||||
with open(auth_file, "w") as f:
|
||||
json.dump(auth_data, f)
|
||||
|
||||
return auth_data
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def fastapi_app():
|
||||
"""Create FastAPI test application."""
|
||||
return create_app()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_client(fastapi_app):
|
||||
"""Create FastAPI test client."""
|
||||
return TestClient(fastapi_app)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_bank_data():
|
||||
"""Sample bank/institution data for testing."""
|
||||
return [
|
||||
{
|
||||
"id": "REVOLUT_REVOLT21",
|
||||
"name": "Revolut",
|
||||
"bic": "REVOLT21",
|
||||
"transaction_total_days": 90,
|
||||
"countries": ["GB", "LT"],
|
||||
},
|
||||
{
|
||||
"id": "BANCOBPI_BBPIPTPL",
|
||||
"name": "Banco BPI",
|
||||
"bic": "BBPIPTPL",
|
||||
"transaction_total_days": 90,
|
||||
"countries": ["PT"],
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_account_data():
|
||||
"""Sample account data for testing."""
|
||||
return {
|
||||
"id": "test-account-123",
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"status": "READY",
|
||||
"iban": "LT313250081177977789",
|
||||
"created": "2024-02-13T23:56:00Z",
|
||||
"last_accessed": "2025-09-01T09:30:00Z",
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_transaction_data():
|
||||
"""Sample transaction data for testing."""
|
||||
return {
|
||||
"transactions": {
|
||||
"booked": [
|
||||
{
|
||||
"internalTransactionId": "txn-123",
|
||||
"bookingDate": "2025-09-01",
|
||||
"valueDate": "2025-09-01",
|
||||
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||
"remittanceInformationUnstructured": "Coffee Shop Payment",
|
||||
}
|
||||
],
|
||||
"pending": [],
|
||||
}
|
||||
}
|
||||
269
tests/unit/test_api_accounts.py
Normal file
269
tests/unit/test_api_accounts.py
Normal file
@@ -0,0 +1,269 @@
|
||||
"""Tests for accounts API endpoints."""
|
||||
|
||||
import pytest
|
||||
import respx
|
||||
import httpx
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
class TestAccountsAPI:
|
||||
"""Test account-related API endpoints."""
|
||||
|
||||
@respx.mock
|
||||
def test_get_all_accounts_success(
|
||||
self, api_client, mock_config, mock_auth_token, sample_account_data
|
||||
):
|
||||
"""Test successful retrieval of all accounts."""
|
||||
requisitions_data = {
|
||||
"results": [{"id": "req-123", "accounts": ["test-account-123"]}]
|
||||
}
|
||||
|
||||
balances_data = {
|
||||
"balances": [
|
||||
{
|
||||
"balanceAmount": {"amount": "100.50", "currency": "EUR"},
|
||||
"balanceType": "interimAvailable",
|
||||
"lastChangeDateTime": "2025-09-01T09:30:00Z",
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock GoCardless API calls
|
||||
respx.get("https://bankaccountdata.gocardless.com/api/v2/requisitions/").mock(
|
||||
return_value=httpx.Response(200, json=requisitions_data)
|
||||
)
|
||||
respx.get(
|
||||
"https://bankaccountdata.gocardless.com/api/v2/accounts/test-account-123/"
|
||||
).mock(return_value=httpx.Response(200, json=sample_account_data))
|
||||
respx.get(
|
||||
"https://bankaccountdata.gocardless.com/api/v2/accounts/test-account-123/balances/"
|
||||
).mock(return_value=httpx.Response(200, json=balances_data))
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/accounts")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 1
|
||||
account = data["data"][0]
|
||||
assert account["id"] == "test-account-123"
|
||||
assert account["institution_id"] == "REVOLUT_REVOLT21"
|
||||
assert len(account["balances"]) == 1
|
||||
assert account["balances"][0]["amount"] == 100.50
|
||||
|
||||
@respx.mock
|
||||
def test_get_account_details_success(
|
||||
self, api_client, mock_config, mock_auth_token, sample_account_data
|
||||
):
|
||||
"""Test successful retrieval of specific account details."""
|
||||
balances_data = {
|
||||
"balances": [
|
||||
{
|
||||
"balanceAmount": {"amount": "250.75", "currency": "EUR"},
|
||||
"balanceType": "interimAvailable",
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock GoCardless API calls
|
||||
respx.get(
|
||||
"https://bankaccountdata.gocardless.com/api/v2/accounts/test-account-123/"
|
||||
).mock(return_value=httpx.Response(200, json=sample_account_data))
|
||||
respx.get(
|
||||
"https://bankaccountdata.gocardless.com/api/v2/accounts/test-account-123/balances/"
|
||||
).mock(return_value=httpx.Response(200, json=balances_data))
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/accounts/test-account-123")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
account = data["data"]
|
||||
assert account["id"] == "test-account-123"
|
||||
assert account["iban"] == "LT313250081177977789"
|
||||
assert len(account["balances"]) == 1
|
||||
|
||||
def test_get_account_balances_success(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test successful retrieval of account balances from database."""
|
||||
mock_balances = [
|
||||
{
|
||||
"id": 1,
|
||||
"account_id": "test-account-123",
|
||||
"bank": "REVOLUT_REVOLT21",
|
||||
"status": "active",
|
||||
"iban": "LT313250081177977789",
|
||||
"amount": 1000.00,
|
||||
"currency": "EUR",
|
||||
"type": "interimAvailable",
|
||||
"timestamp": "2025-09-01T10:00:00Z",
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"account_id": "test-account-123",
|
||||
"bank": "REVOLUT_REVOLT21",
|
||||
"status": "active",
|
||||
"iban": "LT313250081177977789",
|
||||
"amount": 950.00,
|
||||
"currency": "EUR",
|
||||
"type": "expected",
|
||||
"timestamp": "2025-09-01T10:00:00Z",
|
||||
},
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||
return_value=mock_balances,
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/accounts/test-account-123/balances")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 2
|
||||
assert data["data"][0]["amount"] == 1000.00
|
||||
assert data["data"][0]["currency"] == "EUR"
|
||||
assert data["data"][0]["balance_type"] == "interimAvailable"
|
||||
|
||||
def test_get_account_transactions_success(
|
||||
self,
|
||||
api_client,
|
||||
mock_config,
|
||||
mock_auth_token,
|
||||
sample_account_data,
|
||||
sample_transaction_data,
|
||||
):
|
||||
"""Test successful retrieval of account transactions from database."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-123",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": "2025-09-01T09:30:00Z",
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "data"},
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
response = api_client.get(
|
||||
"/api/v1/accounts/test-account-123/transactions?summary_only=true"
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 1
|
||||
|
||||
transaction = data["data"][0]
|
||||
assert transaction["internal_transaction_id"] == "txn-123"
|
||||
assert transaction["amount"] == -10.50
|
||||
assert transaction["currency"] == "EUR"
|
||||
assert transaction["description"] == "Coffee Shop Payment"
|
||||
|
||||
def test_get_account_transactions_full_details(
|
||||
self,
|
||||
api_client,
|
||||
mock_config,
|
||||
mock_auth_token,
|
||||
sample_account_data,
|
||||
sample_transaction_data,
|
||||
):
|
||||
"""Test retrieval of full transaction details from database."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-123",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": "2025-09-01T09:30:00Z",
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "raw_data"},
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
response = api_client.get(
|
||||
"/api/v1/accounts/test-account-123/transactions?summary_only=false"
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 1
|
||||
|
||||
transaction = data["data"][0]
|
||||
assert transaction["internal_transaction_id"] == "txn-123"
|
||||
assert transaction["institution_id"] == "REVOLUT_REVOLT21"
|
||||
assert transaction["iban"] == "LT313250081177977789"
|
||||
assert "raw_transaction" in transaction
|
||||
|
||||
def test_get_account_not_found(self, api_client, mock_config, mock_auth_token):
|
||||
"""Test handling of non-existent account."""
|
||||
# Mock 404 response from GoCardless
|
||||
with respx.mock:
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
respx.get(
|
||||
"https://bankaccountdata.gocardless.com/api/v2/accounts/nonexistent/"
|
||||
).mock(
|
||||
return_value=httpx.Response(404, json={"detail": "Account not found"})
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/accounts/nonexistent")
|
||||
|
||||
assert response.status_code == 404
|
||||
163
tests/unit/test_api_banks.py
Normal file
163
tests/unit/test_api_banks.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""Tests for banks API endpoints."""
|
||||
|
||||
import pytest
|
||||
import respx
|
||||
import httpx
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
class TestBanksAPI:
|
||||
"""Test bank-related API endpoints."""
|
||||
|
||||
@respx.mock
|
||||
def test_get_institutions_success(
|
||||
self, api_client, mock_config, mock_auth_token, sample_bank_data
|
||||
):
|
||||
"""Test successful retrieval of bank institutions."""
|
||||
# Mock GoCardless token creation/refresh
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock GoCardless institutions API
|
||||
respx.get("https://bankaccountdata.gocardless.com/api/v2/institutions/").mock(
|
||||
return_value=httpx.Response(200, json=sample_bank_data)
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions?country=PT")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 2
|
||||
assert data["data"][0]["id"] == "REVOLUT_REVOLT21"
|
||||
assert data["data"][1]["id"] == "BANCOBPI_BBPIPTPL"
|
||||
|
||||
@respx.mock
|
||||
def test_get_institutions_invalid_country(self, api_client, mock_config):
|
||||
"""Test institutions endpoint with invalid country code."""
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock empty institutions response for invalid country
|
||||
respx.get("https://bankaccountdata.gocardless.com/api/v2/institutions/").mock(
|
||||
return_value=httpx.Response(200, json=[])
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions?country=XX")
|
||||
|
||||
# Should still work but return empty or filtered results
|
||||
assert response.status_code in [200, 404]
|
||||
|
||||
@respx.mock
|
||||
def test_connect_to_bank_success(self, api_client, mock_config, mock_auth_token):
|
||||
"""Test successful bank connection creation."""
|
||||
requisition_data = {
|
||||
"id": "req-123",
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"status": "CR",
|
||||
"created": "2025-09-02T00:00:00Z",
|
||||
"link": "https://example.com/auth",
|
||||
}
|
||||
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock GoCardless requisitions API
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/requisitions/").mock(
|
||||
return_value=httpx.Response(200, json=requisition_data)
|
||||
)
|
||||
|
||||
request_data = {
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"redirect_url": "http://localhost:8000/",
|
||||
}
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.post("/api/v1/banks/connect", json=request_data)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert data["data"]["id"] == "req-123"
|
||||
assert data["data"]["institution_id"] == "REVOLUT_REVOLT21"
|
||||
|
||||
@respx.mock
|
||||
def test_get_bank_status_success(self, api_client, mock_config, mock_auth_token):
|
||||
"""Test successful retrieval of bank connection status."""
|
||||
requisitions_data = {
|
||||
"results": [
|
||||
{
|
||||
"id": "req-123",
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"status": "LN",
|
||||
"created": "2025-09-02T00:00:00Z",
|
||||
"accounts": ["acc-123"],
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# Mock GoCardless token creation
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(
|
||||
200, json={"access": "test-token", "refresh": "test-refresh"}
|
||||
)
|
||||
)
|
||||
|
||||
# Mock GoCardless requisitions API
|
||||
respx.get("https://bankaccountdata.gocardless.com/api/v2/requisitions/").mock(
|
||||
return_value=httpx.Response(200, json=requisitions_data)
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/status")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 1
|
||||
assert data["data"][0]["bank_id"] == "REVOLUT_REVOLT21"
|
||||
assert data["data"][0]["status_display"] == "LINKED"
|
||||
|
||||
def test_get_supported_countries(self, api_client):
|
||||
"""Test supported countries endpoint."""
|
||||
response = api_client.get("/api/v1/banks/countries")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) > 0
|
||||
|
||||
# Check some expected countries
|
||||
country_codes = [country["code"] for country in data["data"]]
|
||||
assert "PT" in country_codes
|
||||
assert "GB" in country_codes
|
||||
assert "DE" in country_codes
|
||||
|
||||
@respx.mock
|
||||
def test_authentication_failure(self, api_client, mock_config):
|
||||
"""Test handling of authentication failures."""
|
||||
# Mock token creation failure
|
||||
respx.post("https://bankaccountdata.gocardless.com/api/v2/token/new/").mock(
|
||||
return_value=httpx.Response(401, json={"detail": "Invalid credentials"})
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions")
|
||||
|
||||
assert response.status_code == 500
|
||||
data = response.json()
|
||||
assert "Failed to get institutions" in data["detail"]
|
||||
155
tests/unit/test_api_client.py
Normal file
155
tests/unit/test_api_client.py
Normal file
@@ -0,0 +1,155 @@
|
||||
"""Tests for CLI API client."""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import requests_mock
|
||||
from unittest.mock import patch
|
||||
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
class TestLeggendAPIClient:
|
||||
"""Test the CLI API client."""
|
||||
|
||||
def test_health_check_success(self):
|
||||
"""Test successful health check."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/health", json={"status": "healthy"})
|
||||
|
||||
result = client.health_check()
|
||||
assert result is True
|
||||
|
||||
def test_health_check_failure(self):
|
||||
"""Test health check failure."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/health", status_code=500)
|
||||
|
||||
result = client.health_check()
|
||||
assert result is False
|
||||
|
||||
def test_get_institutions_success(self, sample_bank_data):
|
||||
"""Test getting institutions via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
"data": sample_bank_data,
|
||||
"message": "Found 2 institutions for PT",
|
||||
}
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/api/v1/banks/institutions", json=api_response)
|
||||
|
||||
result = client.get_institutions("PT")
|
||||
assert len(result) == 2
|
||||
assert result[0]["id"] == "REVOLUT_REVOLT21"
|
||||
|
||||
def test_get_accounts_success(self, sample_account_data):
|
||||
"""Test getting accounts via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
"data": [sample_account_data],
|
||||
"message": "Retrieved 1 accounts",
|
||||
}
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/api/v1/accounts", json=api_response)
|
||||
|
||||
result = client.get_accounts()
|
||||
assert len(result) == 1
|
||||
assert result[0]["id"] == "test-account-123"
|
||||
|
||||
def test_trigger_sync_success(self):
|
||||
"""Test triggering sync via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
"data": {"sync_started": True, "force": False},
|
||||
"message": "Started sync for all accounts",
|
||||
}
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.post("http://localhost:8000/api/v1/sync", json=api_response)
|
||||
|
||||
result = client.trigger_sync()
|
||||
assert result["sync_started"] is True
|
||||
|
||||
def test_connection_error_handling(self):
|
||||
"""Test handling of connection errors."""
|
||||
client = LeggendAPIClient("http://localhost:9999") # Non-existent service
|
||||
|
||||
with pytest.raises((requests.ConnectionError, requests.RequestException)):
|
||||
client.get_accounts()
|
||||
|
||||
def test_http_error_handling(self):
|
||||
"""Test handling of HTTP errors."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get(
|
||||
"http://localhost:8000/api/v1/accounts",
|
||||
status_code=500,
|
||||
json={"detail": "Internal server error"},
|
||||
)
|
||||
|
||||
with pytest.raises((requests.HTTPError, requests.RequestException)):
|
||||
client.get_accounts()
|
||||
|
||||
def test_custom_api_url(self):
|
||||
"""Test using custom API URL."""
|
||||
custom_url = "http://custom-host:9000"
|
||||
client = LeggendAPIClient(custom_url)
|
||||
|
||||
assert client.base_url == custom_url
|
||||
|
||||
def test_environment_variable_url(self):
|
||||
"""Test using environment variable for API URL."""
|
||||
with patch.dict("os.environ", {"LEGGEND_API_URL": "http://env-host:7000"}):
|
||||
client = LeggendAPIClient()
|
||||
assert client.base_url == "http://env-host:7000"
|
||||
|
||||
def test_sync_with_options(self):
|
||||
"""Test sync with various options."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
"data": {"sync_started": True, "force": True},
|
||||
"message": "Started sync for 2 specific accounts",
|
||||
}
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.post("http://localhost:8000/api/v1/sync", json=api_response)
|
||||
|
||||
result = client.trigger_sync(account_ids=["acc1", "acc2"], force=True)
|
||||
assert result["sync_started"] is True
|
||||
assert result["force"] is True
|
||||
|
||||
def test_get_scheduler_config(self):
|
||||
"""Test getting scheduler configuration."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
"data": {
|
||||
"enabled": True,
|
||||
"hour": 3,
|
||||
"minute": 0,
|
||||
"next_scheduled_sync": "2025-09-03T03:00:00Z",
|
||||
},
|
||||
}
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/api/v1/sync/scheduler", json=api_response)
|
||||
|
||||
result = client.get_scheduler_config()
|
||||
assert result["enabled"] is True
|
||||
assert result["hour"] == 3
|
||||
369
tests/unit/test_api_transactions.py
Normal file
369
tests/unit/test_api_transactions.py
Normal file
@@ -0,0 +1,369 @@
|
||||
"""Tests for transactions API endpoints."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
class TestTransactionsAPI:
|
||||
"""Test transaction-related API endpoints."""
|
||||
|
||||
def test_get_all_transactions_success(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test successful retrieval of all transactions from database."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "data"},
|
||||
},
|
||||
{
|
||||
"internalTransactionId": "txn-002",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||
"description": "Grocery Store",
|
||||
"transactionValue": -45.30,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"other": "data"},
|
||||
},
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=2,
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions?summary_only=true")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 2
|
||||
|
||||
# Check first transaction summary
|
||||
transaction = data["data"][0]
|
||||
assert transaction["internal_transaction_id"] == "txn-001"
|
||||
assert transaction["amount"] == -10.50
|
||||
assert transaction["currency"] == "EUR"
|
||||
assert transaction["description"] == "Coffee Shop Payment"
|
||||
assert transaction["status"] == "booked"
|
||||
assert transaction["account_id"] == "test-account-123"
|
||||
|
||||
def test_get_all_transactions_full_details(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test retrieval of full transaction details from database."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "raw_data"},
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions?summary_only=false")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 1
|
||||
|
||||
transaction = data["data"][0]
|
||||
assert transaction["internal_transaction_id"] == "txn-001"
|
||||
assert transaction["institution_id"] == "REVOLUT_REVOLT21"
|
||||
assert transaction["iban"] == "LT313250081177977789"
|
||||
assert "raw_transaction" in transaction
|
||||
|
||||
def test_get_transactions_with_filters(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test getting transactions with various filters."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "data"},
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
response = api_client.get(
|
||||
"/api/v1/transactions?"
|
||||
"account_id=test-account-123&"
|
||||
"date_from=2025-09-01&"
|
||||
"date_to=2025-09-02&"
|
||||
"min_amount=-50.0&"
|
||||
"max_amount=0.0&"
|
||||
"search=Coffee&"
|
||||
"limit=10&"
|
||||
"offset=5"
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
|
||||
# Verify the database service was called with correct filters
|
||||
mock_get_transactions.assert_called_once_with(
|
||||
account_id="test-account-123",
|
||||
limit=10,
|
||||
offset=5,
|
||||
date_from="2025-09-01",
|
||||
date_to="2025-09-02",
|
||||
min_amount=-50.0,
|
||||
max_amount=0.0,
|
||||
search="Coffee",
|
||||
)
|
||||
|
||||
def test_get_transactions_empty_result(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test getting transactions when database returns empty result."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=[],
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=0,
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
assert len(data["data"]) == 0
|
||||
assert "0 transactions" in data["message"]
|
||||
|
||||
def test_get_transactions_database_error(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test handling database error when getting transactions."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
side_effect=Exception("Database connection failed"),
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions")
|
||||
|
||||
assert response.status_code == 500
|
||||
assert "Failed to get transactions" in response.json()["detail"]
|
||||
|
||||
def test_get_transaction_stats_success(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test successful retrieval of transaction statistics from database."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"transactionValue": -10.50,
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
},
|
||||
{
|
||||
"internalTransactionId": "txn-002",
|
||||
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||
"transactionValue": 100.00,
|
||||
"transactionStatus": "pending",
|
||||
"accountId": "test-account-123",
|
||||
},
|
||||
{
|
||||
"internalTransactionId": "txn-003",
|
||||
"transactionDate": datetime(2025, 9, 3, 16, 45),
|
||||
"transactionValue": -25.30,
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "other-account-456",
|
||||
},
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions/stats?days=30")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
|
||||
stats = data["data"]
|
||||
assert stats["period_days"] == 30
|
||||
assert stats["total_transactions"] == 3
|
||||
assert stats["booked_transactions"] == 2
|
||||
assert stats["pending_transactions"] == 1
|
||||
assert stats["total_income"] == 100.00
|
||||
assert stats["total_expenses"] == 35.80 # abs(-10.50) + abs(-25.30)
|
||||
assert stats["net_change"] == 64.20 # 100.00 - 35.80
|
||||
assert stats["accounts_included"] == 2 # Two unique account IDs
|
||||
|
||||
# Average transaction: ((-10.50) + 100.00 + (-25.30)) / 3 = 64.20 / 3 = 21.4
|
||||
expected_avg = round(64.20 / 3, 2)
|
||||
assert stats["average_transaction"] == expected_avg
|
||||
|
||||
def test_get_transaction_stats_with_account_filter(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test getting transaction stats filtered by account."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"transactionValue": -10.50,
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
):
|
||||
response = api_client.get(
|
||||
"/api/v1/transactions/stats?account_id=test-account-123"
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
# Verify the database service was called with account filter
|
||||
mock_get_transactions.assert_called_once()
|
||||
call_kwargs = mock_get_transactions.call_args.kwargs
|
||||
assert call_kwargs["account_id"] == "test-account-123"
|
||||
|
||||
def test_get_transaction_stats_empty_result(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test getting stats when no transactions match criteria."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=[],
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions/stats")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["success"] is True
|
||||
|
||||
stats = data["data"]
|
||||
assert stats["total_transactions"] == 0
|
||||
assert stats["total_income"] == 0.0
|
||||
assert stats["total_expenses"] == 0.0
|
||||
assert stats["net_change"] == 0.0
|
||||
assert stats["average_transaction"] == 0 # Division by zero handled
|
||||
assert stats["accounts_included"] == 0
|
||||
|
||||
def test_get_transaction_stats_database_error(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test handling database error when getting stats."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
side_effect=Exception("Database connection failed"),
|
||||
),
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions/stats")
|
||||
|
||||
assert response.status_code == 500
|
||||
assert "Failed to get transaction stats" in response.json()["detail"]
|
||||
|
||||
def test_get_transaction_stats_custom_period(
|
||||
self, api_client, mock_config, mock_auth_token
|
||||
):
|
||||
"""Test getting transaction stats for custom time period."""
|
||||
mock_transactions = [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"transactionValue": -10.50,
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
}
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
):
|
||||
response = api_client.get("/api/v1/transactions/stats?days=7")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["data"]["period_days"] == 7
|
||||
|
||||
# Verify the date range was calculated correctly for 7 days
|
||||
mock_get_transactions.assert_called_once()
|
||||
call_kwargs = mock_get_transactions.call_args.kwargs
|
||||
assert "date_from" in call_kwargs
|
||||
assert "date_to" in call_kwargs
|
||||
201
tests/unit/test_config.py
Normal file
201
tests/unit/test_config.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""Tests for configuration management."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
from leggend.config import Config
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
class TestConfig:
|
||||
"""Test configuration management."""
|
||||
|
||||
def test_singleton_behavior(self):
|
||||
"""Test that Config is a singleton."""
|
||||
config1 = Config()
|
||||
config2 = Config()
|
||||
assert config1 is config2
|
||||
|
||||
def test_load_config_success(self, temp_config_dir):
|
||||
"""Test successful configuration loading."""
|
||||
config_data = {
|
||||
"gocardless": {
|
||||
"key": "test-key",
|
||||
"secret": "test-secret",
|
||||
"url": "https://test.example.com",
|
||||
},
|
||||
"database": {"sqlite": True},
|
||||
}
|
||||
|
||||
config_file = temp_config_dir / "config.toml"
|
||||
with open(config_file, "wb") as f:
|
||||
import tomli_w
|
||||
|
||||
tomli_w.dump(config_data, f)
|
||||
|
||||
config = Config()
|
||||
# Reset singleton state for testing
|
||||
config._config = None
|
||||
config._config_path = None
|
||||
|
||||
result = config.load_config(str(config_file))
|
||||
|
||||
assert result == config_data
|
||||
assert config.gocardless_config["key"] == "test-key"
|
||||
assert config.database_config["sqlite"] is True
|
||||
|
||||
def test_load_config_file_not_found(self):
|
||||
"""Test handling of missing configuration file."""
|
||||
config = Config()
|
||||
config._config = None # Reset for test
|
||||
|
||||
with pytest.raises(FileNotFoundError):
|
||||
config.load_config("/nonexistent/config.toml")
|
||||
|
||||
def test_save_config_success(self, temp_config_dir):
|
||||
"""Test successful configuration saving."""
|
||||
config_data = {"gocardless": {"key": "new-key", "secret": "new-secret"}}
|
||||
|
||||
config_file = temp_config_dir / "new_config.toml"
|
||||
config = Config()
|
||||
config._config = None
|
||||
|
||||
config.save_config(config_data, str(config_file))
|
||||
|
||||
# Verify file was created and contains correct data
|
||||
assert config_file.exists()
|
||||
|
||||
import tomllib
|
||||
|
||||
with open(config_file, "rb") as f:
|
||||
saved_data = tomllib.load(f)
|
||||
|
||||
assert saved_data == config_data
|
||||
|
||||
def test_update_config_success(self, temp_config_dir):
|
||||
"""Test updating configuration values."""
|
||||
initial_config = {
|
||||
"gocardless": {"key": "old-key"},
|
||||
"database": {"sqlite": True},
|
||||
}
|
||||
|
||||
config_file = temp_config_dir / "config.toml"
|
||||
with open(config_file, "wb") as f:
|
||||
import tomli_w
|
||||
|
||||
tomli_w.dump(initial_config, f)
|
||||
|
||||
config = Config()
|
||||
config._config = None
|
||||
config.load_config(str(config_file))
|
||||
|
||||
config.update_config("gocardless", "key", "new-key")
|
||||
|
||||
assert config.gocardless_config["key"] == "new-key"
|
||||
|
||||
# Verify it was saved to file
|
||||
import tomllib
|
||||
|
||||
with open(config_file, "rb") as f:
|
||||
saved_data = tomllib.load(f)
|
||||
assert saved_data["gocardless"]["key"] == "new-key"
|
||||
|
||||
def test_update_section_success(self, temp_config_dir):
|
||||
"""Test updating entire configuration section."""
|
||||
initial_config = {"database": {"sqlite": True}}
|
||||
|
||||
config_file = temp_config_dir / "config.toml"
|
||||
with open(config_file, "wb") as f:
|
||||
import tomli_w
|
||||
|
||||
tomli_w.dump(initial_config, f)
|
||||
|
||||
config = Config()
|
||||
config._config = None
|
||||
config.load_config(str(config_file))
|
||||
|
||||
new_db_config = {"sqlite": False, "path": "./custom.db"}
|
||||
config.update_section("database", new_db_config)
|
||||
|
||||
assert config.database_config == new_db_config
|
||||
|
||||
def test_scheduler_config_defaults(self):
|
||||
"""Test scheduler configuration with defaults."""
|
||||
config = Config()
|
||||
config._config = {} # Empty config
|
||||
|
||||
scheduler_config = config.scheduler_config
|
||||
|
||||
assert scheduler_config["sync"]["enabled"] is True
|
||||
assert scheduler_config["sync"]["hour"] == 3
|
||||
assert scheduler_config["sync"]["minute"] == 0
|
||||
assert scheduler_config["sync"]["cron"] is None
|
||||
|
||||
def test_scheduler_config_custom(self):
|
||||
"""Test scheduler configuration with custom values."""
|
||||
custom_config = {
|
||||
"scheduler": {
|
||||
"sync": {
|
||||
"enabled": False,
|
||||
"hour": 6,
|
||||
"minute": 30,
|
||||
"cron": "0 6 * * 1-5",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
config = Config()
|
||||
config._config = custom_config
|
||||
|
||||
scheduler_config = config.scheduler_config
|
||||
|
||||
assert scheduler_config["sync"]["enabled"] is False
|
||||
assert scheduler_config["sync"]["hour"] == 6
|
||||
assert scheduler_config["sync"]["minute"] == 30
|
||||
assert scheduler_config["sync"]["cron"] == "0 6 * * 1-5"
|
||||
|
||||
def test_environment_variable_config_path(self):
|
||||
"""Test using environment variable for config path."""
|
||||
with patch.dict(
|
||||
"os.environ", {"LEGGEN_CONFIG_FILE": "/custom/path/config.toml"}
|
||||
):
|
||||
config = Config()
|
||||
config._config = None
|
||||
|
||||
with (
|
||||
patch("builtins.open", side_effect=FileNotFoundError),
|
||||
pytest.raises(FileNotFoundError),
|
||||
):
|
||||
config.load_config()
|
||||
|
||||
def test_notifications_config(self):
|
||||
"""Test notifications configuration access."""
|
||||
custom_config = {
|
||||
"notifications": {
|
||||
"discord": {"webhook": "https://discord.webhook", "enabled": True},
|
||||
"telegram": {"token": "bot-token", "chat_id": 123},
|
||||
}
|
||||
}
|
||||
|
||||
config = Config()
|
||||
config._config = custom_config
|
||||
|
||||
notifications = config.notifications_config
|
||||
assert notifications["discord"]["webhook"] == "https://discord.webhook"
|
||||
assert notifications["telegram"]["token"] == "bot-token"
|
||||
|
||||
def test_filters_config(self):
|
||||
"""Test filters configuration access."""
|
||||
custom_config = {
|
||||
"filters": {
|
||||
"case-insensitive": {"salary": "SALARY", "bills": "BILL"},
|
||||
"amount_threshold": 100.0,
|
||||
}
|
||||
}
|
||||
|
||||
config = Config()
|
||||
config._config = custom_config
|
||||
|
||||
filters = config.filters_config
|
||||
assert filters["case-insensitive"]["salary"] == "SALARY"
|
||||
assert filters["amount_threshold"] == 100.0
|
||||
433
tests/unit/test_database_service.py
Normal file
433
tests/unit/test_database_service.py
Normal file
@@ -0,0 +1,433 @@
|
||||
"""Tests for database service."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
from datetime import datetime
|
||||
|
||||
from leggend.services.database_service import DatabaseService
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def database_service():
|
||||
"""Create a database service instance for testing."""
|
||||
return DatabaseService()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_transactions_db_format():
|
||||
"""Sample transactions in database format."""
|
||||
return [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "data"},
|
||||
},
|
||||
{
|
||||
"internalTransactionId": "txn-002",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||
"description": "Grocery Store",
|
||||
"transactionValue": -45.30,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"other": "data"},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_balances_db_format():
|
||||
"""Sample balances in database format."""
|
||||
return [
|
||||
{
|
||||
"id": 1,
|
||||
"account_id": "test-account-123",
|
||||
"bank": "REVOLUT_REVOLT21",
|
||||
"status": "active",
|
||||
"iban": "LT313250081177977789",
|
||||
"amount": 1000.00,
|
||||
"currency": "EUR",
|
||||
"type": "interimAvailable",
|
||||
"timestamp": datetime(2025, 9, 1, 10, 0),
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"account_id": "test-account-123",
|
||||
"bank": "REVOLUT_REVOLT21",
|
||||
"status": "active",
|
||||
"iban": "LT313250081177977789",
|
||||
"amount": 950.00,
|
||||
"currency": "EUR",
|
||||
"type": "expected",
|
||||
"timestamp": datetime(2025, 9, 1, 10, 0),
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
class TestDatabaseService:
|
||||
"""Test database service operations."""
|
||||
|
||||
async def test_get_transactions_from_db_success(
|
||||
self, database_service, sample_transactions_db_format
|
||||
):
|
||||
"""Test successful retrieval of transactions from database."""
|
||||
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||
mock_get_transactions.return_value = sample_transactions_db_format
|
||||
|
||||
result = await database_service.get_transactions_from_db(
|
||||
account_id="test-account-123", limit=10
|
||||
)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["internalTransactionId"] == "txn-001"
|
||||
mock_get_transactions.assert_called_once_with(
|
||||
account_id="test-account-123",
|
||||
limit=10,
|
||||
offset=0,
|
||||
date_from=None,
|
||||
date_to=None,
|
||||
min_amount=None,
|
||||
max_amount=None,
|
||||
search=None,
|
||||
)
|
||||
|
||||
async def test_get_transactions_from_db_with_filters(
|
||||
self, database_service, sample_transactions_db_format
|
||||
):
|
||||
"""Test retrieving transactions with filters."""
|
||||
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||
mock_get_transactions.return_value = sample_transactions_db_format
|
||||
|
||||
result = await database_service.get_transactions_from_db(
|
||||
account_id="test-account-123",
|
||||
limit=5,
|
||||
offset=10,
|
||||
date_from="2025-09-01",
|
||||
date_to="2025-09-02",
|
||||
min_amount=-50.0,
|
||||
max_amount=0.0,
|
||||
search="Coffee",
|
||||
)
|
||||
|
||||
assert len(result) == 2
|
||||
mock_get_transactions.assert_called_once_with(
|
||||
account_id="test-account-123",
|
||||
limit=5,
|
||||
offset=10,
|
||||
date_from="2025-09-01",
|
||||
date_to="2025-09-02",
|
||||
min_amount=-50.0,
|
||||
max_amount=0.0,
|
||||
search="Coffee",
|
||||
)
|
||||
|
||||
async def test_get_transactions_from_db_sqlite_disabled(self, database_service):
|
||||
"""Test getting transactions when SQLite is disabled."""
|
||||
database_service.sqlite_enabled = False
|
||||
|
||||
result = await database_service.get_transactions_from_db()
|
||||
|
||||
assert result == []
|
||||
|
||||
async def test_get_transactions_from_db_error(self, database_service):
|
||||
"""Test handling error when getting transactions."""
|
||||
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
|
||||
mock_get_transactions.side_effect = Exception("Database error")
|
||||
|
||||
result = await database_service.get_transactions_from_db()
|
||||
|
||||
assert result == []
|
||||
|
||||
async def test_get_transaction_count_from_db_success(self, database_service):
|
||||
"""Test successful retrieval of transaction count."""
|
||||
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||
mock_get_count.return_value = 42
|
||||
|
||||
result = await database_service.get_transaction_count_from_db(
|
||||
account_id="test-account-123"
|
||||
)
|
||||
|
||||
assert result == 42
|
||||
mock_get_count.assert_called_once_with(account_id="test-account-123")
|
||||
|
||||
async def test_get_transaction_count_from_db_with_filters(self, database_service):
|
||||
"""Test getting transaction count with filters."""
|
||||
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||
mock_get_count.return_value = 15
|
||||
|
||||
result = await database_service.get_transaction_count_from_db(
|
||||
account_id="test-account-123",
|
||||
date_from="2025-09-01",
|
||||
min_amount=-100.0,
|
||||
search="Coffee",
|
||||
)
|
||||
|
||||
assert result == 15
|
||||
mock_get_count.assert_called_once_with(
|
||||
account_id="test-account-123",
|
||||
date_from="2025-09-01",
|
||||
min_amount=-100.0,
|
||||
search="Coffee",
|
||||
)
|
||||
|
||||
async def test_get_transaction_count_from_db_sqlite_disabled(
|
||||
self, database_service
|
||||
):
|
||||
"""Test getting count when SQLite is disabled."""
|
||||
database_service.sqlite_enabled = False
|
||||
|
||||
result = await database_service.get_transaction_count_from_db()
|
||||
|
||||
assert result == 0
|
||||
|
||||
async def test_get_transaction_count_from_db_error(self, database_service):
|
||||
"""Test handling error when getting count."""
|
||||
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
|
||||
mock_get_count.side_effect = Exception("Database error")
|
||||
|
||||
result = await database_service.get_transaction_count_from_db()
|
||||
|
||||
assert result == 0
|
||||
|
||||
async def test_get_balances_from_db_success(
|
||||
self, database_service, sample_balances_db_format
|
||||
):
|
||||
"""Test successful retrieval of balances from database."""
|
||||
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
|
||||
mock_get_balances.return_value = sample_balances_db_format
|
||||
|
||||
result = await database_service.get_balances_from_db(
|
||||
account_id="test-account-123"
|
||||
)
|
||||
|
||||
assert len(result) == 2
|
||||
assert result[0]["account_id"] == "test-account-123"
|
||||
assert result[0]["amount"] == 1000.00
|
||||
mock_get_balances.assert_called_once_with(account_id="test-account-123")
|
||||
|
||||
async def test_get_balances_from_db_sqlite_disabled(self, database_service):
|
||||
"""Test getting balances when SQLite is disabled."""
|
||||
database_service.sqlite_enabled = False
|
||||
|
||||
result = await database_service.get_balances_from_db()
|
||||
|
||||
assert result == []
|
||||
|
||||
async def test_get_balances_from_db_error(self, database_service):
|
||||
"""Test handling error when getting balances."""
|
||||
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
|
||||
mock_get_balances.side_effect = Exception("Database error")
|
||||
|
||||
result = await database_service.get_balances_from_db()
|
||||
|
||||
assert result == []
|
||||
|
||||
async def test_get_account_summary_from_db_success(self, database_service):
|
||||
"""Test successful retrieval of account summary."""
|
||||
mock_summary = {
|
||||
"accountId": "test-account-123",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
}
|
||||
|
||||
with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
|
||||
mock_get_summary.return_value = mock_summary
|
||||
|
||||
result = await database_service.get_account_summary_from_db(
|
||||
"test-account-123"
|
||||
)
|
||||
|
||||
assert result == mock_summary
|
||||
mock_get_summary.assert_called_once_with("test-account-123")
|
||||
|
||||
async def test_get_account_summary_from_db_sqlite_disabled(self, database_service):
|
||||
"""Test getting summary when SQLite is disabled."""
|
||||
database_service.sqlite_enabled = False
|
||||
|
||||
result = await database_service.get_account_summary_from_db("test-account-123")
|
||||
|
||||
assert result is None
|
||||
|
||||
async def test_get_account_summary_from_db_error(self, database_service):
|
||||
"""Test handling error when getting summary."""
|
||||
with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
|
||||
mock_get_summary.side_effect = Exception("Database error")
|
||||
|
||||
result = await database_service.get_account_summary_from_db(
|
||||
"test-account-123"
|
||||
)
|
||||
|
||||
assert result is None
|
||||
|
||||
async def test_persist_balance_sqlite_success(self, database_service):
|
||||
"""Test successful balance persistence."""
|
||||
balance_data = {
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"balances": [
|
||||
{
|
||||
"balanceAmount": {"amount": "1000.00", "currency": "EUR"},
|
||||
"balanceType": "interimAvailable",
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
with patch("sqlite3.connect") as mock_connect:
|
||||
mock_conn = mock_connect.return_value
|
||||
mock_cursor = mock_conn.cursor.return_value
|
||||
|
||||
await database_service._persist_balance_sqlite(
|
||||
"test-account-123", balance_data
|
||||
)
|
||||
|
||||
# Verify database operations
|
||||
mock_connect.assert_called()
|
||||
mock_cursor.execute.assert_called() # Table creation and insert
|
||||
mock_conn.commit.assert_called_once()
|
||||
mock_conn.close.assert_called_once()
|
||||
|
||||
async def test_persist_balance_sqlite_error(self, database_service):
|
||||
"""Test handling error during balance persistence."""
|
||||
balance_data = {"balances": []}
|
||||
|
||||
with patch("sqlite3.connect") as mock_connect:
|
||||
mock_connect.side_effect = Exception("Database error")
|
||||
|
||||
with pytest.raises(Exception, match="Database error"):
|
||||
await database_service._persist_balance_sqlite(
|
||||
"test-account-123", balance_data
|
||||
)
|
||||
|
||||
async def test_persist_transactions_sqlite_success(
|
||||
self, database_service, sample_transactions_db_format
|
||||
):
|
||||
"""Test successful transaction persistence."""
|
||||
with patch("sqlite3.connect") as mock_connect:
|
||||
mock_conn = mock_connect.return_value
|
||||
mock_cursor = mock_conn.cursor.return_value
|
||||
|
||||
result = await database_service._persist_transactions_sqlite(
|
||||
"test-account-123", sample_transactions_db_format
|
||||
)
|
||||
|
||||
# Should return the transactions (assuming no duplicates)
|
||||
assert len(result) >= 0 # Could be empty if all are duplicates
|
||||
|
||||
# Verify database operations
|
||||
mock_connect.assert_called()
|
||||
mock_cursor.execute.assert_called()
|
||||
mock_conn.commit.assert_called_once()
|
||||
mock_conn.close.assert_called_once()
|
||||
|
||||
async def test_persist_transactions_sqlite_error(self, database_service):
|
||||
"""Test handling error during transaction persistence."""
|
||||
with patch("sqlite3.connect") as mock_connect:
|
||||
mock_connect.side_effect = Exception("Database error")
|
||||
|
||||
with pytest.raises(Exception, match="Database error"):
|
||||
await database_service._persist_transactions_sqlite(
|
||||
"test-account-123", []
|
||||
)
|
||||
|
||||
async def test_process_transactions_booked_and_pending(self, database_service):
|
||||
"""Test processing transactions with both booked and pending."""
|
||||
account_info = {
|
||||
"institution_id": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
}
|
||||
|
||||
transaction_data = {
|
||||
"transactions": {
|
||||
"booked": [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"bookingDate": "2025-09-01",
|
||||
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||
"remittanceInformationUnstructured": "Coffee Shop",
|
||||
}
|
||||
],
|
||||
"pending": [
|
||||
{
|
||||
"internalTransactionId": "txn-002",
|
||||
"bookingDate": "2025-09-02",
|
||||
"transactionAmount": {"amount": "-25.00", "currency": "EUR"},
|
||||
"remittanceInformationUnstructured": "Gas Station",
|
||||
}
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
result = database_service.process_transactions(
|
||||
"test-account-123", account_info, transaction_data
|
||||
)
|
||||
|
||||
assert len(result) == 2
|
||||
|
||||
# Check booked transaction
|
||||
booked_txn = next(t for t in result if t["transactionStatus"] == "booked")
|
||||
assert booked_txn["internalTransactionId"] == "txn-001"
|
||||
assert booked_txn["transactionValue"] == -10.50
|
||||
assert booked_txn["description"] == "Coffee Shop"
|
||||
|
||||
# Check pending transaction
|
||||
pending_txn = next(t for t in result if t["transactionStatus"] == "pending")
|
||||
assert pending_txn["internalTransactionId"] == "txn-002"
|
||||
assert pending_txn["transactionValue"] == -25.00
|
||||
assert pending_txn["description"] == "Gas Station"
|
||||
|
||||
async def test_process_transactions_missing_date_error(self, database_service):
|
||||
"""Test processing transaction with missing date raises error."""
|
||||
account_info = {"institution_id": "TEST_BANK"}
|
||||
|
||||
transaction_data = {
|
||||
"transactions": {
|
||||
"booked": [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
# Missing both bookingDate and valueDate
|
||||
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||
}
|
||||
],
|
||||
"pending": [],
|
||||
}
|
||||
}
|
||||
|
||||
with pytest.raises(ValueError, match="No valid date found in transaction"):
|
||||
database_service.process_transactions(
|
||||
"test-account-123", account_info, transaction_data
|
||||
)
|
||||
|
||||
async def test_process_transactions_remittance_array(self, database_service):
|
||||
"""Test processing transaction with remittance array."""
|
||||
account_info = {"institution_id": "TEST_BANK"}
|
||||
|
||||
transaction_data = {
|
||||
"transactions": {
|
||||
"booked": [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"bookingDate": "2025-09-01",
|
||||
"transactionAmount": {"amount": "-10.50", "currency": "EUR"},
|
||||
"remittanceInformationUnstructuredArray": ["Line 1", "Line 2"],
|
||||
}
|
||||
],
|
||||
"pending": [],
|
||||
}
|
||||
}
|
||||
|
||||
result = database_service.process_transactions(
|
||||
"test-account-123", account_info, transaction_data
|
||||
)
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0]["description"] == "Line 1,Line 2"
|
||||
196
tests/unit/test_scheduler.py
Normal file
196
tests/unit/test_scheduler.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""Tests for background scheduler."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, AsyncMock, MagicMock
|
||||
from datetime import datetime
|
||||
|
||||
from leggend.background.scheduler import BackgroundScheduler
|
||||
|
||||
|
||||
@pytest.mark.unit
|
||||
class TestBackgroundScheduler:
|
||||
"""Test background job scheduler."""
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config(self):
|
||||
"""Mock configuration for scheduler tests."""
|
||||
return {"sync": {"enabled": True, "hour": 3, "minute": 0, "cron": None}}
|
||||
|
||||
@pytest.fixture
|
||||
def scheduler(self):
|
||||
"""Create scheduler instance for testing."""
|
||||
with (
|
||||
patch("leggend.background.scheduler.SyncService"),
|
||||
patch("leggend.background.scheduler.config") as mock_config,
|
||||
):
|
||||
mock_config.scheduler_config = {
|
||||
"sync": {"enabled": True, "hour": 3, "minute": 0}
|
||||
}
|
||||
|
||||
# Create scheduler and replace its AsyncIO scheduler with a mock
|
||||
scheduler = BackgroundScheduler()
|
||||
mock_scheduler = MagicMock()
|
||||
mock_scheduler.running = False
|
||||
mock_scheduler.get_jobs.return_value = []
|
||||
scheduler.scheduler = mock_scheduler
|
||||
return scheduler
|
||||
|
||||
def test_scheduler_start_default_config(self, scheduler, mock_config):
|
||||
"""Test starting scheduler with default configuration."""
|
||||
with patch("leggend.config.config") as mock_config_obj:
|
||||
mock_config_obj.scheduler_config = mock_config
|
||||
|
||||
# Mock the job that gets added
|
||||
mock_job = MagicMock()
|
||||
mock_job.id = "daily_sync"
|
||||
scheduler.scheduler.get_jobs.return_value = [mock_job]
|
||||
|
||||
scheduler.start()
|
||||
|
||||
# Verify scheduler.start() was called
|
||||
scheduler.scheduler.start.assert_called_once()
|
||||
# Verify add_job was called
|
||||
scheduler.scheduler.add_job.assert_called_once()
|
||||
|
||||
def test_scheduler_start_disabled(self, scheduler):
|
||||
"""Test scheduler behavior when sync is disabled."""
|
||||
disabled_config = {"sync": {"enabled": False}}
|
||||
|
||||
with (
|
||||
patch.object(scheduler, "scheduler") as mock_scheduler,
|
||||
patch("leggend.background.scheduler.config") as mock_config_obj,
|
||||
):
|
||||
mock_config_obj.scheduler_config = disabled_config
|
||||
mock_scheduler.running = False
|
||||
|
||||
scheduler.start()
|
||||
|
||||
# Verify scheduler.start() was called
|
||||
mock_scheduler.start.assert_called_once()
|
||||
# Verify add_job was NOT called for disabled sync
|
||||
mock_scheduler.add_job.assert_not_called()
|
||||
|
||||
def test_scheduler_start_with_cron(self, scheduler):
|
||||
"""Test starting scheduler with custom cron expression."""
|
||||
cron_config = {
|
||||
"sync": {
|
||||
"enabled": True,
|
||||
"cron": "0 6 * * 1-5", # 6 AM on weekdays
|
||||
}
|
||||
}
|
||||
|
||||
with patch("leggend.config.config") as mock_config_obj:
|
||||
mock_config_obj.scheduler_config = cron_config
|
||||
|
||||
scheduler.start()
|
||||
|
||||
# Verify scheduler.start() and add_job were called
|
||||
scheduler.scheduler.start.assert_called_once()
|
||||
scheduler.scheduler.add_job.assert_called_once()
|
||||
# Verify job was added with correct ID
|
||||
call_args = scheduler.scheduler.add_job.call_args
|
||||
assert call_args.kwargs["id"] == "daily_sync"
|
||||
|
||||
def test_scheduler_start_invalid_cron(self, scheduler):
|
||||
"""Test handling of invalid cron expressions."""
|
||||
invalid_cron_config = {"sync": {"enabled": True, "cron": "invalid cron"}}
|
||||
|
||||
with (
|
||||
patch.object(scheduler, "scheduler") as mock_scheduler,
|
||||
patch("leggend.background.scheduler.config") as mock_config_obj,
|
||||
):
|
||||
mock_config_obj.scheduler_config = invalid_cron_config
|
||||
mock_scheduler.running = False
|
||||
|
||||
scheduler.start()
|
||||
|
||||
# With invalid cron, scheduler.start() should not be called due to early return
|
||||
# and add_job should not be called
|
||||
mock_scheduler.start.assert_not_called()
|
||||
mock_scheduler.add_job.assert_not_called()
|
||||
|
||||
def test_scheduler_shutdown(self, scheduler):
|
||||
"""Test scheduler shutdown."""
|
||||
scheduler.scheduler.running = True
|
||||
|
||||
scheduler.shutdown()
|
||||
|
||||
scheduler.scheduler.shutdown.assert_called_once()
|
||||
|
||||
def test_reschedule_sync(self, scheduler, mock_config):
|
||||
"""Test rescheduling sync job."""
|
||||
scheduler.scheduler.running = True
|
||||
|
||||
# Reschedule with new config
|
||||
new_config = {"enabled": True, "hour": 6, "minute": 30}
|
||||
|
||||
scheduler.reschedule_sync(new_config)
|
||||
|
||||
# Verify remove_job and add_job were called
|
||||
scheduler.scheduler.remove_job.assert_called_once_with("daily_sync")
|
||||
scheduler.scheduler.add_job.assert_called_once()
|
||||
|
||||
def test_reschedule_sync_disable(self, scheduler, mock_config):
|
||||
"""Test disabling sync via reschedule."""
|
||||
scheduler.scheduler.running = True
|
||||
|
||||
# Disable sync
|
||||
disabled_config = {"enabled": False}
|
||||
scheduler.reschedule_sync(disabled_config)
|
||||
|
||||
# Job should be removed but not re-added
|
||||
scheduler.scheduler.remove_job.assert_called_once_with("daily_sync")
|
||||
scheduler.scheduler.add_job.assert_not_called()
|
||||
|
||||
def test_get_next_sync_time(self, scheduler, mock_config):
|
||||
"""Test getting next scheduled sync time."""
|
||||
mock_job = MagicMock()
|
||||
mock_job.next_run_time = datetime(2025, 9, 2, 3, 0)
|
||||
scheduler.scheduler.get_job.return_value = mock_job
|
||||
|
||||
next_time = scheduler.get_next_sync_time()
|
||||
|
||||
assert next_time is not None
|
||||
assert isinstance(next_time, datetime)
|
||||
scheduler.scheduler.get_job.assert_called_once_with("daily_sync")
|
||||
|
||||
def test_get_next_sync_time_no_job(self, scheduler):
|
||||
"""Test getting next sync time when no job is scheduled."""
|
||||
scheduler.scheduler.get_job.return_value = None
|
||||
|
||||
next_time = scheduler.get_next_sync_time()
|
||||
|
||||
assert next_time is None
|
||||
scheduler.scheduler.get_job.assert_called_once_with("daily_sync")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_run_sync_success(self, scheduler):
|
||||
"""Test successful sync job execution."""
|
||||
mock_sync_service = AsyncMock()
|
||||
scheduler.sync_service = mock_sync_service
|
||||
|
||||
await scheduler._run_sync()
|
||||
|
||||
mock_sync_service.sync_all_accounts.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_run_sync_failure(self, scheduler):
|
||||
"""Test sync job execution with failure."""
|
||||
mock_sync_service = AsyncMock()
|
||||
mock_sync_service.sync_all_accounts.side_effect = Exception("Sync failed")
|
||||
scheduler.sync_service = mock_sync_service
|
||||
|
||||
# Should not raise exception, just log error
|
||||
await scheduler._run_sync()
|
||||
|
||||
mock_sync_service.sync_all_accounts.assert_called_once()
|
||||
|
||||
def test_scheduler_job_max_instances(self, scheduler, mock_config):
|
||||
"""Test that sync jobs have max_instances=1."""
|
||||
with patch("leggend.config.config") as mock_config_obj:
|
||||
mock_config_obj.scheduler_config = mock_config
|
||||
scheduler.start()
|
||||
|
||||
# Verify add_job was called with max_instances=1
|
||||
call_args = scheduler.scheduler.add_job.call_args
|
||||
assert call_args.kwargs["max_instances"] == 1
|
||||
368
tests/unit/test_sqlite_database.py
Normal file
368
tests/unit/test_sqlite_database.py
Normal file
@@ -0,0 +1,368 @@
|
||||
"""Tests for SQLite database functions."""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
from datetime import datetime
|
||||
|
||||
import leggen.database.sqlite as sqlite_db
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_db_path():
|
||||
"""Create a temporary database file for testing."""
|
||||
import uuid
|
||||
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
db_path = Path(tmpdir) / f"test_{uuid.uuid4().hex}.db"
|
||||
yield db_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_home_db_path(temp_db_path):
|
||||
"""Mock the home database path to use temp file."""
|
||||
config_dir = temp_db_path.parent / ".config" / "leggen"
|
||||
config_dir.mkdir(parents=True, exist_ok=True)
|
||||
db_file = config_dir / "leggen.db"
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = temp_db_path.parent
|
||||
yield db_file
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_transactions():
|
||||
"""Sample transaction data for testing."""
|
||||
return [
|
||||
{
|
||||
"internalTransactionId": "txn-001",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 1, 9, 30),
|
||||
"description": "Coffee Shop Payment",
|
||||
"transactionValue": -10.50,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"some": "data"},
|
||||
},
|
||||
{
|
||||
"internalTransactionId": "txn-002",
|
||||
"institutionId": "REVOLUT_REVOLT21",
|
||||
"iban": "LT313250081177977789",
|
||||
"transactionDate": datetime(2025, 9, 2, 14, 15),
|
||||
"description": "Grocery Store",
|
||||
"transactionValue": -45.30,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": "test-account-123",
|
||||
"rawTransaction": {"other": "data"},
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_balance():
|
||||
"""Sample balance data for testing."""
|
||||
return {
|
||||
"account_id": "test-account-123",
|
||||
"bank": "REVOLUT_REVOLT21",
|
||||
"status": "active",
|
||||
"iban": "LT313250081177977789",
|
||||
"amount": 1000.00,
|
||||
"currency": "EUR",
|
||||
"type": "interimAvailable",
|
||||
"timestamp": datetime.now(),
|
||||
}
|
||||
|
||||
|
||||
class MockContext:
|
||||
"""Mock context for testing."""
|
||||
|
||||
|
||||
class TestSQLiteDatabase:
|
||||
"""Test SQLite database operations."""
|
||||
|
||||
def test_persist_transactions(self, mock_home_db_path, sample_transactions):
|
||||
"""Test persisting transactions to database."""
|
||||
ctx = MockContext()
|
||||
|
||||
# Mock the database path
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
# Persist transactions
|
||||
new_transactions = sqlite_db.persist_transactions(
|
||||
ctx, "test-account-123", sample_transactions
|
||||
)
|
||||
|
||||
# Should return all transactions as new
|
||||
assert len(new_transactions) == 2
|
||||
assert new_transactions[0]["internalTransactionId"] == "txn-001"
|
||||
|
||||
def test_persist_transactions_duplicates(
|
||||
self, mock_home_db_path, sample_transactions
|
||||
):
|
||||
"""Test handling duplicate transactions."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
# Insert transactions twice
|
||||
new_transactions_1 = sqlite_db.persist_transactions(
|
||||
ctx, "test-account-123", sample_transactions
|
||||
)
|
||||
new_transactions_2 = sqlite_db.persist_transactions(
|
||||
ctx, "test-account-123", sample_transactions
|
||||
)
|
||||
|
||||
# First time should return all as new
|
||||
assert len(new_transactions_1) == 2
|
||||
# Second time should return none (all duplicates)
|
||||
assert len(new_transactions_2) == 0
|
||||
|
||||
def test_get_transactions_all(self, mock_home_db_path, sample_transactions):
|
||||
"""Test retrieving all transactions."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
# Insert test data
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Get all transactions
|
||||
transactions = sqlite_db.get_transactions()
|
||||
|
||||
assert len(transactions) == 2
|
||||
assert (
|
||||
transactions[0]["internalTransactionId"] == "txn-002"
|
||||
) # Ordered by date DESC
|
||||
assert transactions[1]["internalTransactionId"] == "txn-001"
|
||||
|
||||
def test_get_transactions_filtered_by_account(
|
||||
self, mock_home_db_path, sample_transactions
|
||||
):
|
||||
"""Test filtering transactions by account ID."""
|
||||
ctx = MockContext()
|
||||
|
||||
# Add transaction for different account
|
||||
other_account_transaction = sample_transactions[0].copy()
|
||||
other_account_transaction["internalTransactionId"] = "txn-003"
|
||||
other_account_transaction["accountId"] = "other-account"
|
||||
|
||||
all_transactions = sample_transactions + [other_account_transaction]
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", all_transactions)
|
||||
|
||||
# Filter by account
|
||||
transactions = sqlite_db.get_transactions(account_id="test-account-123")
|
||||
|
||||
assert len(transactions) == 2
|
||||
for txn in transactions:
|
||||
assert txn["accountId"] == "test-account-123"
|
||||
|
||||
def test_get_transactions_with_pagination(
|
||||
self, mock_home_db_path, sample_transactions
|
||||
):
|
||||
"""Test transaction pagination."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Get first page
|
||||
transactions_page1 = sqlite_db.get_transactions(limit=1, offset=0)
|
||||
assert len(transactions_page1) == 1
|
||||
|
||||
# Get second page
|
||||
transactions_page2 = sqlite_db.get_transactions(limit=1, offset=1)
|
||||
assert len(transactions_page2) == 1
|
||||
|
||||
# Should be different transactions
|
||||
assert (
|
||||
transactions_page1[0]["internalTransactionId"]
|
||||
!= transactions_page2[0]["internalTransactionId"]
|
||||
)
|
||||
|
||||
def test_get_transactions_with_amount_filter(
|
||||
self, mock_home_db_path, sample_transactions
|
||||
):
|
||||
"""Test filtering transactions by amount."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Filter by minimum amount (should exclude coffee shop payment)
|
||||
transactions = sqlite_db.get_transactions(min_amount=-20.0)
|
||||
assert len(transactions) == 1
|
||||
assert transactions[0]["transactionValue"] == -10.50
|
||||
|
||||
def test_get_transactions_with_search(self, mock_home_db_path, sample_transactions):
|
||||
"""Test searching transactions by description."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Search for "Coffee"
|
||||
transactions = sqlite_db.get_transactions(search="Coffee")
|
||||
assert len(transactions) == 1
|
||||
assert "Coffee" in transactions[0]["description"]
|
||||
|
||||
def test_get_transactions_empty_database(self, mock_home_db_path):
|
||||
"""Test getting transactions from empty database."""
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
transactions = sqlite_db.get_transactions()
|
||||
assert transactions == []
|
||||
|
||||
def test_get_transactions_nonexistent_database(self):
|
||||
"""Test getting transactions when database doesn't exist."""
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = Path("/nonexistent")
|
||||
|
||||
transactions = sqlite_db.get_transactions()
|
||||
assert transactions == []
|
||||
|
||||
def test_persist_balances(self, mock_home_db_path, sample_balance):
|
||||
"""Test persisting balance data."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
result = sqlite_db.persist_balances(ctx, sample_balance)
|
||||
|
||||
# Should return the balance data
|
||||
assert result["account_id"] == "test-account-123"
|
||||
|
||||
def test_get_balances(self, mock_home_db_path, sample_balance):
|
||||
"""Test retrieving balances."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
# Insert test balance
|
||||
sqlite_db.persist_balances(ctx, sample_balance)
|
||||
|
||||
# Get balances
|
||||
balances = sqlite_db.get_balances()
|
||||
|
||||
assert len(balances) == 1
|
||||
assert balances[0]["account_id"] == "test-account-123"
|
||||
assert balances[0]["amount"] == 1000.00
|
||||
|
||||
def test_get_balances_filtered_by_account(self, mock_home_db_path, sample_balance):
|
||||
"""Test filtering balances by account ID."""
|
||||
ctx = MockContext()
|
||||
|
||||
# Create balance for different account
|
||||
other_balance = sample_balance.copy()
|
||||
other_balance["account_id"] = "other-account"
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_balances(ctx, sample_balance)
|
||||
sqlite_db.persist_balances(ctx, other_balance)
|
||||
|
||||
# Filter by account
|
||||
balances = sqlite_db.get_balances(account_id="test-account-123")
|
||||
|
||||
assert len(balances) == 1
|
||||
assert balances[0]["account_id"] == "test-account-123"
|
||||
|
||||
def test_get_account_summary(self, mock_home_db_path, sample_transactions):
|
||||
"""Test getting account summary from transactions."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
summary = sqlite_db.get_account_summary("test-account-123")
|
||||
|
||||
assert summary is not None
|
||||
assert summary["accountId"] == "test-account-123"
|
||||
assert summary["institutionId"] == "REVOLUT_REVOLT21"
|
||||
assert summary["iban"] == "LT313250081177977789"
|
||||
|
||||
def test_get_account_summary_nonexistent(self, mock_home_db_path):
|
||||
"""Test getting summary for nonexistent account."""
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
summary = sqlite_db.get_account_summary("nonexistent")
|
||||
assert summary is None
|
||||
|
||||
def test_get_transaction_count(self, mock_home_db_path, sample_transactions):
|
||||
"""Test getting transaction count."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Get total count
|
||||
count = sqlite_db.get_transaction_count()
|
||||
assert count == 2
|
||||
|
||||
# Get count for specific account
|
||||
count_filtered = sqlite_db.get_transaction_count(
|
||||
account_id="test-account-123"
|
||||
)
|
||||
assert count_filtered == 2
|
||||
|
||||
# Get count for nonexistent account
|
||||
count_none = sqlite_db.get_transaction_count(account_id="nonexistent")
|
||||
assert count_none == 0
|
||||
|
||||
def test_get_transaction_count_with_filters(
|
||||
self, mock_home_db_path, sample_transactions
|
||||
):
|
||||
"""Test getting transaction count with filters."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Filter by search
|
||||
count = sqlite_db.get_transaction_count(search="Coffee")
|
||||
assert count == 1
|
||||
|
||||
# Filter by amount
|
||||
count = sqlite_db.get_transaction_count(min_amount=-20.0)
|
||||
assert count == 1
|
||||
|
||||
def test_database_indexes_created(self, mock_home_db_path, sample_transactions):
|
||||
"""Test that database indexes are created properly."""
|
||||
ctx = MockContext()
|
||||
|
||||
with patch("pathlib.Path.home") as mock_home:
|
||||
mock_home.return_value = mock_home_db_path.parent / ".."
|
||||
|
||||
# Persist transactions to create tables and indexes
|
||||
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)
|
||||
|
||||
# Get transactions to ensure we can query the table (indexes working)
|
||||
transactions = sqlite_db.get_transactions(account_id="test-account-123")
|
||||
assert len(transactions) == 2
|
||||
780
uv.lock
generated
Normal file
780
uv.lock
generated
Normal file
@@ -0,0 +1,780 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = "==3.12.*"
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
version = "0.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.10.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "apscheduler"
|
||||
version = "3.11.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "tzlocal" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347, upload-time = "2024-11-24T19:39:26.463Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004, upload-time = "2024-11-24T19:39:24.442Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2024.12.14"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010, upload-time = "2024-12-14T13:52:38.02Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927, upload-time = "2024-12-14T13:52:36.114Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfgv"
|
||||
version = "3.4.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188, upload-time = "2024-12-24T18:12:35.43Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105, upload-time = "2024-12-24T18:10:38.83Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404, upload-time = "2024-12-24T18:10:44.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423, upload-time = "2024-12-24T18:10:45.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184, upload-time = "2024-12-24T18:10:47.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268, upload-time = "2024-12-24T18:10:50.589Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601, upload-time = "2024-12-24T18:10:52.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098, upload-time = "2024-12-24T18:10:53.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520, upload-time = "2024-12-24T18:10:55.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852, upload-time = "2024-12-24T18:10:57.647Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488, upload-time = "2024-12-24T18:10:59.43Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192, upload-time = "2024-12-24T18:11:00.676Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550, upload-time = "2024-12-24T18:11:01.952Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785, upload-time = "2024-12-24T18:11:03.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "click"
|
||||
version = "8.1.8"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "discord-webhook"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e8/e6/660b07356a15d98787d893f879efc404eb15176312d457f2f6f7090acd32/discord_webhook-1.3.1.tar.gz", hash = "sha256:ee3e0f3ea4f3dc8dc42be91f75b894a01624c6c13fea28e23ebcf9a6c9a304f7", size = 11715, upload-time = "2024-01-31T17:23:14.463Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/92/e2/eed83ebc8d88da0930143a6dd1d0ba0b6deba1fd91b956f21c23a2608510/discord_webhook-1.3.1-py3-none-any.whl", hash = "sha256:ede07028316de76d24eb811836e2b818b2017510da786777adcb0d5970e7af79", size = 13206, upload-time = "2024-01-31T17:23:12.424Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "distlib"
|
||||
version = "0.3.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923, upload-time = "2024-10-09T18:35:47.551Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.116.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.16.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.16.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httptools"
|
||||
version = "0.6.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "certifi" },
|
||||
{ name = "httpcore" },
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.6.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cf/92/69934b9ef3c31ca2470980423fda3d00f0460ddefdf30a67adf7f17e2e00/identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc", size = 99213, upload-time = "2025-01-04T17:01:41.99Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/fa/dce098f4cdf7621aa8f7b4f919ce545891f489482f0bfa5102f3eca8608b/identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566", size = 99078, upload-time = "2025-01-04T17:01:40.667Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.10"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "leggen"
|
||||
version = "0.6.11"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "apscheduler" },
|
||||
{ name = "click" },
|
||||
{ name = "discord-webhook" },
|
||||
{ name = "fastapi" },
|
||||
{ name = "httpx" },
|
||||
{ name = "loguru" },
|
||||
{ name = "requests" },
|
||||
{ name = "tabulate" },
|
||||
{ name = "tomli-w" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "mypy" },
|
||||
{ name = "pre-commit" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-mock" },
|
||||
{ name = "requests-mock" },
|
||||
{ name = "respx" },
|
||||
{ name = "ruff" },
|
||||
{ name = "types-requests" },
|
||||
{ name = "types-tabulate" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "apscheduler", specifier = ">=3.10.0,<4" },
|
||||
{ name = "click", specifier = ">=8.1.7,<9" },
|
||||
{ name = "discord-webhook", specifier = ">=1.3.1,<2" },
|
||||
{ name = "fastapi", specifier = ">=0.104.0,<1" },
|
||||
{ name = "httpx", specifier = ">=0.28.1" },
|
||||
{ name = "loguru", specifier = ">=0.7.2,<0.8" },
|
||||
{ name = "requests", specifier = ">=2.31.0,<3" },
|
||||
{ name = "tabulate", specifier = ">=0.9.0,<0.10" },
|
||||
{ name = "tomli-w", specifier = ">=1.0.0,<2" },
|
||||
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0,<1" },
|
||||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "mypy", specifier = ">=1.17.1" },
|
||||
{ name = "pre-commit", specifier = ">=3.6.0" },
|
||||
{ name = "pytest", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0.23.0" },
|
||||
{ name = "pytest-mock", specifier = ">=3.12.0" },
|
||||
{ name = "requests-mock", specifier = ">=1.12.0" },
|
||||
{ name = "respx", specifier = ">=0.21.0" },
|
||||
{ name = "ruff", specifier = ">=0.6.1" },
|
||||
{ name = "types-requests", specifier = ">=2.32.4.20250809" },
|
||||
{ name = "types-tabulate", specifier = ">=0.9.0.20241207" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "loguru"
|
||||
version = "0.7.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "win32-setctime", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "1.17.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "mypy-extensions" },
|
||||
{ name = "pathspec" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodeenv"
|
||||
version = "1.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pathspec"
|
||||
version = "0.12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "platformdirs"
|
||||
version = "4.3.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pre-commit"
|
||||
version = "4.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cfgv" },
|
||||
{ name = "identify" },
|
||||
{ name = "nodeenv" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "virtualenv" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678, upload-time = "2024-10-08T16:09:37.641Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713, upload-time = "2024-10-08T16:09:35.726Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.11.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-types" },
|
||||
{ name = "pydantic-core" },
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-core"
|
||||
version = "2.33.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "iniconfig" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.14.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "charset-normalizer" },
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests-mock"
|
||||
version = "1.12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "requests" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/92/32/587625f91f9a0a3d84688bf9cfc4b2480a7e8ec327cefd0ff2ac891fd2cf/requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401", size = 60901, upload-time = "2024-03-29T03:54:29.446Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/97/ec/889fbc557727da0c34a33850950310240f2040f3b1955175fdb2b36a8910/requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", size = 27695, upload-time = "2024-03-29T03:54:27.64Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "respx"
|
||||
version = "0.22.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/67/3e/e89f736f01aa9517a97e2e7e0ce8d34a4d8207087b3cfdec95133fee13b5/ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17", size = 3498844, upload-time = "2025-01-10T18:57:53.896Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/05/c3a2e0feb3d5d394cdfd552de01df9d3ec8a3a3771bbff247fab7e668653/ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743", size = 10645241, upload-time = "2025-01-10T18:56:45.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/da/59f0a40e5f88ee5c054ad175caaa2319fc96571e1d29ab4730728f2aad4f/ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f", size = 10391066, upload-time = "2025-01-10T18:56:52.224Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/fe/85e1c1acf0ba04a3f2d54ae61073da030f7a5dc386194f96f3c6ca444a78/ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb", size = 10012308, upload-time = "2025-01-10T18:56:55.426Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/9b/780aa5d4bdca8dcea4309264b8faa304bac30e1ce0bcc910422bfcadd203/ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca", size = 10881960, upload-time = "2025-01-10T18:56:59.539Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/f4/dac4361afbfe520afa7186439e8094e4884ae3b15c8fc75fb2e759c1f267/ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce", size = 10414803, upload-time = "2025-01-10T18:57:04.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/a2/057a3cb7999513cb78d6cb33a7d1cc6401c82d7332583786e4dad9e38e44/ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969", size = 11464929, upload-time = "2025-01-10T18:57:08.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/c6/1ccfcc209bee465ced4874dcfeaadc88aafcc1ea9c9f31ef66f063c187f0/ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd", size = 12170717, upload-time = "2025-01-10T18:57:12.564Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/97/4a524027518525c7cf6931e9fd3b2382be5e4b75b2b61bec02681a7685a5/ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a", size = 11708921, upload-time = "2025-01-10T18:57:17.216Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/a4/4e77cf6065c700d5593b25fca6cf725b1ab6d70674904f876254d0112ed0/ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b", size = 13058074, upload-time = "2025-01-10T18:57:20.57Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/d6/fcb78e0531e863d0a952c4c5600cc5cd317437f0e5f031cd2288b117bb37/ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831", size = 11281093, upload-time = "2025-01-10T18:57:25.526Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/3b/7235bbeff00c95dc2d073cfdbf2b871b5bbf476754c5d277815d286b4328/ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab", size = 10882610, upload-time = "2025-01-10T18:57:28.855Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/66/5599d23257c61cf038137f82999ca8f9d0080d9d5134440a461bef85b461/ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1", size = 10489273, upload-time = "2025-01-10T18:57:32.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/85/de4aa057e2532db0f9761e2c2c13834991e087787b93e4aeb5f1cb10d2df/ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366", size = 11003314, upload-time = "2025-01-10T18:57:35.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/42/afedcaa089116d81447347f76041ff46025849fedb0ed2b187d24cf70fca/ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f", size = 11342982, upload-time = "2025-01-10T18:57:38.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/c6/fe45f3eb27e3948b41a305d8b768e949bf6a39310e9df73f6c576d7f1d9f/ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72", size = 8819750, upload-time = "2025-01-10T18:57:41.93Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/8d/580db77c3b9d5c3d9479e55b0b832d279c30c8f00ab0190d4cd8fc67831c/ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19", size = 9701331, upload-time = "2025-01-10T18:57:46.334Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/94/0498cdb7316ed67a1928300dd87d659c933479f44dec51b4f62bfd1f8028/ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7", size = 9145708, upload-time = "2025-01-10T18:57:51.308Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.47.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tabulate"
|
||||
version = "0.9.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli-w"
|
||||
version = "1.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/75/241269d1da26b624c0d5e110e8149093c759b7a286138f4efd61a60e75fe/tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021", size = 7184, upload-time = "2025-01-15T12:07:24.262Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/18/c86eb8e0202e32dd3df50d43d7ff9854f8e0603945ff398974c1d91ac1ef/tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90", size = 6675, upload-time = "2025-01-15T12:07:22.074Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.32.4.20250809"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-tabulate"
|
||||
version = "0.9.0.20241207"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3f/43/16030404a327e4ff8c692f2273854019ed36718667b2993609dc37d14dd4/types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230", size = 8195, upload-time = "2024-12-07T02:54:42.554Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5e/86/a9ebfd509cbe74471106dffed320e208c72537f9aeb0a55eaa6b1b5e4d17/types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85", size = 8307, upload-time = "2024-12-07T02:54:41.031Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-inspection"
|
||||
version = "0.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzdata"
|
||||
version = "2025.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzlocal"
|
||||
version = "5.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "tzdata", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.35.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
standard = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "httptools" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
|
||||
{ name = "watchfiles" },
|
||||
{ name = "websockets" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvloop"
|
||||
version = "0.21.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "virtualenv"
|
||||
version = "20.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "distlib" },
|
||||
{ name = "filelock" },
|
||||
{ name = "platformdirs" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/39/689abee4adc85aad2af8174bb195a819d0be064bf55fcc73b49d2b28ae77/virtualenv-20.28.1.tar.gz", hash = "sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329", size = 7650532, upload-time = "2025-01-03T01:56:53.613Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/51/8f/dfb257ca6b4e27cb990f1631142361e4712badab8e3ca8dc134d96111515/virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb", size = 4276719, upload-time = "2025-01-03T01:56:50.498Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "watchfiles"
|
||||
version = "1.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = "2025-06-15T19:05:34.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "websockets"
|
||||
version = "15.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "win32-setctime"
|
||||
version = "1.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
|
||||
]
|
||||
Reference in New Issue
Block a user