Mirror of https://github.com/elisiariocouto/leggen.git (synced 2025-12-28 23:09:13 +00:00)

Compare commits: 2025.9.10...da98b7b2b7 (19 commits)

Commits (SHA1):
- da98b7b2b7
- 2467cb2f5a
- 5ae3a51d81
- d09cf6d04c
- 2c6e099596
- 990d0295b3
- 318ca517f7
- 0e645d9bae
- d51aa9429e
- c8f0a103c6
- 5987a759b8
- 6bfbed8fb6
- b7e4ec4a1b
- 35b6d98e6a
- 3e248f95a8
- e136fc4b75
- 692bee574e
- 482f16c77e
- c6ac4455f8
.gitignore (vendored): 2 changes

@@ -162,3 +162,5 @@ docker-compose.dev.yml
nocodb/
sql/
leggen.db
*.db
config.toml
@@ -15,8 +15,8 @@ repos:
hooks:
- id: mypy
name: Static type check with mypy
entry: uv run mypy leggen leggend --check-untyped-defs
files: "^leggen(d)?/.*"
entry: uv run mypy leggen --check-untyped-defs
files: "^leggen/.*"
language: "system"
types: ["python"]
always_run: true
AGENTS.md: 68 changes

@@ -1,5 +1,55 @@
# Agent Guidelines for Leggen

## Quick Setup for Development

### Prerequisites
- **uv** must be installed for Python dependency management (can be installed via `pip install uv`)
- **Configuration file**: Copy `config.example.toml` to `config.toml` before running any commands:
```bash
cp config.example.toml config.toml
```

### Generate Mock Database
The leggen CLI provides a command to generate a mock database for testing:

```bash
# Generate sample database with default settings (3 accounts, 50 transactions each)
uv run leggen --config config.toml generate_sample_db --database /path/to/test.db --force

# Custom configuration
uv run leggen --config config.toml generate_sample_db --database ./test-data.db --accounts 5 --transactions 100 --force
```

The command outputs instructions for setting the required environment variable to use the generated database.

### Start the API Server
1. Install uv if not already installed: `pip install uv`
2. Set the database environment variable to point to your generated mock database:
```bash
export LEGGEN_DATABASE_PATH=/path/to/your/generated/database.db
```
3. Ensure the API can find the configuration file (choose one):
```bash
# Option 1: Copy config to the expected location
mkdir -p ~/.config/leggen && cp config.toml ~/.config/leggen/config.toml

# Option 2: Set environment variable to current config file
export LEGGEN_CONFIG_FILE=./config.toml
```
4. Start the API server:
```bash
uv run leggen server
```
- For development mode with auto-reload: `uv run leggen server --reload`
- API will be available at `http://localhost:8000` with docs at `http://localhost:8000/docs`

### Start the Frontend
1. Navigate to the frontend directory: `cd frontend`
2. Install npm dependencies: `npm install`
3. Start the development server: `npm run dev`
- Frontend will be available at `http://localhost:3000`
- The frontend is configured to connect to the API at `http://localhost:8000/api/v1`

## Build/Lint/Test Commands

### Frontend (React/TypeScript)
@@ -10,7 +60,7 @@
### Backend (Python)
- **Lint**: `uv run ruff check .`
- **Format**: `uv run ruff format .`
- **Type check**: `uv run mypy leggen leggend --check-untyped-defs`
- **Type check**: `uv run mypy leggen --check-untyped-defs`
- **All checks**: `uv run pre-commit run --all-files`
- **Run all tests**: `uv run pytest`
- **Run single test**: `uv run pytest tests/unit/test_api_accounts.py::TestAccountsAPI::test_get_all_accounts_success -v`
@@ -37,6 +87,20 @@

### General
- **Formatting**: ruff for Python, ESLint for TypeScript
- **Commits**: Use conventional commits, run pre-commit hooks before pushing
- **Commits**: Use conventional commits with optional scopes, run pre-commit hooks before pushing
  - Format: `type(scope): Description starting with uppercase and ending with period.`
  - Scopes: `cli`, `api`, `frontend` (optional)
  - Types: `feat`, `fix`, `refactor` (avoid too many different types)
  - Examples:
    - `feat(frontend): Add support for S3 backups.`
    - `fix(api): Resolve authentication timeout issues.`
    - `refactor(cli): Improve error handling for missing config.`
  - Avoid including specific numbers, counts, or data-dependent information that may become outdated
- **Security**: Never log sensitive data, use environment variables for secrets

## Contributing Guidelines

This repository follows conventional changelog practices. Refer to `CONTRIBUTING.md` for detailed contribution guidelines including:
- Commit message format and scoping
- Release process using `scripts/release.sh`
- Pre-commit hooks setup with `pre-commit install`
@@ -18,7 +18,7 @@ FROM python:3.13-alpine
LABEL org.opencontainers.image.source="https://github.com/elisiariocouto/leggen"
LABEL org.opencontainers.image.authors="Elisiário Couto <elisiario@couto.io>"
LABEL org.opencontainers.image.licenses="MIT"
LABEL org.opencontainers.image.title="Leggend API"
LABEL org.opencontainers.image.title="Leggen API"
LABEL org.opencontainers.image.description="Open Banking API for Leggen"
LABEL org.opencontainers.image.url="https://github.com/elisiariocouto/leggen"

@@ -30,4 +30,4 @@ EXPOSE 8000

HEALTHCHECK --interval=30s --timeout=5s --start-period=5s CMD wget -q --spider http://127.0.0.1:8000/api/v1/health || exit 1

CMD ["/app/.venv/bin/leggend"]
CMD ["/app/.venv/bin/leggen", "server"]
README.md: 29 changes

@@ -2,14 +2,14 @@

An Open Banking CLI and API service for managing bank connections and transactions.

This tool provides **FastAPI backend service** (`leggend`), a **React Web Interface** and a **command-line interface** (`leggen`) to connect to banks using the GoCardless Open Banking API.
This tool provides a **unified command-line interface** (`leggen`) with both CLI commands and an integrated **FastAPI backend service**, plus a **React Web Interface** to connect to banks using the GoCardless Open Banking API.

Having your bank data accessible through both CLI and REST API gives you the power to backup, analyze, create reports, and integrate with other applications.

## 🛠️ Technologies

### 🔌 API & Backend
- [FastAPI](https://fastapi.tiangolo.com/): High-performance async API backend (`leggend` service)
- [FastAPI](https://fastapi.tiangolo.com/): High-performance async API backend (integrated into `leggen server`)
- [GoCardless Open Banking API](https://developer.gocardless.com/bank-account-data/overview): for connecting to banks
- [APScheduler](https://apscheduler.readthedocs.io/): Background job scheduling with configurable cron

@@ -107,7 +107,7 @@ For development or local installation:
uv sync # or pip install -e .

# Start the API service
uv run leggend --reload # Development mode with auto-reload
uv run leggen server --reload # Development mode with auto-reload

# Use the CLI (in another terminal)
uv run leggen --help
@@ -152,19 +152,19 @@ case-sensitive = ["SpecificStore"]

## 📖 Usage

### API Service (`leggend`)
### API Service (`leggen server`)

Start the FastAPI backend service:

```bash
# Production mode
leggend
leggen server

# Development mode with auto-reload
leggend --reload
leggen server --reload

# Custom host and port
leggend --host 127.0.0.1 --port 8080
leggen server --host 127.0.0.1 --port 8080
```

**API Documentation**: Visit `http://localhost:8000/docs` for interactive API documentation.
@@ -207,7 +207,7 @@ leggen sync --force --wait
leggen --api-url http://localhost:8080 status

# Set via environment variable
export LEGGEND_API_URL=http://localhost:8080
export LEGGEN_API_URL=http://localhost:8080
leggen status
```

@@ -223,7 +223,7 @@ docker compose -f compose.dev.yml ps

# Check logs
docker compose -f compose.dev.yml logs frontend
docker compose -f compose.dev.yml logs leggend
docker compose -f compose.dev.yml logs leggen-server

# Stop development services
docker compose -f compose.dev.yml down
@@ -239,7 +239,7 @@ docker compose ps

# Check logs
docker compose logs frontend
docker compose logs leggend
docker compose logs leggen-server

# Access the web interface at http://localhost:3000
# API documentation at http://localhost:8000/docs
@@ -290,7 +290,7 @@ cd leggen
uv sync

# Start API service with auto-reload
uv run leggend --reload
uv run leggen server --reload

# Use CLI commands
uv run leggen status
@@ -333,13 +333,10 @@ The test suite includes:
leggen/ # CLI application
├── commands/ # CLI command implementations
├── utils/ # Shared utilities
└── api_client.py # API client for leggend service

leggend/ # FastAPI backend service
├── api/ # API routes and models
├── api/ # FastAPI API routes and models
├── services/ # Business logic
├── background/ # Background job scheduler
└── main.py # FastAPI application
└── api_client.py # API client for server communication

tests/ # Test suite
├── conftest.py # Shared test fixtures
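Not part of the diff: the README's `leggen server` and `LEGGEN_API_URL` instructions can be smoke-tested with a few lines of Python. This is a minimal sketch that assumes a locally running server on port 8000 and uses only the `/api/v1/health` endpoint that the Dockerfile HEALTHCHECK above also targets:

```python
import os

import requests

# Same precedence the CLI documents: explicit URL, then LEGGEN_API_URL,
# then the default http://localhost:8000.
base_url = os.environ.get("LEGGEN_API_URL", "http://localhost:8000")

response = requests.get(f"{base_url}/api/v1/health", timeout=5)
response.raise_for_status()
print(response.json())  # health payload reported by the server
```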
@@ -8,13 +8,13 @@ services:
ports:
- "127.0.0.1:3000:80"
environment:
- API_BACKEND_URL=${API_BACKEND_URL:-http://leggend:8000}
- API_BACKEND_URL=${API_BACKEND_URL:-http://leggen-server:8000}
depends_on:
leggend:
leggen-server:
condition: service_healthy

# FastAPI backend service
leggend:
leggen-server:
build:
context: .
dockerfile: Dockerfile
@@ -6,11 +6,11 @@ services:
ports:
- "127.0.0.1:3000:80"
depends_on:
leggend:
leggen-server:
condition: service_healthy

# FastAPI backend service
leggend:
leggen-server:
image: ghcr.io/elisiariocouto/leggen:latest
restart: "unless-stopped"
ports:
@@ -20,8 +20,8 @@ enabled = true

# Optional: Telegram notifications
[notifications.telegram]
token = "your-bot-token"
chat_id = 12345
api-key = "your-bot-token"
chat-id = 12345
enabled = true

# Optional: Transaction filters for notifications
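For context only (not part of the commit): the renamed hyphenated keys are read as plain string keys when the TOML file is loaded. A minimal sketch using Python's standard `tomllib`; the project's real loader lives in `leggen.utils.config`:

```python
import tomllib  # standard library since Python 3.11

with open("config.toml", "rb") as f:
    cfg = tomllib.load(f)

telegram = cfg.get("notifications", {}).get("telegram", {})
# Hyphenated TOML keys must be accessed as strings, not attributes.
print(telegram.get("api-key"), telegram.get("chat-id"), telegram.get("enabled"))
```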
@@ -25,7 +25,7 @@ COPY --from=builder /app/dist /usr/share/nginx/html
COPY default.conf.template /etc/nginx/templates/default.conf.template

# Set default API backend URL (can be overridden at runtime)
ENV API_BACKEND_URL=http://leggend:8000
ENV API_BACKEND_URL=http://leggen-server:8000

# Expose port 80
EXPOSE 80
@@ -93,7 +93,7 @@ The frontend supports configurable API URLs through environment variables:

- Uses relative URLs (`/api/v1`) that nginx proxies to the backend
- Configure nginx proxy target via `API_BACKEND_URL` environment variable
- Default: `http://leggend:8000`
- Default: `http://leggen-server:8000`

**Docker Compose:**
@@ -1,6 +1,6 @@
import {
LineChart,
Line,
AreaChart,
Area,
XAxis,
YAxis,
CartesianGrid,
@@ -8,10 +8,11 @@ import {
ResponsiveContainer,
Legend,
} from "recharts";
import type { Balance } from "../../types/api";
import type { Balance, Account } from "../../types/api";

interface BalanceChartProps {
data: Balance[];
accounts: Account[];
className?: string;
}

@@ -26,16 +27,43 @@ interface AggregatedDataPoint {
[key: string]: string | number;
}

export default function BalanceChart({ data, className }: BalanceChartProps) {
export default function BalanceChart({ data, accounts, className }: BalanceChartProps) {
// Create a lookup map for account info
const accountMap = accounts.reduce((map, account) => {
map[account.id] = account;
return map;
}, {} as Record<string, Account>);

// Helper function to get bank name from institution_id
const getBankName = (institutionId: string): string => {
const bankMapping: Record<string, string> = {
'REVOLUT_REVOLT21': 'Revolut',
'NUBANK_NUPBBR25': 'Nu Pagamentos',
'BANCOBPI_BBPIPTPL': 'Banco BPI',
// Add more mappings as needed
};
return bankMapping[institutionId] || institutionId.split('_')[0];
};

// Helper function to create display name for account
const getAccountDisplayName = (accountId: string): string => {
const account = accountMap[accountId];
if (account) {
const bankName = getBankName(account.institution_id);
const accountName = account.name || `Account ${accountId.split('-')[1]}`;
return `${bankName} - ${accountName}`;
}
return `Account ${accountId.split('-')[1]}`;
};
// Process balance data for the chart
const chartData = data
.filter((balance) => balance.balance_type === "closingBooked")
.map((balance) => ({
date: new Date(balance.reference_date).toLocaleDateString(),
date: new Date(balance.reference_date).toLocaleDateString('en-GB'), // DD/MM/YYYY format
balance: balance.balance_amount,
account_id: balance.account_id,
}))
.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
.sort((a, b) => new Date(a.date.split('/').reverse().join('/')).getTime() - new Date(b.date.split('/').reverse().join('/')).getTime());

// Group by account and aggregate
const accountBalances: { [key: string]: ChartDataPoint[] } = {};
@@ -58,7 +86,7 @@ export default function BalanceChart({ data, className }: BalanceChartProps) {
});

const finalData = Object.values(aggregatedData).sort(
(a, b) => new Date(a.date).getTime() - new Date(b.date).getTime()
(a, b) => new Date(a.date.split('/').reverse().join('/')).getTime() - new Date(b.date.split('/').reverse().join('/')).getTime()
);

const colors = ["#3B82F6", "#10B981", "#F59E0B", "#EF4444", "#8B5CF6"];
@@ -83,14 +111,16 @@ export default function BalanceChart({ data, className }: BalanceChartProps) {
</h3>
<div className="h-80">
<ResponsiveContainer width="100%" height="100%">
<LineChart data={finalData}>
<AreaChart data={finalData}>
<CartesianGrid strokeDasharray="3 3" />
<XAxis
dataKey="date"
tick={{ fontSize: 12 }}
tickFormatter={(value) => {
const date = new Date(value);
return date.toLocaleDateString(undefined, {
// Convert DD/MM/YYYY back to a proper date for formatting
const [day, month, year] = value.split('/');
const date = new Date(year, month - 1, day);
return date.toLocaleDateString('en-GB', {
month: "short",
day: "numeric",
});
@@ -101,25 +131,25 @@ export default function BalanceChart({ data, className }: BalanceChartProps) {
tickFormatter={(value) => `€${value.toLocaleString()}`}
/>
<Tooltip
formatter={(value: number) => [
formatter={(value: number, name: string) => [
`€${value.toLocaleString()}`,
"Balance",
getAccountDisplayName(name),
]}
labelFormatter={(label) => `Date: ${label}`}
/>
<Legend />
{Object.keys(accountBalances).map((accountId, index) => (
<Line
<Area
key={accountId}
type="monotone"
dataKey={accountId}
stackId="1"
fill={colors[index % colors.length]}
stroke={colors[index % colors.length]}
strokeWidth={2}
dot={{ r: 4 }}
name={`Account ${accountId.split('-')[1]}`}
name={getAccountDisplayName(accountId)}
/>
))}
</LineChart>
</AreaChart>
</ResponsiveContainer>
</div>
</div>
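The chart change above formats dates as DD/MM/YYYY and sorts them by reversing the parts into YYYY/MM/DD before comparing. A small Python illustration of why that reversal is needed (illustrative only, not project code):

```python
dates = ["02/01/2025", "15/12/2024", "03/01/2025"]  # DD/MM/YYYY strings

# Plain lexicographic sorting compares the day first, so December 2024
# incorrectly sorts after January 2025.
print(sorted(dates))

# Reversing each date into YYYY/MM/DD (what split('/').reverse().join('/')
# does in the component) makes lexicographic order match chronological order.
print(sorted(dates, key=lambda d: "/".join(reversed(d.split("/")))))
```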
@@ -12,6 +12,7 @@ import apiClient from "../../lib/api";

interface MonthlyTrendsProps {
className?: string;
days?: number;
}

interface MonthlyData {
@@ -31,19 +32,12 @@ interface TooltipProps {
label?: string;
}

export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
// Get transactions for the last 12 months
export default function MonthlyTrends({ className, days = 365 }: MonthlyTrendsProps) {
// Get transactions for the specified period using analytics endpoint
const { data: transactions, isLoading } = useQuery({
queryKey: ["transactions", "monthly-trends"],
queryKey: ["transactions", "monthly-trends", days],
queryFn: async () => {
const response = await apiClient.getTransactions({
startDate: new Date(
Date.now() - 365 * 24 * 60 * 60 * 1000
).toISOString().split("T")[0],
endDate: new Date().toISOString().split("T")[0],
perPage: 1000,
});
return response.data;
return await apiClient.getTransactionsForAnalytics(days);
},
});

@@ -54,12 +48,12 @@ export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
const monthlyMap: { [key: string]: MonthlyData } = {};

transactions.forEach((transaction) => {
const date = new Date(transaction.transaction_date);
const date = new Date(transaction.date);
const monthKey = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}`;

if (!monthlyMap[monthKey]) {
monthlyMap[monthKey] = {
month: date.toLocaleDateString(undefined, {
month: date.toLocaleDateString('en-GB', {
year: 'numeric',
month: 'short'
}),
@@ -69,10 +63,10 @@ export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
};
}

if (transaction.transaction_value > 0) {
monthlyMap[monthKey].income += transaction.transaction_value;
if (transaction.amount > 0) {
monthlyMap[monthKey].income += transaction.amount;
} else {
monthlyMap[monthKey].expenses += Math.abs(transaction.transaction_value);
monthlyMap[monthKey].expenses += Math.abs(transaction.amount);
}

monthlyMap[monthKey].net = monthlyMap[monthKey].income - monthlyMap[monthKey].expenses;
@@ -83,10 +77,20 @@ export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
...Object.entries(monthlyMap)
.sort(([a], [b]) => a.localeCompare(b))
.map(([, data]) => data)
.slice(-12) // Last 12 months
);
}

// Calculate number of months to display based on days filter
const getMonthsToDisplay = (days: number): number => {
if (days <= 30) return 1;
if (days <= 180) return 6;
if (days <= 365) return 12;
return Math.ceil(days / 30);
};

const monthsToDisplay = getMonthsToDisplay(days);
const displayData = monthlyData.slice(-monthsToDisplay);

if (isLoading) {
return (
<div className={className}>
@@ -100,7 +104,7 @@ export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
);
}

if (monthlyData.length === 0) {
if (displayData.length === 0) {
return (
<div className={className}>
<h3 className="text-lg font-medium text-gray-900 mb-4">
@@ -129,14 +133,22 @@ export default function MonthlyTrends({ className }: MonthlyTrendsProps) {
return null;
};

// Generate dynamic title based on time period
const getTitle = (days: number): string => {
if (days <= 30) return "Monthly Spending Trends (Last 30 Days)";
if (days <= 180) return "Monthly Spending Trends (Last 6 Months)";
if (days <= 365) return "Monthly Spending Trends (Last 12 Months)";
return `Monthly Spending Trends (Last ${Math.ceil(days / 30)} Months)`;
};

return (
<div className={className}>
<h3 className="text-lg font-medium text-gray-900 mb-4">
Monthly Spending Trends (Last 12 Months)
{getTitle(days)}
</h3>
<div className="h-80">
<ResponsiveContainer width="100%" height="100%">
<BarChart data={monthlyData} margin={{ top: 20, right: 30, left: 20, bottom: 5 }}>
<BarChart data={displayData} margin={{ top: 20, right: 30, left: 20, bottom: 5 }}>
<CartesianGrid strokeDasharray="3 3" />
<XAxis
dataKey="month"
frontend/src/components/analytics/TimePeriodFilter.tsx (new file): 39 lines

@@ -0,0 +1,39 @@
import { Calendar } from "lucide-react";
import type { TimePeriod } from "../../lib/timePeriods";
import { TIME_PERIODS } from "../../lib/timePeriods";

interface TimePeriodFilterProps {
selectedPeriod: TimePeriod;
onPeriodChange: (period: TimePeriod) => void;
className?: string;
}

export default function TimePeriodFilter({
selectedPeriod,
onPeriodChange,
className = "",
}: TimePeriodFilterProps) {
return (
<div className={`flex items-center gap-4 ${className}`}>
<div className="flex items-center gap-2 text-gray-700">
<Calendar size={20} />
<span className="font-medium">Time Period:</span>
</div>
<div className="flex gap-2">
{TIME_PERIODS.map((period) => (
<button
key={period.value}
onClick={() => onPeriodChange(period)}
className={`px-4 py-2 text-sm font-medium rounded-md transition-colors ${
selectedPeriod.value === period.value
? "bg-blue-600 text-white"
: "bg-gray-100 text-gray-700 hover:bg-gray-200"
}`}
>
{period.label}
</button>
))}
</div>
</div>
);
}
@@ -30,17 +30,33 @@ export default function TransactionDistribution({
accounts,
className,
}: TransactionDistributionProps) {
// Helper function to get bank name from institution_id
const getBankName = (institutionId: string): string => {
const bankMapping: Record<string, string> = {
'REVOLUT_REVOLT21': 'Revolut',
'NUBANK_NUPBBR25': 'Nu Pagamentos',
'BANCOBPI_BBPIPTPL': 'Banco BPI',
// TODO: Add more bank mappings as needed
};
return bankMapping[institutionId] || institutionId.split('_')[0];
};

// Helper function to create display name for account
const getAccountDisplayName = (account: Account): string => {
const bankName = getBankName(account.institution_id);
const accountName = account.name || `Account ${account.id.split('-')[1]}`;
return `${bankName} - ${accountName}`;
};

// Create pie chart data from account balances
const pieData: PieDataPoint[] = accounts.map((account, index) => {
const closingBalance = account.balances.find(
(balance) => balance.balance_type === "closingBooked"
);
const primaryBalance = account.balances?.[0]?.amount || 0;

const colors = ["#3B82F6", "#10B981", "#F59E0B", "#EF4444", "#8B5CF6"];

return {
name: account.name || `Account ${account.id.split('-')[1]}`,
value: closingBalance?.amount || 0,
name: getAccountDisplayName(account),
value: primaryBalance,
color: colors[index % colors.length],
};
});
@@ -2,6 +2,7 @@ import axios from "axios";
import type {
Account,
Transaction,
AnalyticsTransaction,
Balance,
ApiResponse,
NotificationSettings,
@@ -54,6 +55,18 @@ export const apiClient = {
return response.data.data;
},

// Get historical balances for balance progression chart
getHistoricalBalances: async (days?: number, accountId?: string): Promise<Balance[]> => {
const queryParams = new URLSearchParams();
if (days) queryParams.append("days", days.toString());
if (accountId) queryParams.append("account_id", accountId);

const response = await api.get<ApiResponse<Balance[]>>(
`/balances/history?${queryParams.toString()}`
);
return response.data.data;
},

// Get balances for specific account
getAccountBalances: async (accountId: string): Promise<Balance[]> => {
const response = await api.get<ApiResponse<Balance[]>>(
@@ -154,6 +167,17 @@ export const apiClient = {
);
return response.data.data;
},

// Get all transactions for analytics (no pagination)
getTransactionsForAnalytics: async (days?: number): Promise<AnalyticsTransaction[]> => {
const queryParams = new URLSearchParams();
if (days) queryParams.append("days", days.toString());

const response = await api.get<ApiResponse<AnalyticsTransaction[]>>(
`/transactions/analytics?${queryParams.toString()}`
);
return response.data.data;
},
};

export default apiClient;
frontend/src/lib/timePeriods.ts (new file): 19 lines

@@ -0,0 +1,19 @@
export type TimePeriod = {
label: string;
days: number;
value: string;
};

function getDaysFromYearStart(): number {
const now = new Date();
const yearStart = new Date(now.getFullYear(), 0, 1);
const diffTime = now.getTime() - yearStart.getTime();
return Math.ceil(diffTime / (1000 * 60 * 60 * 24));
}

export const TIME_PERIODS: TimePeriod[] = [
{ label: "Last 30 days", days: 30, value: "30d" },
{ label: "Last 6 months", days: 180, value: "6m" },
{ label: "Year to Date", days: getDaysFromYearStart(), value: "ytd" },
{ label: "Last 365 days", days: 365, value: "365d" },
];
@@ -1,10 +1,10 @@
import { createFileRoute } from "@tanstack/react-router";
import { useQuery } from "@tanstack/react-query";
import { useState } from "react";
import {
CreditCard,
TrendingUp,
TrendingDown,
DollarSign,
Activity,
Users,
} from "lucide-react";
@@ -13,12 +13,20 @@ import StatCard from "../components/analytics/StatCard";
import BalanceChart from "../components/analytics/BalanceChart";
import TransactionDistribution from "../components/analytics/TransactionDistribution";
import MonthlyTrends from "../components/analytics/MonthlyTrends";
import TimePeriodFilter from "../components/analytics/TimePeriodFilter";
import type { TimePeriod } from "../lib/timePeriods";
import { TIME_PERIODS } from "../lib/timePeriods";

function AnalyticsDashboard() {
// Default to Last 365 days
const [selectedPeriod, setSelectedPeriod] = useState<TimePeriod>(
TIME_PERIODS.find((p) => p.value === "365d") || TIME_PERIODS[3]
);

// Fetch analytics data
const { data: stats, isLoading: statsLoading } = useQuery({
queryKey: ["transaction-stats"],
queryFn: () => apiClient.getTransactionStats(365), // Last year
queryKey: ["transaction-stats", selectedPeriod.days],
queryFn: () => apiClient.getTransactionStats(selectedPeriod.days),
});

const { data: accounts, isLoading: accountsLoading } = useQuery({
@@ -27,8 +35,8 @@ function AnalyticsDashboard() {
});

const { data: balances, isLoading: balancesLoading } = useQuery({
queryKey: ["balances"],
queryFn: () => apiClient.getBalances(),
queryKey: ["historical-balances", selectedPeriod.days],
queryFn: () => apiClient.getHistoricalBalances(selectedPeriod.days),
});

const isLoading = statsLoading || accountsLoading || balancesLoading;
@@ -38,8 +46,8 @@ function AnalyticsDashboard() {
<div className="p-6">
<div className="animate-pulse">
<div className="h-8 bg-gray-200 rounded w-48 mb-6"></div>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6 mb-8">
{[...Array(4)].map((_, i) => (
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6 mb-8">
{[...Array(3)].map((_, i) => (
<div key={i} className="h-32 bg-gray-200 rounded"></div>
))}
</div>
@@ -52,30 +60,17 @@ function AnalyticsDashboard() {
);
}

const totalBalance = accounts?.reduce((sum, account) => {
const closingBalance = account.balances.find(
(balance) => balance.balance_type === "closingBooked"
);
return sum + (closingBalance?.amount || 0);
}, 0) || 0;

return (
<div className="p-6 space-y-8">
<div>
<h1 className="text-3xl font-bold text-gray-900">Analytics Dashboard</h1>
<p className="mt-2 text-gray-600">
Overview of your financial data and spending patterns
</p>
</div>
{/* Time Period Filter */}
<TimePeriodFilter
selectedPeriod={selectedPeriod}
onPeriodChange={setSelectedPeriod}
className="bg-white rounded-lg shadow p-4 border border-gray-200"
/>

{/* Stats Cards */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
<StatCard
title="Total Balance"
value={`€${totalBalance.toLocaleString()}`}
subtitle={`Across ${accounts?.length || 0} accounts`}
icon={DollarSign}
/>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
<StatCard
title="Total Transactions"
value={stats?.total_transactions || 0}
@@ -126,7 +121,7 @@ function AnalyticsDashboard() {
{/* Charts */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
<div className="bg-white rounded-lg shadow p-6 border border-gray-200">
<BalanceChart data={balances || []} />
<BalanceChart data={balances || []} accounts={accounts || []} />
</div>
<div className="bg-white rounded-lg shadow p-6 border border-gray-200">
<TransactionDistribution accounts={accounts || []} />
@@ -135,43 +130,8 @@ function AnalyticsDashboard() {

{/* Monthly Trends */}
<div className="bg-white rounded-lg shadow p-6 border border-gray-200">
<MonthlyTrends />
<MonthlyTrends days={selectedPeriod.days} />
</div>

{/* Summary Section */}
{stats && (
<div className="bg-blue-50 rounded-lg p-6 border border-blue-200">
<h3 className="text-lg font-medium text-blue-900 mb-4">
Period Summary ({stats.period_days} days)
</h3>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-4 text-sm">
<div>
<p className="text-blue-700 font-medium">Booked Transactions</p>
<p className="text-blue-900">{stats.booked_transactions}</p>
</div>
<div>
<p className="text-blue-700 font-medium">Pending Transactions</p>
<p className="text-blue-900">{stats.pending_transactions}</p>
</div>
<div>
<p className="text-blue-700 font-medium">Transaction Ratio</p>
<p className="text-blue-900">
{stats.total_transactions > 0
? `${Math.round(
(stats.booked_transactions / stats.total_transactions) * 100
)}% booked`
: "No transactions"}
</p>
</div>
<div>
<p className="text-blue-700 font-medium">Spend Rate</p>
<p className="text-blue-900">
€{((stats.total_expenses || 0) / stats.period_days).toFixed(2)}/day
</p>
</div>
</div>
</div>
)}
</div>
);
}
@@ -59,6 +59,17 @@ export interface RawTransactionData {
[key: string]: unknown; // Allow additional fields
}

// Type for analytics transaction data
export interface AnalyticsTransaction {
transaction_id: string;
date: string;
description: string;
amount: number;
currency: string;
status: string;
account_id: string;
}

export interface Transaction {
transaction_id: string; // NEW: stable bank-provided transaction ID
internal_transaction_id: string | null; // OLD: unstable GoCardless ID
@@ -1,5 +1,5 @@
from datetime import datetime
from typing import List, Optional, Dict, Any
from typing import Any, Dict, List, Optional

from pydantic import BaseModel

@@ -1,4 +1,4 @@
from typing import Optional, List
from typing import List, Optional

from pydantic import BaseModel
@@ -1,16 +1,17 @@
from typing import Optional, List, Union
from typing import List, Optional, Union

from fastapi import APIRouter, HTTPException, Query
from loguru import logger

from leggend.api.models.common import APIResponse
from leggend.api.models.accounts import (
AccountDetails,
from leggen.api.models.accounts import (
AccountBalance,
AccountDetails,
AccountUpdate,
Transaction,
TransactionSummary,
AccountUpdate,
)
from leggend.services.database_service import DatabaseService
from leggen.api.models.common import APIResponse
from leggen.services.database_service import DatabaseService

router = APIRouter()
database_service = DatabaseService()
@@ -215,6 +216,35 @@ async def get_all_balances() -> APIResponse:
) from e


@router.get("/balances/history", response_model=APIResponse)
async def get_historical_balances(
days: Optional[int] = Query(
default=365, le=1095, ge=1, description="Number of days of history to retrieve"
),
account_id: Optional[str] = Query(
default=None, description="Filter by specific account ID"
),
) -> APIResponse:
"""Get historical balance progression calculated from transaction history"""
try:
# Get historical balances from database
historical_balances = await database_service.get_historical_balances_from_db(
account_id=account_id, days=days or 365
)

return APIResponse(
success=True,
data=historical_balances,
message=f"Retrieved {len(historical_balances)} historical balance points over {days} days",
)

except Exception as e:
logger.error(f"Failed to get historical balances: {e}")
raise HTTPException(
status_code=500, detail=f"Failed to get historical balances: {str(e)}"
) from e


@router.get("/accounts/{account_id}/transactions", response_model=APIResponse)
async def get_account_transactions(
account_id: str,
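A quick way to exercise the new `/balances/history` endpoint from Python; this sketch is not part of the commit and assumes a local server on port 8000 with the `/api/v1` prefix used throughout this changeset:

```python
import requests

response = requests.get(
    "http://localhost:8000/api/v1/balances/history",
    params={"days": 90},  # optionally add "account_id": "<account uuid>"
    timeout=10,
)
response.raise_for_status()

payload = response.json()  # APIResponse envelope: success, data, message
for point in payload["data"]:
    # Each entry is one historical balance point; the exact fields are
    # whatever get_historical_balances_from_db() returns.
    print(point)
```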
@@ -1,15 +1,15 @@
from fastapi import APIRouter, HTTPException, Query
from loguru import logger

from leggend.api.models.common import APIResponse
from leggend.api.models.banks import (
BankInstitution,
from leggen.api.models.banks import (
BankConnectionRequest,
BankRequisition,
BankConnectionStatus,
BankInstitution,
BankRequisition,
)
from leggend.services.gocardless_service import GoCardlessService
from leggend.utils.gocardless import REQUISITION_STATUS
from leggen.api.models.common import APIResponse
from leggen.services.gocardless_service import GoCardlessService
from leggen.utils.gocardless import REQUISITION_STATUS

router = APIRouter()
gocardless_service = GoCardlessService()
@@ -1,17 +1,18 @@
from typing import Dict, Any
from typing import Any, Dict

from fastapi import APIRouter, HTTPException
from loguru import logger

from leggend.api.models.common import APIResponse
from leggend.api.models.notifications import (
from leggen.api.models.common import APIResponse
from leggen.api.models.notifications import (
DiscordConfig,
NotificationFilters,
NotificationSettings,
NotificationTest,
DiscordConfig,
TelegramConfig,
NotificationFilters,
)
from leggend.services.notification_service import NotificationService
from leggend.config import config
from leggen.services.notification_service import NotificationService
from leggen.utils.config import config

router = APIRouter()
notification_service = NotificationService()
@@ -1,12 +1,13 @@
from typing import Optional
from fastapi import APIRouter, HTTPException, BackgroundTasks

from fastapi import APIRouter, BackgroundTasks, HTTPException
from loguru import logger

from leggend.api.models.common import APIResponse
from leggend.api.models.sync import SyncRequest, SchedulerConfig
from leggend.services.sync_service import SyncService
from leggend.background.scheduler import scheduler
from leggend.config import config
from leggen.api.models.common import APIResponse
from leggen.api.models.sync import SchedulerConfig, SyncRequest
from leggen.background.scheduler import scheduler
from leggen.services.sync_service import SyncService
from leggen.utils.config import config

router = APIRouter()
sync_service = SyncService()
@@ -1,11 +1,12 @@
from typing import Optional, List, Union
from datetime import datetime, timedelta
from typing import List, Optional, Union

from fastapi import APIRouter, HTTPException, Query
from loguru import logger

from leggend.api.models.common import APIResponse, PaginatedResponse
from leggend.api.models.accounts import Transaction, TransactionSummary
from leggend.services.database_service import DatabaseService
from leggen.api.models.accounts import Transaction, TransactionSummary
from leggen.api.models.common import APIResponse, PaginatedResponse
from leggen.services.database_service import DatabaseService

router = APIRouter()
database_service = DatabaseService()
@@ -202,3 +203,53 @@ async def get_transaction_stats(
raise HTTPException(
status_code=500, detail=f"Failed to get transaction stats: {str(e)}"
) from e


@router.get("/transactions/analytics", response_model=APIResponse)
async def get_transactions_for_analytics(
days: int = Query(default=365, description="Number of days to include"),
account_id: Optional[str] = Query(default=None, description="Filter by account ID"),
) -> APIResponse:
"""Get all transactions for analytics (no pagination) for the last N days"""
try:
# Date range for analytics
end_date = datetime.now()
start_date = end_date - timedelta(days=days)

# Format dates for database query
date_from = start_date.isoformat()
date_to = end_date.isoformat()

# Get ALL transactions from database (no limit for analytics)
transactions = await database_service.get_transactions_from_db(
account_id=account_id,
date_from=date_from,
date_to=date_to,
limit=None, # No limit - get all transactions
)

# Transform for frontend (summary format)
transaction_summaries = [
{
"transaction_id": txn["transactionId"],
"date": txn["transactionDate"],
"description": txn["description"],
"amount": txn["transactionValue"],
"currency": txn["transactionCurrency"],
"status": txn["transactionStatus"],
"account_id": txn["accountId"],
}
for txn in transactions
]

return APIResponse(
success=True,
data=transaction_summaries,
message=f"Retrieved {len(transaction_summaries)} transactions for analytics",
)

except Exception as e:
logger.error(f"Failed to get transactions for analytics: {e}")
raise HTTPException(
status_code=500, detail=f"Failed to get analytics transactions: {str(e)}"
) from e
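A hedged sketch of consuming the new `/transactions/analytics` endpoint the same way the frontend's MonthlyTrends component does (income when `amount` is positive, expenses otherwise). The URL and field names come from this diff; the date parsing assumes ISO-8601 strings and everything else is illustrative:

```python
from collections import defaultdict
from datetime import datetime

import requests

response = requests.get(
    "http://localhost:8000/api/v1/transactions/analytics",
    params={"days": 365},
    timeout=30,
)
response.raise_for_status()
transactions = response.json()["data"]

monthly = defaultdict(lambda: {"income": 0.0, "expenses": 0.0})
for txn in transactions:
    month = datetime.fromisoformat(txn["date"]).strftime("%Y-%m")  # assumes ISO dates
    if txn["amount"] > 0:
        monthly[month]["income"] += txn["amount"]
    else:
        monthly[month]["expenses"] += abs(txn["amount"])

for month, totals in sorted(monthly.items()):
    print(month, round(totals["income"] - totals["expenses"], 2))
```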
@@ -1,20 +1,21 @@
import os
import requests
from typing import Dict, Any, Optional, List, Union
from typing import Any, Dict, List, Optional, Union
from urllib.parse import urljoin

import requests

from leggen.utils.text import error


class LeggendAPIClient:
"""Client for communicating with the leggend FastAPI service"""
class LeggenAPIClient:
"""Client for communicating with the leggen FastAPI service"""

base_url: str

def __init__(self, base_url: Optional[str] = None):
self.base_url = (
base_url
or os.environ.get("LEGGEND_API_URL", "http://localhost:8000")
or os.environ.get("LEGGEN_API_URL", "http://localhost:8000")
or "http://localhost:8000"
)
self.session = requests.Session()
@@ -31,7 +32,7 @@ class LeggendAPIClient:
response.raise_for_status()
return response.json()
except requests.exceptions.ConnectionError:
error("Could not connect to leggend service. Is it running?")
error("Could not connect to leggen server. Is it running?")
error(f"Trying to connect to: {self.base_url}")
raise
except requests.exceptions.HTTPError as e:
@@ -48,7 +49,7 @@ class LeggendAPIClient:
raise

def health_check(self) -> bool:
"""Check if the leggend service is healthy"""
"""Check if the leggen server is healthy"""
try:
response = self._make_request("GET", "/health")
return response.get("status") == "healthy"
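Usage of the renamed client, sketched from what the diff shows (constructor with an optional base URL, `LEGGEN_API_URL` fallback, and `health_check()`); other methods are not visible here, so none are assumed:

```python
from leggen.api_client import LeggenAPIClient

# Explicit base_url wins; otherwise LEGGEN_API_URL, then http://localhost:8000.
client = LeggenAPIClient(base_url="http://localhost:8080")

if not client.health_check():
    raise SystemExit("Cannot connect to leggen server. Please ensure it's running.")
print("leggen server healthy at", client.base_url)
```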
@@ -2,9 +2,9 @@ from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from loguru import logger

from leggend.config import config
from leggend.services.sync_service import SyncService
from leggend.services.notification_service import NotificationService
from leggen.services.notification_service import NotificationService
from leggen.services.sync_service import SyncService
from leggen.utils.config import config


class BackgroundScheduler:
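The scheduler module builds on APScheduler's `AsyncIOScheduler` and `CronTrigger` (the imports shown above). A minimal, self-contained sketch of that pattern; the job body and the cron expression are placeholders, not the project's actual scheduler configuration:

```python
import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger


async def sync_job() -> None:
    # Placeholder: leggen wires SyncService / NotificationService in here.
    print("running scheduled sync")


async def main() -> None:
    scheduler = AsyncIOScheduler()
    # Example: run every day at 06:00; the real schedule is configurable.
    scheduler.add_job(sync_job, CronTrigger.from_crontab("0 6 * * *"))
    scheduler.start()
    try:
        await asyncio.Event().wait()  # keep the event loop alive
    finally:
        scheduler.shutdown()


if __name__ == "__main__":
    asyncio.run(main())
```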
@@ -1,7 +1,7 @@
import click

from leggen.api_client import LeggenAPIClient
from leggen.main import cli
from leggen.api_client import LeggendAPIClient
from leggen.utils.text import datefmt, print_table


@@ -11,12 +11,12 @@ def balances(ctx: click.Context):
"""
List balances of all connected accounts
"""
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
api_client = LeggenAPIClient(ctx.obj.get("api_url"))

# Check if leggend service is available
# Check if leggen server is available
if not api_client.health_check():
click.echo(
"Error: Cannot connect to leggend service. Please ensure it's running."
"Error: Cannot connect to leggen server. Please ensure it's running."
)
return
@@ -1,9 +1,9 @@
import click

from leggen.api_client import LeggenAPIClient
from leggen.main import cli
from leggen.api_client import LeggendAPIClient
from leggen.utils.disk import save_file
from leggen.utils.text import info, print_table, warning, success
from leggen.utils.text import info, print_table, success, warning


@cli.command()
@@ -12,12 +12,12 @@ def add(ctx):
"""
Connect to a bank
"""
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
api_client = LeggenAPIClient(ctx.obj.get("api_url"))

# Check if leggend service is available
# Check if leggen server is available
if not api_client.health_check():
click.echo(
"Error: Cannot connect to leggend service. Please ensure it's running."
"Error: Cannot connect to leggen server. Please ensure it's running."
)
return
@@ -1,9 +1,8 @@
"""Generate sample database command."""

import click
from pathlib import Path

from leggen.utils.paths import path_manager
import click


@click.command()
@@ -30,29 +29,33 @@ from leggen.utils.paths import path_manager
help="Overwrite existing database without confirmation",
)
@click.pass_context
def generate_sample_db(ctx: click.Context, database: Path, accounts: int, transactions: int, force: bool):
def generate_sample_db(
ctx: click.Context, database: Path, accounts: int, transactions: int, force: bool
):
"""Generate a sample database with realistic financial data for testing."""


# Import here to avoid circular imports
import sys
import subprocess
import sys
from pathlib import Path as PathlibPath


# Get the script path
script_path = PathlibPath(__file__).parent.parent.parent / "scripts" / "generate_sample_db.py"

script_path = (
PathlibPath(__file__).parent.parent.parent / "scripts" / "generate_sample_db.py"
)

# Build command arguments
cmd = [sys.executable, str(script_path)]


if database:
cmd.extend(["--database", str(database)])


cmd.extend(["--accounts", str(accounts)])
cmd.extend(["--transactions", str(transactions)])


if force:
cmd.append("--force")


# Execute the script
try:
subprocess.run(cmd, check=True)
@@ -62,4 +65,4 @@ def generate_sample_db(ctx: click.Context, database: Path, accounts: int, transa


# Export the command
generate_sample_db = generate_sample_db
generate_sample_db = generate_sample_db
@@ -1,20 +1,22 @@
from contextlib import asynccontextmanager
from importlib import metadata

import click
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from loguru import logger

from leggend.api.routes import banks, accounts, sync, notifications, transactions
from leggend.background.scheduler import scheduler
from leggend.config import config
from leggen.api.routes import accounts, banks, notifications, sync, transactions
from leggen.background.scheduler import scheduler
from leggen.utils.config import config
from leggen.utils.paths import path_manager


@asynccontextmanager
async def lifespan(app: FastAPI):
# Startup
logger.info("Starting leggend service...")
logger.info("Starting leggen server...")

# Load configuration
try:
@@ -26,7 +28,7 @@ async def lifespan(app: FastAPI):

# Run database migrations
try:
from leggend.services.database_service import DatabaseService
from leggen.services.database_service import DatabaseService

db_service = DatabaseService()
await db_service.run_migrations_if_needed()
@@ -42,7 +44,7 @@ async def lifespan(app: FastAPI):
yield

# Shutdown
logger.info("Shutting down leggend service...")
logger.info("Shutting down leggen server...")
scheduler.shutdown()


@@ -54,7 +56,7 @@ def create_app() -> FastAPI:
version = "unknown"

app = FastAPI(
title="Leggend API",
title="Leggen API",
description="Open Banking API for Leggen",
version=version,
lifespan=lifespan,
@@ -87,13 +89,13 @@ def create_app() -> FastAPI:
version = metadata.version("leggen")
except metadata.PackageNotFoundError:
version = "unknown"
return {"message": "Leggend API is running", "version": version}
return {"message": "Leggen API is running", "version": version}

@app.get("/api/v1/health")
async def health():
"""Health check endpoint for API connectivity"""
try:
from leggend.api.models.common import APIResponse
from leggen.api.models.common import APIResponse

config_loaded = config._config is not None

@@ -108,7 +110,7 @@ def create_app() -> FastAPI:
)
except Exception as e:
logger.error(f"Health check failed: {e}")
from leggend.api.models.common import APIResponse
from leggen.api.models.common import APIResponse

return APIResponse(
success=False,
@@ -119,61 +121,58 @@ def create_app() -> FastAPI:
return app


def main():
import argparse
from pathlib import Path
from leggen.utils.paths import path_manager
@click.command()
@click.option(
"--reload",
is_flag=True,
help="Enable auto-reload for development",
)
@click.option(
"--host",
default="0.0.0.0",
help="Host to bind to (default: 0.0.0.0)",
)
@click.option(
"--port",
type=int,
default=8000,
help="Port to bind to (default: 8000)",
)
@click.pass_context
def server(ctx: click.Context, reload: bool, host: str, port: int):
"""Start the Leggen API server"""

parser = argparse.ArgumentParser(description="Start the Leggend API service")
parser.add_argument(
"--reload", action="store_true", help="Enable auto-reload for development"
)
parser.add_argument(
"--host", default="0.0.0.0", help="Host to bind to (default: 0.0.0.0)"
)
parser.add_argument(
"--port", type=int, default=8000, help="Port to bind to (default: 8000)"
)
parser.add_argument(
"--config-dir",
type=Path,
help="Directory containing configuration files (default: ~/.config/leggen)",
)
parser.add_argument(
"--database",
type=Path,
help="Path to SQLite database file (default: <config-dir>/leggen.db)",
)
args = parser.parse_args()
# Get config_dir and database from main CLI context
config_dir = None
database = None
if ctx.parent:
config_dir = ctx.parent.params.get("config_dir")
database = ctx.parent.params.get("database")

# Set up path manager with user-provided paths
if args.config_dir:
path_manager.set_config_dir(args.config_dir)
if args.database:
path_manager.set_database_path(args.database)
if config_dir:
path_manager.set_config_dir(config_dir)
if database:
path_manager.set_database_path(database)

if args.reload:
if reload:
# Use string import for reload to work properly
uvicorn.run(
"leggend.main:create_app",
"leggen.commands.server:create_app",
factory=True,
host=args.host,
port=args.port,
host=host,
port=port,
log_level="info",
access_log=True,
reload=True,
reload_dirs=["leggend", "leggen"], # Watch both directories
reload_dirs=["leggen"], # Watch leggen directory
)
else:
app = create_app()
uvicorn.run(
app,
host=args.host,
port=args.port,
host=host,
port=port,
log_level="info",
access_log=True,
)


if __name__ == "__main__":
main()
@@ -1,7 +1,7 @@
import click

from leggen.api_client import LeggenAPIClient
from leggen.main import cli
from leggen.api_client import LeggendAPIClient
from leggen.utils.text import datefmt, echo, info, print_table


@@ -11,12 +11,12 @@ def status(ctx: click.Context):
"""
List all connected banks and their status
"""
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
api_client = LeggenAPIClient(ctx.obj.get("api_url"))

# Check if leggend service is available
# Check if leggen server is available
if not api_client.health_check():
click.echo(
"Error: Cannot connect to leggend service. Please ensure it's running."
"Error: Cannot connect to leggen server. Please ensure it's running."
)
return
@@ -1,7 +1,7 @@
import click

from leggen.api_client import LeggenAPIClient
from leggen.main import cli
from leggen.api_client import LeggendAPIClient
from leggen.utils.text import error, info, success


@@ -13,11 +13,11 @@ def sync(ctx: click.Context, wait: bool, force: bool):
"""
Sync all transactions with database
"""
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
api_client = LeggenAPIClient(ctx.obj.get("api_url"))

# Check if leggend service is available
# Check if leggen server is available
if not api_client.health_check():
error("Cannot connect to leggend service. Please ensure it's running.")
error("Cannot connect to leggen server. Please ensure it's running.")
return

try:
@@ -1,7 +1,7 @@
import click

from leggen.api_client import LeggenAPIClient
from leggen.main import cli
from leggen.api_client import LeggendAPIClient
from leggen.utils.text import datefmt, info, print_table


@@ -20,12 +20,12 @@ def transactions(ctx: click.Context, account: str, limit: int, full: bool):

If the --account option is used, it will only list transactions for that account.
"""
api_client = LeggendAPIClient(ctx.obj.get("api_url"))
api_client = LeggenAPIClient(ctx.obj.get("api_url"))

# Check if leggend service is available
# Check if leggen server is available
if not api_client.health_check():
click.echo(
"Error: Cannot connect to leggend service. Please ensure it's running."
"Error: Cannot connect to leggen server. Please ensure it's running."
)
return
@@ -1,522 +0,0 @@
import json
import sqlite3
from sqlite3 import IntegrityError

import click

from leggen.utils.text import success, warning
from leggen.utils.paths import path_manager


def persist_balances(ctx: click.Context, balance: dict):
# Connect to SQLite database
db_path = path_manager.get_database_path()
conn = sqlite3.connect(str(db_path))
cursor = conn.cursor()

# Create the accounts table if it doesn't exist
cursor.execute(
"""CREATE TABLE IF NOT EXISTS accounts (
id TEXT PRIMARY KEY,
institution_id TEXT,
status TEXT,
iban TEXT,
name TEXT,
currency TEXT,
created DATETIME,
last_accessed DATETIME,
last_updated DATETIME
)"""
)

# Create indexes for accounts table
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_accounts_institution_id
ON accounts(institution_id)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_accounts_status
ON accounts(status)"""
)

# Create the balances table if it doesn't exist
cursor.execute(
"""CREATE TABLE IF NOT EXISTS balances (
id INTEGER PRIMARY KEY AUTOINCREMENT,
account_id TEXT,
bank TEXT,
status TEXT,
iban TEXT,
amount REAL,
currency TEXT,
type TEXT,
timestamp DATETIME
)"""
)

# Create indexes for better performance
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_balances_account_id
ON balances(account_id)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_balances_timestamp
ON balances(timestamp)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp
ON balances(account_id, type, timestamp)"""
)

# Insert balance into SQLite database
try:
cursor.execute(
"""INSERT INTO balances (
account_id,
bank,
status,
iban,
amount,
currency,
type,
timestamp
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
(
balance["account_id"],
balance["bank"],
balance["status"],
balance["iban"],
balance["amount"],
balance["currency"],
balance["type"],
balance["timestamp"],
),
)
except IntegrityError:
warning(f"[{balance['account_id']}] Skipped duplicate balance")

# Commit changes and close the connection
conn.commit()
conn.close()

success(f"[{balance['account_id']}] Inserted balance of type {balance['type']}")

return balance


def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
# Connect to SQLite database
db_path = path_manager.get_database_path()
conn = sqlite3.connect(str(db_path))
cursor = conn.cursor()

# Create the transactions table if it doesn't exist
cursor.execute(
"""CREATE TABLE IF NOT EXISTS transactions (
accountId TEXT NOT NULL,
transactionId TEXT NOT NULL,
internalTransactionId TEXT,
institutionId TEXT,
iban TEXT,
transactionDate DATETIME,
description TEXT,
transactionValue REAL,
transactionCurrency TEXT,
transactionStatus TEXT,
rawTransaction JSON,
PRIMARY KEY (accountId, transactionId)
)"""
)

# Create indexes for better performance
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_transactions_internal_id
ON transactions(internalTransactionId)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_transactions_date
ON transactions(transactionDate)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_transactions_account_date
ON transactions(accountId, transactionDate)"""
)
cursor.execute(
"""CREATE INDEX IF NOT EXISTS idx_transactions_amount
|
||||
ON transactions(transactionValue)"""
|
||||
)
|
||||
|
||||
# Insert transactions into SQLite database
|
||||
duplicates_count = 0
|
||||
|
||||
# Prepare an SQL statement for inserting data
|
||||
insert_sql = """INSERT OR REPLACE INTO transactions (
|
||||
accountId,
|
||||
transactionId,
|
||||
internalTransactionId,
|
||||
institutionId,
|
||||
iban,
|
||||
transactionDate,
|
||||
description,
|
||||
transactionValue,
|
||||
transactionCurrency,
|
||||
transactionStatus,
|
||||
rawTransaction
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
|
||||
|
||||
new_transactions = []
|
||||
|
||||
for transaction in transactions:
|
||||
try:
|
||||
cursor.execute(
|
||||
insert_sql,
|
||||
(
|
||||
transaction["accountId"],
|
||||
transaction["transactionId"],
|
||||
transaction.get("internalTransactionId"),
|
||||
transaction["institutionId"],
|
||||
transaction["iban"],
|
||||
transaction["transactionDate"],
|
||||
transaction["description"],
|
||||
transaction["transactionValue"],
|
||||
transaction["transactionCurrency"],
|
||||
transaction["transactionStatus"],
|
||||
json.dumps(transaction["rawTransaction"]),
|
||||
),
|
||||
)
|
||||
new_transactions.append(transaction)
|
||||
except IntegrityError:
|
||||
# A transaction with the same ID already exists, indicating a duplicate
|
||||
duplicates_count += 1
|
||||
|
||||
# Commit changes and close the connection
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
success(f"[{account}] Inserted {len(new_transactions)} new transactions")
|
||||
if duplicates_count:
|
||||
warning(f"[{account}] Skipped {duplicates_count} duplicate transactions")
|
||||
|
||||
return new_transactions
|
||||
|
||||
|
||||
def get_transactions(
|
||||
account_id=None,
|
||||
limit=100,
|
||||
offset=0,
|
||||
date_from=None,
|
||||
date_to=None,
|
||||
min_amount=None,
|
||||
max_amount=None,
|
||||
search=None,
|
||||
):
|
||||
"""Get transactions from SQLite database with optional filtering"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row # Enable dict-like access
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Build query with filters
|
||||
query = "SELECT * FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
if date_from:
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(date_from)
|
||||
|
||||
if date_to:
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(date_to)
|
||||
|
||||
if min_amount is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(min_amount)
|
||||
|
||||
if max_amount is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(max_amount)
|
||||
|
||||
if search:
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{search}%")
|
||||
|
||||
# Add ordering and pagination
|
||||
query += " ORDER BY transactionDate DESC"
|
||||
|
||||
if limit:
|
||||
query += " LIMIT ?"
|
||||
params.append(limit)
|
||||
|
||||
if offset:
|
||||
query += " OFFSET ?"
|
||||
params.append(offset)
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Convert to list of dicts and parse JSON fields
|
||||
transactions = []
|
||||
for row in rows:
|
||||
transaction = dict(row)
|
||||
if transaction["rawTransaction"]:
|
||||
transaction["rawTransaction"] = json.loads(
|
||||
transaction["rawTransaction"]
|
||||
)
|
||||
transactions.append(transaction)
|
||||
|
||||
conn.close()
|
||||
return transactions
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_balances(account_id=None):
|
||||
"""Get latest balances from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Get latest balance for each account_id and type combination
|
||||
query = """
|
||||
SELECT * FROM balances b1
|
||||
WHERE b1.timestamp = (
|
||||
SELECT MAX(b2.timestamp)
|
||||
FROM balances b2
|
||||
WHERE b2.account_id = b1.account_id AND b2.type = b1.type
|
||||
)
|
||||
"""
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND b1.account_id = ?"
|
||||
params.append(account_id)
|
||||
|
||||
query += " ORDER BY b1.account_id, b1.type"
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
balances = [dict(row) for row in rows]
|
||||
conn.close()
|
||||
return balances
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_account_summary(account_id):
|
||||
"""Get basic account info from transactions table (avoids GoCardless API call)"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return None
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
# Get account info from most recent transaction
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT DISTINCT accountId, institutionId, iban
|
||||
FROM transactions
|
||||
WHERE accountId = ?
|
||||
ORDER BY transactionDate DESC
|
||||
LIMIT 1
|
||||
""",
|
||||
(account_id,),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return dict(row)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_transaction_count(account_id=None, **filters):
|
||||
"""Get total count of transactions matching filters"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT COUNT(*) FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
# Add same filters as get_transactions
|
||||
if filters.get("date_from"):
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(filters["date_from"])
|
||||
|
||||
if filters.get("date_to"):
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(filters["date_to"])
|
||||
|
||||
if filters.get("min_amount") is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(filters["min_amount"])
|
||||
|
||||
if filters.get("max_amount") is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(filters["max_amount"])
|
||||
|
||||
if filters.get("search"):
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{filters['search']}%")
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
count = cursor.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def persist_account(account_data: dict):
|
||||
"""Persist account details to SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the accounts table if it doesn't exist
|
||||
cursor.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS accounts (
|
||||
id TEXT PRIMARY KEY,
|
||||
institution_id TEXT,
|
||||
status TEXT,
|
||||
iban TEXT,
|
||||
name TEXT,
|
||||
currency TEXT,
|
||||
created DATETIME,
|
||||
last_accessed DATETIME,
|
||||
last_updated DATETIME
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for accounts table
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_accounts_institution_id
|
||||
ON accounts(institution_id)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_accounts_status
|
||||
ON accounts(status)"""
|
||||
)
|
||||
|
||||
try:
|
||||
# Insert or replace account data
|
||||
cursor.execute(
|
||||
"""INSERT OR REPLACE INTO accounts (
|
||||
id,
|
||||
institution_id,
|
||||
status,
|
||||
iban,
|
||||
name,
|
||||
currency,
|
||||
created,
|
||||
last_accessed,
|
||||
last_updated
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
account_data["id"],
|
||||
account_data["institution_id"],
|
||||
account_data["status"],
|
||||
account_data.get("iban"),
|
||||
account_data.get("name"),
|
||||
account_data.get("currency"),
|
||||
account_data["created"],
|
||||
account_data.get("last_accessed"),
|
||||
account_data.get("last_updated", account_data["created"]),
|
||||
),
|
||||
)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
success(f"[{account_data['id']}] Account details persisted to database")
|
||||
return account_data
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_accounts(account_ids=None):
|
||||
"""Get account details from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT * FROM accounts"
|
||||
params = []
|
||||
|
||||
if account_ids:
|
||||
placeholders = ",".join("?" * len(account_ids))
|
||||
query += f" WHERE id IN ({placeholders})"
|
||||
params.extend(account_ids)
|
||||
|
||||
query += " ORDER BY created DESC"
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
accounts = [dict(row) for row in rows]
|
||||
conn.close()
|
||||
return accounts
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
|
||||
def get_account(account_id: str):
|
||||
"""Get specific account details from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return None
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute("SELECT * FROM accounts WHERE id = ?", (account_id,))
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return dict(row)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
@@ -6,8 +6,8 @@ from pathlib import Path
import click

from leggen.utils.config import load_config
from leggen.utils.text import error
from leggen.utils.paths import path_manager
from leggen.utils.text import error

cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "commands"))

@@ -105,9 +105,9 @@ class Group(click.Group):
    "--api-url",
    type=str,
    default="http://localhost:8000",
    envvar="LEGGEND_API_URL",
    envvar="LEGGEN_API_URL",
    show_envvar=True,
    help="URL of the leggend API service",
    help="URL of the leggen API service",
)
@click.group(
    cls=Group,

65
leggen/models/config.py
Normal file
@@ -0,0 +1,65 @@
from typing import List, Optional

from pydantic import BaseModel, Field


class GoCardlessConfig(BaseModel):
    key: str = Field(..., description="GoCardless API key")
    secret: str = Field(..., description="GoCardless API secret")
    url: str = Field(
        default="https://bankaccountdata.gocardless.com/api/v2",
        description="GoCardless API URL",
    )


class DatabaseConfig(BaseModel):
    sqlite: bool = Field(default=True, description="Enable SQLite database")


class DiscordNotificationConfig(BaseModel):
    webhook: str = Field(..., description="Discord webhook URL")
    enabled: bool = Field(default=True, description="Enable Discord notifications")


class TelegramNotificationConfig(BaseModel):
    token: str = Field(..., alias="api-key", description="Telegram bot token")
    chat_id: int = Field(..., alias="chat-id", description="Telegram chat ID")
    enabled: bool = Field(default=True, description="Enable Telegram notifications")


class NotificationConfig(BaseModel):
    discord: Optional[DiscordNotificationConfig] = None
    telegram: Optional[TelegramNotificationConfig] = None


class FilterConfig(BaseModel):
    case_insensitive: Optional[List[str]] = Field(
        default_factory=list, alias="case-insensitive"
    )
    case_sensitive: Optional[List[str]] = Field(
        default_factory=list, alias="case-sensitive"
    )


class SyncScheduleConfig(BaseModel):
    enabled: bool = Field(default=True, description="Enable sync scheduling")
    hour: int = Field(default=3, ge=0, le=23, description="Hour to run sync (0-23)")
    minute: int = Field(default=0, ge=0, le=59, description="Minute to run sync (0-59)")
    cron: Optional[str] = Field(
        default=None, description="Custom cron expression (overrides hour/minute)"
    )


class SchedulerConfig(BaseModel):
    sync: SyncScheduleConfig = Field(default_factory=SyncScheduleConfig)


class Config(BaseModel):
    gocardless: GoCardlessConfig
    database: DatabaseConfig = Field(default_factory=DatabaseConfig)
    notifications: Optional[NotificationConfig] = None
    filters: Optional[FilterConfig] = None
    scheduler: SchedulerConfig = Field(default_factory=SchedulerConfig)

    class Config:
        validate_by_name = True
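The new `leggen/models/config.py` mirrors the keys in `config.example.toml`, including the dash-style TOML aliases. A rough sketch of how it validates a parsed TOML document (the values below are invented for illustration):

```python
# Sketch: validate a TOML snippet with the new Pydantic Config model.
# The keys mirror config.example.toml; the concrete values are made up.
import tomllib

from leggen.models.config import Config

raw = tomllib.loads(
    """
    [gocardless]
    key = "demo-key"
    secret = "demo-secret"

    [notifications.telegram]
    api-key = "123:abc"
    chat-id = 42
    """
)

cfg = Config(**raw)
print(cfg.notifications.telegram.chat_id)  # alias "chat-id" maps to chat_id
```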
@@ -1,11 +1,13 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Dict, Any, Optional
|
||||
import json
|
||||
import sqlite3
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from loguru import logger
|
||||
|
||||
from leggend.config import config
|
||||
import leggen.database.sqlite as sqlite_db
|
||||
from leggen.services.transaction_processor import TransactionProcessor
|
||||
from leggen.utils.config import config
|
||||
from leggen.utils.paths import path_manager
|
||||
|
||||
|
||||
@@ -13,6 +15,7 @@ class DatabaseService:
|
||||
def __init__(self):
|
||||
self.db_config = config.database_config
|
||||
self.sqlite_enabled = self.db_config.get("sqlite", True)
|
||||
self.transaction_processor = TransactionProcessor()
|
||||
|
||||
async def persist_balance(
|
||||
self, account_id: str, balance_data: Dict[str, Any]
|
||||
@@ -41,84 +44,14 @@ class DatabaseService:
|
||||
transaction_data: Dict[str, Any],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Process raw transaction data into standardized format"""
|
||||
transactions = []
|
||||
|
||||
# Process booked transactions
|
||||
for transaction in transaction_data.get("transactions", {}).get("booked", []):
|
||||
processed = self._process_single_transaction(
|
||||
account_id, account_info, transaction, "booked"
|
||||
)
|
||||
transactions.append(processed)
|
||||
|
||||
# Process pending transactions
|
||||
for transaction in transaction_data.get("transactions", {}).get("pending", []):
|
||||
processed = self._process_single_transaction(
|
||||
account_id, account_info, transaction, "pending"
|
||||
)
|
||||
transactions.append(processed)
|
||||
|
||||
return transactions
|
||||
|
||||
def _process_single_transaction(
|
||||
self,
|
||||
account_id: str,
|
||||
account_info: Dict[str, Any],
|
||||
transaction: Dict[str, Any],
|
||||
status: str,
|
||||
) -> Dict[str, Any]:
|
||||
"""Process a single transaction into standardized format"""
|
||||
# Extract dates
|
||||
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||
"bookingDate"
|
||||
return self.transaction_processor.process_transactions(
|
||||
account_id, account_info, transaction_data
|
||||
)
|
||||
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||
|
||||
if booked_date and value_date:
|
||||
min_date = min(
|
||||
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||
)
|
||||
else:
|
||||
date_str = booked_date or value_date
|
||||
if not date_str:
|
||||
raise ValueError("No valid date found in transaction")
|
||||
min_date = datetime.fromisoformat(date_str)
|
||||
|
||||
# Extract amount and currency
|
||||
transaction_amount = transaction.get("transactionAmount", {})
|
||||
amount = float(transaction_amount.get("amount", 0))
|
||||
currency = transaction_amount.get("currency", "")
|
||||
|
||||
# Extract description
|
||||
description = transaction.get(
|
||||
"remittanceInformationUnstructured",
|
||||
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||
)
|
||||
|
||||
# Extract transaction IDs - transactionId is now primary, internalTransactionId is reference
|
||||
transaction_id = transaction.get("transactionId")
|
||||
internal_transaction_id = transaction.get("internalTransactionId")
|
||||
|
||||
if not transaction_id:
|
||||
raise ValueError("Transaction missing required transactionId field")
|
||||
|
||||
return {
|
||||
"accountId": account_id,
|
||||
"transactionId": transaction_id,
|
||||
"internalTransactionId": internal_transaction_id,
|
||||
"institutionId": account_info["institution_id"],
|
||||
"iban": account_info.get("iban", "N/A"),
|
||||
"transactionDate": min_date,
|
||||
"description": description,
|
||||
"transactionValue": amount,
|
||||
"transactionCurrency": currency,
|
||||
"transactionStatus": status,
|
||||
"rawTransaction": transaction,
|
||||
}
|
||||
|
||||
async def get_transactions_from_db(
|
||||
self,
|
||||
account_id: Optional[str] = None,
|
||||
limit: Optional[int] = 100,
|
||||
limit: Optional[int] = None, # None means no limit, used for stats
|
||||
offset: Optional[int] = 0,
|
||||
date_from: Optional[str] = None,
|
||||
date_to: Optional[str] = None,
|
||||
@@ -132,9 +65,9 @@ class DatabaseService:
|
||||
return []
|
||||
|
||||
try:
|
||||
transactions = sqlite_db.get_transactions(
|
||||
transactions = self._get_transactions(
|
||||
account_id=account_id,
|
||||
limit=limit or 100,
|
||||
limit=limit, # Pass limit as-is, None means no limit
|
||||
offset=offset or 0,
|
||||
date_from=date_from,
|
||||
date_to=date_to,
|
||||
@@ -172,7 +105,7 @@ class DatabaseService:
|
||||
# Remove None values
|
||||
filters = {k: v for k, v in filters.items() if v is not None}
|
||||
|
||||
count = sqlite_db.get_transaction_count(account_id=account_id, **filters)
|
||||
count = self._get_transaction_count(account_id=account_id, **filters)
|
||||
logger.debug(f"Total transaction count: {count}")
|
||||
return count
|
||||
except Exception as e:
|
||||
@@ -188,13 +121,31 @@ class DatabaseService:
|
||||
return []
|
||||
|
||||
try:
|
||||
balances = sqlite_db.get_balances(account_id=account_id)
|
||||
balances = self._get_balances(account_id=account_id)
|
||||
logger.debug(f"Retrieved {len(balances)} balances from database")
|
||||
return balances
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get balances from database: {e}")
|
||||
return []
|
||||
|
||||
async def get_historical_balances_from_db(
|
||||
self, account_id: Optional[str] = None, days: int = 365
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Get historical balance progression from SQLite database"""
|
||||
if not self.sqlite_enabled:
|
||||
logger.warning("SQLite database disabled, cannot read historical balances")
|
||||
return []
|
||||
|
||||
try:
|
||||
balances = self._get_historical_balances(account_id=account_id, days=days)
|
||||
logger.debug(
|
||||
f"Retrieved {len(balances)} historical balance points from database"
|
||||
)
|
||||
return balances
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get historical balances from database: {e}")
|
||||
return []
|
||||
|
||||
async def get_account_summary_from_db(
|
||||
self, account_id: str
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
@@ -203,7 +154,7 @@ class DatabaseService:
|
||||
return None
|
||||
|
||||
try:
|
||||
summary = sqlite_db.get_account_summary(account_id)
|
||||
summary = self._get_account_summary(account_id)
|
||||
if summary:
|
||||
logger.debug(
|
||||
f"Retrieved account summary from database for {account_id}"
|
||||
@@ -230,7 +181,7 @@ class DatabaseService:
|
||||
return []
|
||||
|
||||
try:
|
||||
accounts = sqlite_db.get_accounts(account_ids=account_ids)
|
||||
accounts = self._get_accounts(account_ids=account_ids)
|
||||
logger.debug(f"Retrieved {len(accounts)} accounts from database")
|
||||
return accounts
|
||||
except Exception as e:
|
||||
@@ -246,7 +197,7 @@ class DatabaseService:
|
||||
return None
|
||||
|
||||
try:
|
||||
account = sqlite_db.get_account(account_id)
|
||||
account = self._get_account(account_id)
|
||||
if account:
|
||||
logger.debug(
|
||||
f"Retrieved account details from database for {account_id}"
|
||||
@@ -424,7 +375,7 @@ class DatabaseService:
|
||||
async def _migrate_null_transaction_ids(self):
|
||||
"""Populate null internalTransactionId fields using transactionId from raw data"""
|
||||
import uuid
|
||||
|
||||
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
logger.warning("Database file not found, skipping migration")
|
||||
@@ -770,8 +721,8 @@ class DatabaseService:
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Persist transactions to SQLite"""
|
||||
try:
|
||||
import sqlite3
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
db_path = path_manager.get_database_path()
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
@@ -873,7 +824,7 @@ class DatabaseService:
|
||||
"""Persist account details to SQLite"""
|
||||
try:
|
||||
# Use the sqlite_db module function
|
||||
sqlite_db.persist_account(account_data)
|
||||
self._persist_account(account_data)
|
||||
|
||||
logger.info(
|
||||
f"Persisted account details to SQLite for account {account_data['id']}"
|
||||
@@ -881,3 +832,453 @@ class DatabaseService:
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to persist account details to SQLite: {e}")
|
||||
raise
|
||||
|
||||
def _get_transactions(
|
||||
self,
|
||||
account_id=None,
|
||||
limit=100,
|
||||
offset=0,
|
||||
date_from=None,
|
||||
date_to=None,
|
||||
min_amount=None,
|
||||
max_amount=None,
|
||||
search=None,
|
||||
):
|
||||
"""Get transactions from SQLite database with optional filtering"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row # Enable dict-like access
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Build query with filters
|
||||
query = "SELECT * FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
if date_from:
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(date_from)
|
||||
|
||||
if date_to:
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(date_to)
|
||||
|
||||
if min_amount is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(min_amount)
|
||||
|
||||
if max_amount is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(max_amount)
|
||||
|
||||
if search:
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{search}%")
|
||||
|
||||
# Add ordering and pagination
|
||||
query += " ORDER BY transactionDate DESC"
|
||||
|
||||
if limit:
|
||||
query += " LIMIT ?"
|
||||
params.append(limit)
|
||||
|
||||
if offset:
|
||||
query += " OFFSET ?"
|
||||
params.append(offset)
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Convert to list of dicts and parse JSON fields
|
||||
transactions = []
|
||||
for row in rows:
|
||||
transaction = dict(row)
|
||||
if transaction["rawTransaction"]:
|
||||
transaction["rawTransaction"] = json.loads(
|
||||
transaction["rawTransaction"]
|
||||
)
|
||||
transactions.append(transaction)
|
||||
|
||||
conn.close()
|
||||
return transactions
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_balances(self, account_id=None):
|
||||
"""Get latest balances from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Get latest balance for each account_id and type combination
|
||||
query = """
|
||||
SELECT * FROM balances b1
|
||||
WHERE b1.timestamp = (
|
||||
SELECT MAX(b2.timestamp)
|
||||
FROM balances b2
|
||||
WHERE b2.account_id = b1.account_id AND b2.type = b1.type
|
||||
)
|
||||
"""
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND b1.account_id = ?"
|
||||
params.append(account_id)
|
||||
|
||||
query += " ORDER BY b1.account_id, b1.type"
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
balances = [dict(row) for row in rows]
|
||||
conn.close()
|
||||
return balances
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_account_summary(self, account_id):
|
||||
"""Get basic account info from transactions table (avoids GoCardless API call)"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return None
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
# Get account info from most recent transaction
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT DISTINCT accountId, institutionId, iban
|
||||
FROM transactions
|
||||
WHERE accountId = ?
|
||||
ORDER BY transactionDate DESC
|
||||
LIMIT 1
|
||||
""",
|
||||
(account_id,),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return dict(row)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_transaction_count(self, account_id=None, **filters):
|
||||
"""Get total count of transactions matching filters"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return 0
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT COUNT(*) FROM transactions WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
query += " AND accountId = ?"
|
||||
params.append(account_id)
|
||||
|
||||
# Add same filters as get_transactions
|
||||
if filters.get("date_from"):
|
||||
query += " AND transactionDate >= ?"
|
||||
params.append(filters["date_from"])
|
||||
|
||||
if filters.get("date_to"):
|
||||
query += " AND transactionDate <= ?"
|
||||
params.append(filters["date_to"])
|
||||
|
||||
if filters.get("min_amount") is not None:
|
||||
query += " AND transactionValue >= ?"
|
||||
params.append(filters["min_amount"])
|
||||
|
||||
if filters.get("max_amount") is not None:
|
||||
query += " AND transactionValue <= ?"
|
||||
params.append(filters["max_amount"])
|
||||
|
||||
if filters.get("search"):
|
||||
query += " AND description LIKE ?"
|
||||
params.append(f"%{filters['search']}%")
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
count = cursor.fetchone()[0]
|
||||
conn.close()
|
||||
return count
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _persist_account(self, account_data: dict):
|
||||
"""Persist account details to SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Create the accounts table if it doesn't exist
|
||||
cursor.execute(
|
||||
"""CREATE TABLE IF NOT EXISTS accounts (
|
||||
id TEXT PRIMARY KEY,
|
||||
institution_id TEXT,
|
||||
status TEXT,
|
||||
iban TEXT,
|
||||
name TEXT,
|
||||
currency TEXT,
|
||||
created DATETIME,
|
||||
last_accessed DATETIME,
|
||||
last_updated DATETIME
|
||||
)"""
|
||||
)
|
||||
|
||||
# Create indexes for accounts table
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_accounts_institution_id
|
||||
ON accounts(institution_id)"""
|
||||
)
|
||||
cursor.execute(
|
||||
"""CREATE INDEX IF NOT EXISTS idx_accounts_status
|
||||
ON accounts(status)"""
|
||||
)
|
||||
|
||||
try:
|
||||
# Insert or replace account data
|
||||
cursor.execute(
|
||||
"""INSERT OR REPLACE INTO accounts (
|
||||
id,
|
||||
institution_id,
|
||||
status,
|
||||
iban,
|
||||
name,
|
||||
currency,
|
||||
created,
|
||||
last_accessed,
|
||||
last_updated
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
|
||||
(
|
||||
account_data["id"],
|
||||
account_data["institution_id"],
|
||||
account_data["status"],
|
||||
account_data.get("iban"),
|
||||
account_data.get("name"),
|
||||
account_data.get("currency"),
|
||||
account_data["created"],
|
||||
account_data.get("last_accessed"),
|
||||
account_data.get("last_updated", account_data["created"]),
|
||||
),
|
||||
)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
return account_data
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_accounts(self, account_ids=None):
|
||||
"""Get account details from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT * FROM accounts"
|
||||
params = []
|
||||
|
||||
if account_ids:
|
||||
placeholders = ",".join("?" * len(account_ids))
|
||||
query += f" WHERE id IN ({placeholders})"
|
||||
params.extend(account_ids)
|
||||
|
||||
query += " ORDER BY created DESC"
|
||||
|
||||
try:
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
accounts = [dict(row) for row in rows]
|
||||
conn.close()
|
||||
return accounts
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_account(self, account_id: str):
|
||||
"""Get specific account details from SQLite database"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return None
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute("SELECT * FROM accounts WHERE id = ?", (account_id,))
|
||||
row = cursor.fetchone()
|
||||
conn.close()
|
||||
|
||||
if row:
|
||||
return dict(row)
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
|
||||
def _get_historical_balances(self, account_id=None, days=365):
|
||||
"""Get historical balance progression based on transaction history"""
|
||||
db_path = path_manager.get_database_path()
|
||||
if not db_path.exists():
|
||||
return []
|
||||
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.row_factory = sqlite3.Row
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
# Get current balance for each account/type to use as the final balance
|
||||
current_balances_query = """
|
||||
SELECT account_id, type, amount, currency
|
||||
FROM balances b1
|
||||
WHERE b1.timestamp = (
|
||||
SELECT MAX(b2.timestamp)
|
||||
FROM balances b2
|
||||
WHERE b2.account_id = b1.account_id AND b2.type = b1.type
|
||||
)
|
||||
"""
|
||||
params = []
|
||||
|
||||
if account_id:
|
||||
current_balances_query += " AND b1.account_id = ?"
|
||||
params.append(account_id)
|
||||
|
||||
cursor.execute(current_balances_query, params)
|
||||
current_balances = {
|
||||
(row["account_id"], row["type"]): {
|
||||
"amount": row["amount"],
|
||||
"currency": row["currency"],
|
||||
}
|
||||
for row in cursor.fetchall()
|
||||
}
|
||||
|
||||
# Get transactions for the specified period, ordered by date descending
|
||||
cutoff_date = (datetime.now() - timedelta(days=days)).isoformat()
|
||||
|
||||
transactions_query = """
|
||||
SELECT accountId, transactionDate, transactionValue
|
||||
FROM transactions
|
||||
WHERE transactionDate >= ?
|
||||
"""
|
||||
|
||||
if account_id:
|
||||
transactions_query += " AND accountId = ?"
|
||||
params = [cutoff_date, account_id]
|
||||
else:
|
||||
params = [cutoff_date]
|
||||
|
||||
transactions_query += " ORDER BY transactionDate DESC"
|
||||
|
||||
cursor.execute(transactions_query, params)
|
||||
transactions = cursor.fetchall()
|
||||
|
||||
# Calculate historical balances by working backwards from current balance
|
||||
historical_balances = []
|
||||
account_running_balances: dict[str, dict[str, float]] = {}
|
||||
|
||||
# Initialize running balances with current balances
|
||||
for (acc_id, balance_type), balance_info in current_balances.items():
|
||||
if acc_id not in account_running_balances:
|
||||
account_running_balances[acc_id] = {}
|
||||
account_running_balances[acc_id][balance_type] = balance_info["amount"]
|
||||
|
||||
# Group transactions by date
|
||||
transactions_by_date = defaultdict(list)
|
||||
|
||||
for txn in transactions:
|
||||
date_str = txn["transactionDate"][:10] # Extract just the date part
|
||||
transactions_by_date[date_str].append(txn)
|
||||
|
||||
# Generate historical balance points
|
||||
# Start from today and work backwards
|
||||
current_date = datetime.now().date()
|
||||
|
||||
for day_offset in range(0, days, 7): # Sample every 7 days for performance
|
||||
target_date = current_date - timedelta(days=day_offset)
|
||||
target_date_str = target_date.isoformat()
|
||||
|
||||
# For each account, create balance entries
|
||||
for acc_id in account_running_balances:
|
||||
for balance_type in [
|
||||
"closingBooked"
|
||||
]: # Focus on closingBooked for the chart
|
||||
if balance_type in account_running_balances[acc_id]:
|
||||
balance_amount = account_running_balances[acc_id][
|
||||
balance_type
|
||||
]
|
||||
currency = current_balances.get(
|
||||
(acc_id, balance_type), {}
|
||||
).get("currency", "EUR")
|
||||
|
||||
historical_balances.append(
|
||||
{
|
||||
"id": f"{acc_id}_{balance_type}_{target_date_str}",
|
||||
"account_id": acc_id,
|
||||
"balance_amount": balance_amount,
|
||||
"balance_type": balance_type,
|
||||
"currency": currency,
|
||||
"reference_date": target_date_str,
|
||||
"created_at": None,
|
||||
"updated_at": None,
|
||||
}
|
||||
)
|
||||
|
||||
# Subtract transactions that occurred on this date and later dates
|
||||
# to simulate going back in time
|
||||
for date_str in list(transactions_by_date.keys()):
|
||||
if date_str >= target_date_str:
|
||||
for txn in transactions_by_date[date_str]:
|
||||
acc_id = txn["accountId"]
|
||||
amount = txn["transactionValue"]
|
||||
|
||||
if acc_id in account_running_balances:
|
||||
for balance_type in account_running_balances[acc_id]:
|
||||
account_running_balances[acc_id][balance_type] -= (
|
||||
amount
|
||||
)
|
||||
|
||||
# Remove processed transactions to avoid double-processing
|
||||
del transactions_by_date[date_str]
|
||||
|
||||
conn.close()
|
||||
|
||||
# Sort by date for proper chronological order
|
||||
historical_balances.sort(key=lambda x: x["reference_date"])
|
||||
|
||||
return historical_balances
|
||||
|
||||
except Exception as e:
|
||||
conn.close()
|
||||
raise e
|
||||
@@ -1,11 +1,11 @@
import json
import httpx
from pathlib import Path
from typing import Dict, Any, List
from typing import Any, Dict, List

import httpx
from loguru import logger

from leggend.config import config
from leggen.utils.config import config
from leggen.utils.paths import path_manager


@@ -1,8 +1,8 @@
from typing import List, Dict, Any
from typing import Any, Dict, List

from loguru import logger

from leggend.config import config
from leggen.utils.config import config


class NotificationService:
@@ -109,33 +109,78 @@ class NotificationService:
|
||||
"""Check if Telegram notifications are enabled"""
|
||||
telegram_config = self.notifications_config.get("telegram", {})
|
||||
return bool(
|
||||
telegram_config.get("api-key")
|
||||
and telegram_config.get("chat-id")
|
||||
telegram_config.get("token")
|
||||
and telegram_config.get("chat_id")
|
||||
and telegram_config.get("enabled", True)
|
||||
)
|
||||
|
||||
async def _send_discord_notifications(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> None:
|
||||
"""Send Discord notifications - placeholder implementation"""
|
||||
# Would import and use leggen.notifications.discord
|
||||
logger.info(f"Sending {len(transactions)} transaction notifications to Discord")
|
||||
"""Send Discord notifications for transactions"""
|
||||
try:
|
||||
import click
|
||||
|
||||
from leggen.notifications.discord import send_transactions_message
|
||||
|
||||
# Create a mock context with the webhook
|
||||
ctx = click.Context(click.Command("notifications"))
|
||||
ctx.obj = {
|
||||
"notifications": {
|
||||
"discord": {
|
||||
"webhook": self.notifications_config.get("discord", {}).get(
|
||||
"webhook"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Send transaction notifications using the actual implementation
|
||||
send_transactions_message(ctx, transactions)
|
||||
logger.info(
|
||||
f"Sent {len(transactions)} transaction notifications to Discord"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send Discord transaction notifications: {e}")
|
||||
raise
|
||||
|
||||
async def _send_telegram_notifications(
|
||||
self, transactions: List[Dict[str, Any]]
|
||||
) -> None:
|
||||
"""Send Telegram notifications - placeholder implementation"""
|
||||
# Would import and use leggen.notifications.telegram
|
||||
logger.info(
|
||||
f"Sending {len(transactions)} transaction notifications to Telegram"
|
||||
)
|
||||
"""Send Telegram notifications for transactions"""
|
||||
try:
|
||||
import click
|
||||
|
||||
from leggen.notifications.telegram import send_transaction_message
|
||||
|
||||
# Create a mock context with the telegram config
|
||||
ctx = click.Context(click.Command("notifications"))
|
||||
telegram_config = self.notifications_config.get("telegram", {})
|
||||
ctx.obj = {
|
||||
"notifications": {
|
||||
"telegram": {
|
||||
"api-key": telegram_config.get("token"),
|
||||
"chat-id": telegram_config.get("chat_id"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Send transaction notifications using the actual implementation
|
||||
send_transaction_message(ctx, transactions)
|
||||
logger.info(
|
||||
f"Sent {len(transactions)} transaction notifications to Telegram"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send Telegram transaction notifications: {e}")
|
||||
raise
|
||||
|
||||
async def _send_discord_test(self, message: str) -> None:
|
||||
"""Send Discord test notification"""
|
||||
try:
|
||||
from leggen.notifications.discord import send_expire_notification
|
||||
import click
|
||||
|
||||
from leggen.notifications.discord import send_expire_notification
|
||||
|
||||
# Create a mock context with the webhook
|
||||
ctx = click.Context(click.Command("test"))
|
||||
ctx.obj = {
|
||||
@@ -164,17 +209,18 @@ class NotificationService:
|
||||
async def _send_telegram_test(self, message: str) -> None:
|
||||
"""Send Telegram test notification"""
|
||||
try:
|
||||
from leggen.notifications.telegram import send_expire_notification
|
||||
import click
|
||||
|
||||
from leggen.notifications.telegram import send_expire_notification
|
||||
|
||||
# Create a mock context with the telegram config
|
||||
ctx = click.Context(click.Command("test"))
|
||||
telegram_config = self.notifications_config.get("telegram", {})
|
||||
ctx.obj = {
|
||||
"notifications": {
|
||||
"telegram": {
|
||||
"api-key": telegram_config.get("api-key"),
|
||||
"chat-id": telegram_config.get("chat-id"),
|
||||
"api-key": telegram_config.get("token"),
|
||||
"chat-id": telegram_config.get("chat_id"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -194,8 +240,52 @@ class NotificationService:
|
||||
|
||||
async def _send_discord_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||
"""Send Discord expiry notification"""
|
||||
logger.info(f"Sending Discord expiry notification: {notification_data}")
|
||||
try:
|
||||
import click
|
||||
|
||||
from leggen.notifications.discord import send_expire_notification
|
||||
|
||||
# Create a mock context with the webhook
|
||||
ctx = click.Context(click.Command("expiry"))
|
||||
ctx.obj = {
|
||||
"notifications": {
|
||||
"discord": {
|
||||
"webhook": self.notifications_config.get("discord", {}).get(
|
||||
"webhook"
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Send expiry notification using the actual implementation
|
||||
send_expire_notification(ctx, notification_data)
|
||||
logger.info(f"Sent Discord expiry notification: {notification_data}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send Discord expiry notification: {e}")
|
||||
raise
|
||||
|
||||
async def _send_telegram_expiry(self, notification_data: Dict[str, Any]) -> None:
|
||||
"""Send Telegram expiry notification"""
|
||||
logger.info(f"Sending Telegram expiry notification: {notification_data}")
|
||||
try:
|
||||
import click
|
||||
|
||||
from leggen.notifications.telegram import send_expire_notification
|
||||
|
||||
# Create a mock context with the telegram config
|
||||
ctx = click.Context(click.Command("expiry"))
|
||||
telegram_config = self.notifications_config.get("telegram", {})
|
||||
ctx.obj = {
|
||||
"notifications": {
|
||||
"telegram": {
|
||||
"api-key": telegram_config.get("token"),
|
||||
"chat-id": telegram_config.get("chat_id"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Send expiry notification using the actual implementation
|
||||
send_expire_notification(ctx, notification_data)
|
||||
logger.info(f"Sent Telegram expiry notification: {notification_data}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send Telegram expiry notification: {e}")
|
||||
raise
|
||||
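The notification hunks above also rename the Telegram settings from the TOML-style `api-key`/`chat-id` keys to the Pydantic field names `token`/`chat_id`, and then map them back when building the click context for the legacy notifiers. A condensed sketch of that mapping (values invented):

```python
# Sketch of the key translation done in _send_telegram_* above:
# the service stores Pydantic field names, the legacy notifiers expect
# the TOML-style keys. The token and chat id here are placeholders.
telegram_config = {"token": "123:abc", "chat_id": 42, "enabled": True}

ctx_obj = {
    "notifications": {
        "telegram": {
            "api-key": telegram_config.get("token"),
            "chat-id": telegram_config.get("chat_id"),
        }
    }
}
```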
@@ -3,10 +3,10 @@ from typing import List

from loguru import logger

from leggend.api.models.sync import SyncResult, SyncStatus
from leggend.services.gocardless_service import GoCardlessService
from leggend.services.database_service import DatabaseService
from leggend.services.notification_service import NotificationService
from leggen.api.models.sync import SyncResult, SyncStatus
from leggen.services.database_service import DatabaseService
from leggen.services.gocardless_service import GoCardlessService
from leggen.services.notification_service import NotificationService


class SyncService:
87
leggen/services/transaction_processor.py
Normal file
@@ -0,0 +1,87 @@
from datetime import datetime
from typing import Any, Dict, List


class TransactionProcessor:
    """Handles processing and transformation of raw transaction data"""

    def process_transactions(
        self,
        account_id: str,
        account_info: Dict[str, Any],
        transaction_data: Dict[str, Any],
    ) -> List[Dict[str, Any]]:
        """Process raw transaction data into standardized format"""
        transactions = []

        # Process booked transactions
        for transaction in transaction_data.get("transactions", {}).get("booked", []):
            processed = self._process_single_transaction(
                account_id, account_info, transaction, "booked"
            )
            transactions.append(processed)

        # Process pending transactions
        for transaction in transaction_data.get("transactions", {}).get("pending", []):
            processed = self._process_single_transaction(
                account_id, account_info, transaction, "pending"
            )
            transactions.append(processed)

        return transactions

    def _process_single_transaction(
        self,
        account_id: str,
        account_info: Dict[str, Any],
        transaction: Dict[str, Any],
        status: str,
    ) -> Dict[str, Any]:
        """Process a single transaction into standardized format"""
        # Extract dates
        booked_date = transaction.get("bookingDateTime") or transaction.get(
            "bookingDate"
        )
        value_date = transaction.get("valueDateTime") or transaction.get("valueDate")

        if booked_date and value_date:
            min_date = min(
                datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
            )
        else:
            date_str = booked_date or value_date
            if not date_str:
                raise ValueError("No valid date found in transaction")
            min_date = datetime.fromisoformat(date_str)

        # Extract amount and currency
        transaction_amount = transaction.get("transactionAmount", {})
        amount = float(transaction_amount.get("amount", 0))
        currency = transaction_amount.get("currency", "")

        # Extract description
        description = transaction.get(
            "remittanceInformationUnstructured",
            ",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
        )

        # Extract transaction IDs - transactionId is now primary, internalTransactionId is reference
        transaction_id = transaction.get("transactionId")
        internal_transaction_id = transaction.get("internalTransactionId")

        if not transaction_id:
            raise ValueError("Transaction missing required transactionId field")

        return {
            "accountId": account_id,
            "transactionId": transaction_id,
            "internalTransactionId": internal_transaction_id,
            "institutionId": account_info["institution_id"],
            "iban": account_info.get("iban", "N/A"),
            "transactionDate": min_date,
            "description": description,
            "transactionValue": amount,
            "transactionCurrency": currency,
            "transactionStatus": status,
            "rawTransaction": transaction,
        }
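Usage sketch for the extracted `TransactionProcessor` (the payload follows the GoCardless booked/pending shape handled above; the account, IBAN, and amounts are made up):

```python
# Feed a minimal GoCardless-style payload through the new processor.
from leggen.services.transaction_processor import TransactionProcessor

processor = TransactionProcessor()
account_info = {"institution_id": "SANDBOX_SFIN0000", "iban": "DE89370400440532013000"}
payload = {
    "transactions": {
        "booked": [
            {
                "transactionId": "tx-001",
                "bookingDate": "2025-01-02",
                "transactionAmount": {"amount": "-12.50", "currency": "EUR"},
                "remittanceInformationUnstructured": "Coffee shop",
            }
        ],
        "pending": [],
    }
}

rows = processor.process_transactions("acct-123", account_info, payload)
print(rows[0]["transactionValue"], rows[0]["transactionStatus"])  # -12.5 booked
```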
@@ -1,18 +1,189 @@
import os
import sys
import tomllib
from pathlib import Path
from typing import Any, Dict, Optional

import click
import tomli_w
from loguru import logger
from pydantic import ValidationError

from leggen.models.config import Config as ConfigModel
from leggen.utils.paths import path_manager
from leggen.utils.text import error


class Config:
    _instance = None
    _config = None
    _config_model = None
    _config_path = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def load_config(self, config_path: Optional[str] = None) -> Dict[str, Any]:
        if self._config is not None:
            return self._config

        if config_path is None:
            config_path = os.environ.get("LEGGEN_CONFIG_FILE")
            if not config_path:
                config_path = str(path_manager.get_config_file_path())

        self._config_path = config_path

        try:
            with open(config_path, "rb") as f:
                raw_config = tomllib.load(f)
                logger.info(f"Configuration loaded from {config_path}")

            # Validate configuration using Pydantic
            try:
                self._config_model = ConfigModel(**raw_config)
                self._config = self._config_model.dict(by_alias=True, exclude_none=True)
                logger.info("Configuration validation successful")
            except ValidationError as e:
                logger.error(f"Configuration validation failed: {e}")
                raise ValueError(f"Invalid configuration: {e}") from e

        except FileNotFoundError:
            logger.error(f"Configuration file not found: {config_path}")
            raise
        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            raise

        return self._config

    def save_config(
        self,
        config_data: Optional[Dict[str, Any]] = None,
        config_path: Optional[str] = None,
    ) -> None:
        """Save configuration to TOML file"""
        if config_data is None:
            config_data = self._config

        if config_path is None:
            config_path = self._config_path or os.environ.get("LEGGEN_CONFIG_FILE")
            if not config_path:
                config_path = str(path_manager.get_config_file_path())

        if config_path is None:
            raise ValueError("No config path specified")
        if config_data is None:
            raise ValueError("No config data to save")

        # Validate the configuration before saving
        try:
            validated_model = ConfigModel(**config_data)
            validated_config = validated_model.dict(by_alias=True, exclude_none=True)
        except ValidationError as e:
            logger.error(f"Configuration validation failed before save: {e}")
            raise ValueError(f"Invalid configuration: {e}") from e

        # Ensure directory exists
        Path(config_path).parent.mkdir(parents=True, exist_ok=True)

        try:
            with open(config_path, "wb") as f:
                tomli_w.dump(validated_config, f)

            # Update in-memory config
            self._config = validated_config
            self._config_model = validated_model
            self._config_path = config_path
            logger.info(f"Configuration saved to {config_path}")
        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            raise

    def update_config(self, section: str, key: str, value: Any) -> None:
        """Update a specific configuration value"""
        if self._config is None:
            self.load_config()

        if self._config is None:
            raise RuntimeError("Failed to load config")

        if section not in self._config:
            self._config[section] = {}

        self._config[section][key] = value
        self.save_config()

    def update_section(self, section: str, data: Dict[str, Any]) -> None:
        """Update an entire configuration section"""
        if self._config is None:
            self.load_config()

        if self._config is None:
            raise RuntimeError("Failed to load config")

        self._config[section] = data
        self.save_config()

    @property
    def config(self) -> Dict[str, Any]:
        if self._config is None:
            self.load_config()
        if self._config is None:
            raise RuntimeError("Failed to load config")
        return self._config

    @property
    def gocardless_config(self) -> Dict[str, str]:
        return self.config.get("gocardless", {})

    @property
    def database_config(self) -> Dict[str, Any]:
        return self.config.get("database", {})

    @property
    def notifications_config(self) -> Dict[str, Any]:
        return self.config.get("notifications", {})

    @property
    def filters_config(self) -> Dict[str, Any]:
        return self.config.get("filters", {})

    @property
    def scheduler_config(self) -> Dict[str, Any]:
        """Get scheduler configuration with defaults"""
        default_schedule = {
            "sync": {
                "enabled": True,
                "hour": 3,
                "minute": 0,
                "cron": None,  # Optional custom cron expression
            }
        }
        return self.config.get("scheduler", default_schedule)


def load_config(ctx: click.Context, _, filename):
    try:
        with click.open_file(str(filename), "rb") as f:
            # TODO: Implement configuration file validation (use pydantic?)
            ctx.obj = tomllib.load(f)
            raw_config = tomllib.load(f)

        # Validate configuration using Pydantic
        try:
            validated_model = ConfigModel(**raw_config)
            ctx.obj = validated_model.dict(by_alias=True, exclude_none=True)
        except ValidationError as e:
            error(f"Configuration validation failed: {e}")
            sys.exit(1)

    except FileNotFoundError:
        error(
            "Configuration file not found. Provide a valid configuration file path with leggen --config <path> or LEGGEN_CONFIG=<path> environment variable."
        )
        sys.exit(1)


# Global singleton instance
config = Config()

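A short sketch of how the new `Config` singleton is consumed (the config path comes from `LEGGEN_CONFIG_FILE` or the default location; the `./config.toml` path below is an assumption):

```python
# Sketch: lazy-loading the validated config through the module-level singleton.
import os

from leggen.utils.config import config

os.environ.setdefault("LEGGEN_CONFIG_FILE", "./config.toml")  # assumed path

settings = config.config                    # loads and validates the TOML on first access
schedule = config.scheduler_config["sync"]  # defaults to hour=3, minute=0 when unset
print(schedule["hour"], schedule["minute"])
```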
@@ -1,132 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
import click
|
||||
|
||||
import leggen.database.sqlite as sqlite_engine
|
||||
from leggen.utils.text import info, warning
|
||||
|
||||
|
||||
def persist_balance(ctx: click.Context, account: str, balance: dict) -> None:
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||
|
||||
if not sqlite:
|
||||
warning("SQLite database is disabled, skipping balance saving")
|
||||
return
|
||||
|
||||
info(f"[{account}] Fetched balances, saving to SQLite")
|
||||
sqlite_engine.persist_balances(ctx, balance)
|
||||
|
||||
|
||||
def persist_transactions(ctx: click.Context, account: str, transactions: list) -> list:
|
||||
sqlite = ctx.obj.get("database", {}).get("sqlite", True)
|
||||
|
||||
if not sqlite:
|
||||
warning("SQLite database is disabled, skipping transaction saving")
|
||||
# WARNING: This will return the transactions list as is, without saving it to any database
|
||||
# Possible duplicate notifications will be sent if the filters are enabled
|
||||
return transactions
|
||||
|
||||
info(f"[{account}] Fetched {len(transactions)} transactions, saving to SQLite")
|
||||
return sqlite_engine.persist_transactions(ctx, account, transactions)
|
||||
|
||||
|
||||
def save_transactions(ctx: click.Context, account: str) -> list:
|
||||
import requests
|
||||
|
||||
api_url = ctx.obj.get("api_url", "http://localhost:8000")
|
||||
|
||||
info(f"[{account}] Getting account details")
|
||||
res = requests.get(f"{api_url}/accounts/{account}")
|
||||
res.raise_for_status()
|
||||
account_info = res.json()
|
||||
|
||||
info(f"[{account}] Getting transactions")
|
||||
transactions = []
|
||||
|
||||
res = requests.get(f"{api_url}/accounts/{account}/transactions/")
|
||||
res.raise_for_status()
|
||||
account_transactions = res.json().get("transactions", [])
|
||||
|
||||
for transaction in account_transactions.get("booked", []):
|
||||
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||
"bookingDate"
|
||||
)
|
||||
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||
if booked_date and value_date:
|
||||
min_date = min(
|
||||
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||
)
|
||||
else:
|
||||
min_date = datetime.fromisoformat(booked_date or value_date)
|
||||
|
||||
transactionValue = float(
|
||||
transaction.get("transactionAmount", {}).get("amount", 0)
|
||||
)
|
||||
currency = transaction.get("transactionAmount", {}).get("currency", "")
|
||||
|
||||
description = transaction.get(
|
||||
"remittanceInformationUnstructured",
|
||||
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||
)
|
||||
|
||||
# Extract transaction ID, using transactionId as fallback when internalTransactionId is missing
|
||||
transaction_id = transaction.get("internalTransactionId") or transaction.get(
|
||||
"transactionId"
|
||||
)
|
||||
|
||||
t = {
|
||||
"internalTransactionId": transaction_id,
|
||||
"institutionId": account_info["institution_id"],
|
||||
"iban": account_info.get("iban", "N/A"),
|
||||
"transactionDate": min_date,
|
||||
"description": description,
|
||||
"transactionValue": transactionValue,
|
||||
"transactionCurrency": currency,
|
||||
"transactionStatus": "booked",
|
||||
"accountId": account,
|
||||
"rawTransaction": transaction,
|
||||
}
|
||||
transactions.append(t)
|
||||
|
||||
for transaction in account_transactions.get("pending", []):
|
||||
booked_date = transaction.get("bookingDateTime") or transaction.get(
|
||||
"bookingDate"
|
||||
)
|
||||
value_date = transaction.get("valueDateTime") or transaction.get("valueDate")
|
||||
if booked_date and value_date:
|
||||
min_date = min(
|
||||
datetime.fromisoformat(booked_date), datetime.fromisoformat(value_date)
|
||||
)
|
||||
else:
|
||||
min_date = datetime.fromisoformat(booked_date or value_date)
|
||||
|
||||
transactionValue = float(
|
||||
transaction.get("transactionAmount", {}).get("amount", 0)
|
||||
)
|
||||
currency = transaction.get("transactionAmount", {}).get("currency", "")
|
||||
|
||||
description = transaction.get(
|
||||
"remittanceInformationUnstructured",
|
||||
",".join(transaction.get("remittanceInformationUnstructuredArray", [])),
|
||||
)
|
||||
|
||||
# Extract transaction ID, using transactionId as fallback when internalTransactionId is missing
|
||||
transaction_id = transaction.get("internalTransactionId") or transaction.get(
|
||||
"transactionId"
|
||||
)
|
||||
|
||||
t = {
|
||||
"internalTransactionId": transaction_id,
|
||||
"institutionId": account_info["institution_id"],
|
||||
"iban": account_info.get("iban", "N/A"),
|
||||
"transactionDate": min_date,
|
||||
"description": description,
|
||||
"transactionValue": transactionValue,
|
||||
"transactionCurrency": currency,
|
||||
"transactionStatus": "pending",
|
||||
"accountId": account,
|
||||
"rawTransaction": transaction,
|
||||
}
|
||||
transactions.append(t)
|
||||
|
||||
return persist_transactions(ctx, account, transactions)
|
||||
@@ -1,5 +1,6 @@
|
||||
"""Centralized path management for Leggen."""
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
@@ -7,32 +8,32 @@ from typing import Optional
|
||||
|
||||
class PathManager:
|
||||
"""Manages configurable paths for config and database files."""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self._config_dir: Optional[Path] = None
|
||||
self._database_path: Optional[Path] = None
|
||||
|
||||
|
||||
def get_config_dir(self) -> Path:
|
||||
"""Get the configuration directory."""
|
||||
if self._config_dir is not None:
|
||||
return self._config_dir
|
||||
|
||||
|
||||
# Check environment variable first
|
||||
config_dir = os.environ.get("LEGGEN_CONFIG_DIR")
|
||||
if config_dir:
|
||||
return Path(config_dir)
|
||||
|
||||
|
||||
# Default to ~/.config/leggen
|
||||
return Path.home() / ".config" / "leggen"
|
||||
|
||||
|
||||
def set_config_dir(self, path: Path) -> None:
|
||||
"""Set the configuration directory."""
|
||||
self._config_dir = Path(path)
|
||||
|
||||
|
||||
def get_config_file_path(self) -> Path:
|
||||
"""Get the configuration file path."""
|
||||
return self.get_config_dir() / "config.toml"
|
||||
|
||||
|
||||
def get_database_path(self) -> Path:
|
||||
"""Get the database file path and ensure the directory exists."""
|
||||
if self._database_path is not None:
|
||||
@@ -45,32 +46,28 @@ class PathManager:
|
||||
else:
|
||||
# Default to config_dir/leggen.db
|
||||
db_path = self.get_config_dir() / "leggen.db"
|
||||
|
||||
|
||||
# Try to ensure the directory exists, but handle permission errors gracefully
|
||||
        with contextlib.suppress(PermissionError, OSError):
            db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
return db_path
|
||||
|
||||
|
||||
def set_database_path(self, path: Path) -> None:
|
||||
"""Set the database file path."""
|
||||
self._database_path = Path(path)
|
||||
|
||||
|
||||
def get_auth_file_path(self) -> Path:
|
||||
"""Get the authentication file path."""
|
||||
return self.get_config_dir() / "auth.json"
|
||||
|
||||
|
||||
def ensure_config_dir_exists(self) -> None:
|
||||
"""Ensure the configuration directory exists."""
|
||||
self.get_config_dir().mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
    def ensure_database_dir_exists(self) -> None:
        """Ensure the database directory exists.

        Note: get_database_path() now automatically ensures the directory exists,
        so this method is mainly for explicit directory creation in tests.
        """
|
||||
@@ -78,4 +75,4 @@ class PathManager:
|
||||
|
||||
|
||||
# Global instance for the application
|
||||
path_manager = PathManager()
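A short usage sketch of the path manager; the directories below are illustrative, and it is assumed that `get_database_path()` returns an explicitly set path unchanged, as the setter suggests:

```python
import os
from pathlib import Path

from leggen.utils.paths import path_manager

# Environment override, picked up by get_config_dir() when no directory was set programmatically
os.environ["LEGGEN_CONFIG_DIR"] = "/tmp/leggen-demo"
assert path_manager.get_config_file_path() == Path("/tmp/leggen-demo/config.toml")

# Programmatic override for the database location
path_manager.set_database_path(Path("/tmp/leggen-demo/sample.db"))
assert path_manager.get_database_path() == Path("/tmp/leggen-demo/sample.db")
```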
|
||||
|
||||
@@ -1,146 +0,0 @@
|
||||
import os
|
||||
import tomllib
|
||||
import tomli_w
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
from loguru import logger
|
||||
from leggen.utils.paths import path_manager
|
||||
|
||||
|
||||
class Config:
|
||||
_instance = None
|
||||
_config = None
|
||||
_config_path = None
|
||||
|
||||
def __new__(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
return cls._instance
|
||||
|
||||
def load_config(self, config_path: Optional[str] = None) -> Dict[str, Any]:
|
||||
if self._config is not None:
|
||||
return self._config
|
||||
|
||||
if config_path is None:
|
||||
config_path = os.environ.get(
|
||||
"LEGGEN_CONFIG_FILE"
|
||||
)
|
||||
if not config_path:
|
||||
config_path = str(path_manager.get_config_file_path())
|
||||
|
||||
self._config_path = config_path
|
||||
|
||||
try:
|
||||
with open(config_path, "rb") as f:
|
||||
self._config = tomllib.load(f)
|
||||
logger.info(f"Configuration loaded from {config_path}")
|
||||
except FileNotFoundError:
|
||||
logger.error(f"Configuration file not found: {config_path}")
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error loading configuration: {e}")
|
||||
raise
|
||||
|
||||
return self._config
|
||||
|
||||
def save_config(
|
||||
self,
|
||||
config_data: Optional[Dict[str, Any]] = None,
|
||||
config_path: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Save configuration to TOML file"""
|
||||
if config_data is None:
|
||||
config_data = self._config
|
||||
|
||||
if config_path is None:
|
||||
config_path = self._config_path or os.environ.get(
|
||||
"LEGGEN_CONFIG_FILE"
|
||||
)
|
||||
if not config_path:
|
||||
config_path = str(path_manager.get_config_file_path())
|
||||
|
||||
if config_path is None:
|
||||
raise ValueError("No config path specified")
|
||||
if config_data is None:
|
||||
raise ValueError("No config data to save")
|
||||
|
||||
# Ensure directory exists
|
||||
Path(config_path).parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
try:
|
||||
with open(config_path, "wb") as f:
|
||||
tomli_w.dump(config_data, f)
|
||||
|
||||
# Update in-memory config
|
||||
self._config = config_data
|
||||
self._config_path = config_path
|
||||
logger.info(f"Configuration saved to {config_path}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving configuration: {e}")
|
||||
raise
|
||||
|
||||
def update_config(self, section: str, key: str, value: Any) -> None:
|
||||
"""Update a specific configuration value"""
|
||||
if self._config is None:
|
||||
self.load_config()
|
||||
|
||||
if self._config is None:
|
||||
raise RuntimeError("Failed to load config")
|
||||
|
||||
if section not in self._config:
|
||||
self._config[section] = {}
|
||||
|
||||
self._config[section][key] = value
|
||||
self.save_config()
|
||||
|
||||
def update_section(self, section: str, data: Dict[str, Any]) -> None:
|
||||
"""Update an entire configuration section"""
|
||||
if self._config is None:
|
||||
self.load_config()
|
||||
|
||||
if self._config is None:
|
||||
raise RuntimeError("Failed to load config")
|
||||
|
||||
self._config[section] = data
|
||||
self.save_config()
|
||||
|
||||
@property
|
||||
def config(self) -> Dict[str, Any]:
|
||||
if self._config is None:
|
||||
self.load_config()
|
||||
if self._config is None:
|
||||
raise RuntimeError("Failed to load config")
|
||||
return self._config
|
||||
|
||||
@property
|
||||
def gocardless_config(self) -> Dict[str, str]:
|
||||
return self.config.get("gocardless", {})
|
||||
|
||||
@property
|
||||
def database_config(self) -> Dict[str, Any]:
|
||||
return self.config.get("database", {})
|
||||
|
||||
@property
|
||||
def notifications_config(self) -> Dict[str, Any]:
|
||||
return self.config.get("notifications", {})
|
||||
|
||||
@property
|
||||
def filters_config(self) -> Dict[str, Any]:
|
||||
return self.config.get("filters", {})
|
||||
|
||||
@property
|
||||
def scheduler_config(self) -> Dict[str, Any]:
|
||||
"""Get scheduler configuration with defaults"""
|
||||
default_schedule = {
|
||||
"sync": {
|
||||
"enabled": True,
|
||||
"hour": 3,
|
||||
"minute": 0,
|
||||
"cron": None, # Optional custom cron expression
|
||||
}
|
||||
}
|
||||
return self.config.get("scheduler", default_schedule)
|
||||
|
||||
|
||||
config = Config()
|
||||
@@ -1,10 +0,0 @@
|
||||
REQUISITION_STATUS = {
|
||||
"CR": "CREATED",
|
||||
"GC": "GIVING_CONSENT",
|
||||
"UA": "UNDERGOING_AUTHENTICATION",
|
||||
"RJ": "REJECTED",
|
||||
"SA": "SELECTING_ACCOUNTS",
|
||||
"GA": "GRANTING_ACCESS",
|
||||
"LN": "LINKED",
|
||||
"EX": "EXPIRED",
|
||||
}
|
||||
@@ -34,6 +34,7 @@ dependencies = [
|
||||
"apscheduler>=3.10.0,<4",
|
||||
"tomli-w>=1.0.0,<2",
|
||||
"httpx>=0.28.1",
|
||||
"pydantic>=2.0.0,<3",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
@@ -41,7 +42,6 @@ Repository = "https://github.com/elisiariocouto/leggen"
|
||||
|
||||
[project.scripts]
|
||||
leggen = "leggen.main:cli"
|
||||
leggend = "leggend.main:main"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
@@ -58,10 +58,10 @@ dev = [
|
||||
]
|
||||
|
||||
[tool.hatch.build.targets.sdist]
|
||||
include = ["leggen", "leggend"]
|
||||
include = ["leggen"]
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
include = ["leggen", "leggend"]
|
||||
include = ["leggen"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
@@ -69,7 +69,7 @@ build-backend = "hatchling.build"
|
||||
|
||||
[tool.ruff]
|
||||
lint.ignore = ["E501", "B008", "B006"]
|
||||
lint.extend-select = ["B", "C4", "PIE", "T20", "SIM", "TCH"]
|
||||
lint.extend-select = ["B", "C4", "I", "PIE", "T20", "SIM", "TCH"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Sample database generator for Leggen testing and development."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import random
|
||||
import sqlite3
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import click
|
||||
|
||||
# Add the project root to the Python path
|
||||
project_root = Path(__file__).parent.parent
|
||||
sys.path.insert(0, str(project_root))
|
||||
|
||||
import click
|
||||
from leggen.utils.paths import path_manager
|
||||
# Import after path setup - this is necessary for the script to work
|
||||
from leggen.utils.paths import path_manager # noqa: E402
|
||||
|
||||
|
||||
class SampleDataGenerator:
|
||||
@@ -32,7 +32,7 @@ class SampleDataGenerator:
|
||||
"country": "LT",
|
||||
},
|
||||
{
|
||||
"id": "BANCOBPI_BBPIPTPL",
|
||||
"name": "Banco BPI",
|
||||
"bic": "BBPIPTPL",
|
||||
"country": "PT",
|
||||
@@ -40,7 +40,7 @@ class SampleDataGenerator:
|
||||
{
|
||||
"id": "MONZO_MONZGB2L",
|
||||
"name": "Monzo Bank",
|
||||
"bic": "MONZGB2L",
|
||||
"country": "GB",
|
||||
},
|
||||
{
|
||||
@@ -50,16 +50,40 @@ class SampleDataGenerator:
|
||||
"country": "BR",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
self.transaction_types = [
|
||||
{"description": "Grocery Store", "amount_range": (-150, -20), "frequency": 0.3},
|
||||
{
|
||||
"description": "Grocery Store",
|
||||
"amount_range": (-150, -20),
|
||||
"frequency": 0.3,
|
||||
},
|
||||
{"description": "Coffee Shop", "amount_range": (-15, -3), "frequency": 0.2},
|
||||
{"description": "Gas Station", "amount_range": (-80, -30), "frequency": 0.1},
|
||||
{"description": "Online Shopping", "amount_range": (-200, -25), "frequency": 0.15},
|
||||
{"description": "Restaurant", "amount_range": (-60, -15), "frequency": 0.15},
|
||||
{
|
||||
"description": "Gas Station",
|
||||
"amount_range": (-80, -30),
|
||||
"frequency": 0.1,
|
||||
},
|
||||
{
|
||||
"description": "Online Shopping",
|
||||
"amount_range": (-200, -25),
|
||||
"frequency": 0.15,
|
||||
},
|
||||
{
|
||||
"description": "Restaurant",
|
||||
"amount_range": (-60, -15),
|
||||
"frequency": 0.15,
|
||||
},
|
||||
{"description": "Salary", "amount_range": (2500, 5000), "frequency": 0.02},
|
||||
{"description": "ATM Withdrawal", "amount_range": (-200, -20), "frequency": 0.05},
|
||||
{"description": "Transfer to Savings", "amount_range": (-1000, -100), "frequency": 0.03},
|
||||
{
|
||||
"description": "ATM Withdrawal",
|
||||
"amount_range": (-200, -20),
|
||||
"frequency": 0.05,
|
||||
},
|
||||
{
|
||||
"description": "Transfer to Savings",
|
||||
"amount_range": (-1000, -100),
|
||||
"frequency": 0.03,
|
||||
},
|
||||
]
|
||||
|
||||
def ensure_database_dir(self):
|
||||
@@ -120,15 +144,33 @@ class SampleDataGenerator:
|
||||
""")
|
||||
|
||||
# Create indexes
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_transactions_internal_id ON transactions(internalTransactionId)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_transactions_date ON transactions(transactionDate)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_transactions_account_date ON transactions(accountId, transactionDate)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_transactions_amount ON transactions(transactionValue)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_balances_account_id ON balances(account_id)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_balances_timestamp ON balances(timestamp)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp ON balances(account_id, type, timestamp)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_accounts_institution_id ON accounts(institution_id)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_accounts_status ON accounts(status)")
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_transactions_internal_id ON transactions(internalTransactionId)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_transactions_date ON transactions(transactionDate)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_transactions_account_date ON transactions(accountId, transactionDate)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_transactions_amount ON transactions(transactionValue)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_balances_account_id ON balances(account_id)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_balances_timestamp ON balances(timestamp)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_balances_account_type_timestamp ON balances(account_id, type, timestamp)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_accounts_institution_id ON accounts(institution_id)"
|
||||
)
|
||||
cursor.execute(
|
||||
"CREATE INDEX IF NOT EXISTS idx_accounts_status ON accounts(status)"
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
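The composite indexes above target the most common read patterns; for example, a per-account, date-bounded query like the sketch below (database path and account id are illustrative) can be served by `idx_transactions_account_date`:

```python
import sqlite3

conn = sqlite3.connect("leggen-dev.db")  # illustrative path
cursor = conn.cursor()

# Served by idx_transactions_account_date (accountId, transactionDate)
cursor.execute(
    """
    SELECT transactionDate, description, transactionValue, transactionCurrency
    FROM transactions
    WHERE accountId = ? AND transactionDate >= ?
    ORDER BY transactionDate DESC
    """,
    ("account-001-1234", "2025-01-01"),
)
rows = cursor.fetchall()
conn.close()
```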
|
||||
@@ -141,78 +183,109 @@ class SampleDataGenerator:
|
||||
"GB": lambda: f"GB{random.randint(10, 99)}MONZ{random.randint(100000, 999999)}{random.randint(100000, 999999)}",
|
||||
"BR": lambda: f"BR{random.randint(10, 99)}{random.randint(10000000, 99999999)}{random.randint(1000, 9999)}{random.randint(10000000, 99999999)}",
|
||||
}
|
||||
return ibans.get(country_code, lambda: f"{country_code}{random.randint(1000000000000000, 9999999999999999)}")()
|
||||
return ibans.get(
|
||||
country_code,
|
||||
lambda: f"{country_code}{random.randint(1000000000000000, 9999999999999999)}",
|
||||
)()
|
||||
|
||||
def generate_accounts(self, num_accounts: int = 3) -> List[Dict[str, Any]]:
|
||||
"""Generate sample accounts."""
|
||||
accounts = []
|
||||
base_date = datetime.now() - timedelta(days=90)
|
||||
|
||||
|
||||
for i in range(num_accounts):
|
||||
institution = random.choice(self.institutions)
|
||||
account_id = f"account-{i+1:03d}-{random.randint(1000, 9999)}"
|
||||
|
||||
account_id = f"account-{i + 1:03d}-{random.randint(1000, 9999)}"
|
||||
|
||||
account = {
|
||||
"id": account_id,
|
||||
"institution_id": institution["id"],
|
||||
"status": "READY",
|
||||
"iban": self.generate_iban(institution["country"]),
|
||||
"name": f"Personal Account {i+1}",
|
||||
"name": f"Personal Account {i + 1}",
|
||||
"currency": "EUR",
|
||||
"created": (base_date + timedelta(days=random.randint(0, 30))).isoformat(),
|
||||
"last_accessed": (datetime.now() - timedelta(hours=random.randint(1, 48))).isoformat(),
|
||||
"created": (
|
||||
base_date + timedelta(days=random.randint(0, 30))
|
||||
).isoformat(),
|
||||
"last_accessed": (
|
||||
datetime.now() - timedelta(hours=random.randint(1, 48))
|
||||
).isoformat(),
|
||||
"last_updated": datetime.now().isoformat(),
|
||||
}
|
||||
accounts.append(account)
|
||||
|
||||
|
||||
return accounts
|
||||
|
||||
def generate_transactions(self, accounts: List[Dict[str, Any]], num_transactions_per_account: int = 50) -> List[Dict[str, Any]]:
|
||||
def generate_transactions(
|
||||
self, accounts: List[Dict[str, Any]], num_transactions_per_account: int = 50
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Generate sample transactions for accounts."""
|
||||
transactions = []
|
||||
base_date = datetime.now() - timedelta(days=60)
|
||||
|
||||
|
||||
for account in accounts:
|
||||
account_transactions = []
|
||||
current_balance = random.uniform(500, 3000)
|
||||
|
||||
|
||||
for i in range(num_transactions_per_account):
|
||||
# Choose transaction type based on frequency weights
|
||||
transaction_type = random.choices(
|
||||
self.transaction_types,
|
||||
weights=[t["frequency"] for t in self.transaction_types]
|
||||
weights=[t["frequency"] for t in self.transaction_types],
|
||||
)[0]
|
||||
|
||||
|
||||
# Generate transaction amount
|
||||
min_amount, max_amount = transaction_type["amount_range"]
|
||||
amount = round(random.uniform(min_amount, max_amount), 2)
|
||||
|
||||
|
||||
# Generate transaction date (more recent transactions are more likely)
|
||||
days_ago = random.choices(
|
||||
range(60),
|
||||
weights=[1.5 ** (60 - d) for d in range(60)]
|
||||
range(60), weights=[1.5 ** (60 - d) for d in range(60)]
|
||||
)[0]
|
||||
transaction_date = base_date + timedelta(days=days_ago, hours=random.randint(6, 22), minutes=random.randint(0, 59))
|
||||
|
||||
transaction_date = base_date + timedelta(
|
||||
days=days_ago,
|
||||
hours=random.randint(6, 22),
|
||||
minutes=random.randint(0, 59),
|
||||
)
|
||||
|
||||
# Generate transaction IDs
|
||||
transaction_id = f"bank-txn-{account['id']}-{i+1:04d}"
|
||||
transaction_id = f"bank-txn-{account['id']}-{i + 1:04d}"
|
||||
internal_transaction_id = f"int-txn-{random.randint(100000, 999999)}"
|
||||
|
||||
|
||||
# Create realistic descriptions
|
||||
descriptions = {
|
||||
"Grocery Store": ["TESCO", "SAINSBURY'S", "LIDL", "ALDI", "WALMART", "CARREFOUR"],
|
||||
"Coffee Shop": ["STARBUCKS", "COSTA COFFEE", "PRET A MANGER", "LOCAL CAFE"],
|
||||
"Grocery Store": [
|
||||
"TESCO",
|
||||
"SAINSBURY'S",
|
||||
"LIDL",
|
||||
"ALDI",
|
||||
"WALMART",
|
||||
"CARREFOUR",
|
||||
],
|
||||
"Coffee Shop": [
|
||||
"STARBUCKS",
|
||||
"COSTA COFFEE",
|
||||
"PRET A MANGER",
|
||||
"LOCAL CAFE",
|
||||
],
|
||||
"Gas Station": ["BP", "SHELL", "ESSO", "GALP", "PETROBRAS"],
|
||||
"Online Shopping": ["AMAZON", "EBAY", "ZALANDO", "ASOS", "APPLE"],
|
||||
"Restaurant": ["PIZZA HUT", "MCDONALD'S", "BURGER KING", "LOCAL RESTAURANT"],
|
||||
"Restaurant": [
|
||||
"PIZZA HUT",
|
||||
"MCDONALD'S",
|
||||
"BURGER KING",
|
||||
"LOCAL RESTAURANT",
|
||||
],
|
||||
"Salary": ["MONTHLY SALARY", "PAYROLL DEPOSIT", "SALARY PAYMENT"],
|
||||
"ATM Withdrawal": ["ATM WITHDRAWAL", "CASH WITHDRAWAL"],
|
||||
"Transfer to Savings": ["SAVINGS TRANSFER", "INVESTMENT TRANSFER"],
|
||||
}
|
||||
|
||||
specific_descriptions = descriptions.get(transaction_type["description"], [transaction_type["description"]])
|
||||
|
||||
specific_descriptions = descriptions.get(
|
||||
transaction_type["description"], [transaction_type["description"]]
|
||||
)
|
||||
description = random.choice(specific_descriptions)
|
||||
|
||||
|
||||
# Create raw transaction (simplified GoCardless format)
|
||||
raw_transaction = {
|
||||
"transactionId": transaction_id,
|
||||
@@ -220,15 +293,17 @@ class SampleDataGenerator:
|
||||
"valueDate": transaction_date.strftime("%Y-%m-%d"),
|
||||
"transactionAmount": {
|
||||
"amount": str(amount),
|
||||
"currency": account["currency"]
|
||||
"currency": account["currency"],
|
||||
},
|
||||
"remittanceInformationUnstructured": description,
|
||||
"bankTransactionCode": "PMNT" if amount < 0 else "RCDT",
|
||||
}
|
||||
|
||||
|
||||
# Determine status (most are booked, some recent ones might be pending)
|
||||
status = "pending" if days_ago < 2 and random.random() < 0.1 else "booked"
|
||||
|
||||
status = (
|
||||
"pending" if days_ago < 2 and random.random() < 0.1 else "booked"
|
||||
)
|
||||
|
||||
transaction = {
|
||||
"accountId": account["id"],
|
||||
"transactionId": transaction_id,
|
||||
@@ -242,31 +317,33 @@ class SampleDataGenerator:
|
||||
"transactionStatus": status,
|
||||
"rawTransaction": raw_transaction,
|
||||
}
|
||||
|
||||
|
||||
account_transactions.append(transaction)
|
||||
current_balance += amount
|
||||
|
||||
|
||||
# Sort transactions by date for realistic ordering
|
||||
account_transactions.sort(key=lambda x: x["transactionDate"])
|
||||
transactions.extend(account_transactions)
|
||||
|
||||
|
||||
return transactions
|
||||
|
||||
def generate_balances(self, accounts: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
||||
"""Generate sample balances for accounts."""
|
||||
balances = []
|
||||
|
||||
|
||||
for account in accounts:
|
||||
# Calculate balance from transactions (simplified)
|
||||
base_balance = random.uniform(500, 2000)
|
||||
|
||||
|
||||
balance_types = ["interimAvailable", "closingBooked", "authorised"]
|
||||
|
||||
|
||||
for balance_type in balance_types:
|
||||
# Add some variation to balance types
|
||||
variation = random.uniform(-50, 50) if balance_type != "interimAvailable" else 0
|
||||
variation = (
|
||||
random.uniform(-50, 50) if balance_type != "interimAvailable" else 0
|
||||
)
|
||||
balance_amount = base_balance + variation
|
||||
|
||||
|
||||
balance = {
|
||||
"account_id": account["id"],
|
||||
"bank": account["institution_id"],
|
||||
@@ -278,87 +355,129 @@ class SampleDataGenerator:
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
}
|
||||
balances.append(balance)
|
||||
|
||||
|
||||
return balances
|
||||
|
||||
def insert_data(self, accounts: List[Dict[str, Any]], transactions: List[Dict[str, Any]], balances: List[Dict[str, Any]]):
|
||||
def insert_data(
|
||||
self,
|
||||
accounts: List[Dict[str, Any]],
|
||||
transactions: List[Dict[str, Any]],
|
||||
balances: List[Dict[str, Any]],
|
||||
):
|
||||
"""Insert generated data into the database."""
|
||||
conn = sqlite3.connect(str(self.db_path))
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Insert accounts
|
||||
for account in accounts:
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO accounts
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT OR REPLACE INTO accounts
|
||||
(id, institution_id, status, iban, name, currency, created, last_accessed, last_updated)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
account["id"], account["institution_id"], account["status"], account["iban"],
|
||||
account["name"], account["currency"], account["created"],
|
||||
account["last_accessed"], account["last_updated"]
|
||||
))
|
||||
""",
|
||||
(
|
||||
account["id"],
|
||||
account["institution_id"],
|
||||
account["status"],
|
||||
account["iban"],
|
||||
account["name"],
|
||||
account["currency"],
|
||||
account["created"],
|
||||
account["last_accessed"],
|
||||
account["last_updated"],
|
||||
),
|
||||
)
|
||||
|
||||
# Insert transactions
|
||||
for transaction in transactions:
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO transactions
|
||||
(accountId, transactionId, internalTransactionId, institutionId, iban,
|
||||
transactionDate, description, transactionValue, transactionCurrency,
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT OR REPLACE INTO transactions
|
||||
(accountId, transactionId, internalTransactionId, institutionId, iban,
|
||||
transactionDate, description, transactionValue, transactionCurrency,
|
||||
transactionStatus, rawTransaction)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
transaction["accountId"], transaction["transactionId"],
|
||||
transaction["internalTransactionId"], transaction["institutionId"],
|
||||
transaction["iban"], transaction["transactionDate"], transaction["description"],
|
||||
transaction["transactionValue"], transaction["transactionCurrency"],
|
||||
transaction["transactionStatus"], json.dumps(transaction["rawTransaction"])
|
||||
))
|
||||
""",
|
||||
(
|
||||
transaction["accountId"],
|
||||
transaction["transactionId"],
|
||||
transaction["internalTransactionId"],
|
||||
transaction["institutionId"],
|
||||
transaction["iban"],
|
||||
transaction["transactionDate"],
|
||||
transaction["description"],
|
||||
transaction["transactionValue"],
|
||||
transaction["transactionCurrency"],
|
||||
transaction["transactionStatus"],
|
||||
json.dumps(transaction["rawTransaction"]),
|
||||
),
|
||||
)
|
||||
|
||||
# Insert balances
|
||||
for balance in balances:
|
||||
cursor.execute("""
|
||||
INSERT INTO balances
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT INTO balances
|
||||
(account_id, bank, status, iban, amount, currency, type, timestamp)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
balance["account_id"], balance["bank"], balance["status"], balance["iban"],
|
||||
balance["amount"], balance["currency"], balance["type"], balance["timestamp"]
|
||||
))
|
||||
""",
|
||||
(
|
||||
balance["account_id"],
|
||||
balance["bank"],
|
||||
balance["status"],
|
||||
balance["iban"],
|
||||
balance["amount"],
|
||||
balance["currency"],
|
||||
balance["type"],
|
||||
balance["timestamp"],
|
||||
),
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
def generate_sample_database(self, num_accounts: int = 3, num_transactions_per_account: int = 50):
|
||||
def generate_sample_database(
|
||||
self, num_accounts: int = 3, num_transactions_per_account: int = 50
|
||||
):
|
||||
"""Generate complete sample database."""
|
||||
click.echo(f"🗄️ Creating sample database at: {self.db_path}")
|
||||
|
||||
|
||||
self.ensure_database_dir()
|
||||
self.create_tables()
|
||||
|
||||
|
||||
click.echo(f"👥 Generating {num_accounts} sample accounts...")
|
||||
accounts = self.generate_accounts(num_accounts)
|
||||
|
||||
click.echo(f"💳 Generating {num_transactions_per_account} transactions per account...")
|
||||
transactions = self.generate_transactions(accounts, num_transactions_per_account)
|
||||
|
||||
|
||||
click.echo(
|
||||
f"💳 Generating {num_transactions_per_account} transactions per account..."
|
||||
)
|
||||
transactions = self.generate_transactions(
|
||||
accounts, num_transactions_per_account
|
||||
)
|
||||
|
||||
click.echo("💰 Generating account balances...")
|
||||
balances = self.generate_balances(accounts)
|
||||
|
||||
|
||||
click.echo("💾 Inserting data into database...")
|
||||
self.insert_data(accounts, transactions, balances)
|
||||
|
||||
|
||||
# Print summary
|
||||
click.echo("\n✅ Sample database created successfully!")
|
||||
click.echo(f"📊 Summary:")
|
||||
click.echo("📊 Summary:")
|
||||
click.echo(f" - Accounts: {len(accounts)}")
|
||||
click.echo(f" - Transactions: {len(transactions)}")
|
||||
click.echo(f" - Balances: {len(balances)}")
|
||||
click.echo(f" - Database: {self.db_path}")
|
||||
|
||||
|
||||
# Show account details
|
||||
click.echo(f"\n📋 Sample accounts:")
|
||||
click.echo("\n📋 Sample accounts:")
|
||||
for account in accounts:
|
||||
institution_name = next(inst["name"] for inst in self.institutions if inst["id"] == account["institution_id"])
|
||||
institution_name = next(
|
||||
inst["name"]
|
||||
for inst in self.institutions
|
||||
if inst["id"] == account["institution_id"]
|
||||
)
|
||||
click.echo(f" - {account['id']} ({institution_name}) - {account['iban']}")
|
||||
|
||||
|
||||
@@ -387,40 +506,41 @@ class SampleDataGenerator:
|
||||
)
|
||||
def main(database: Path, accounts: int, transactions: int, force: bool):
|
||||
"""Generate a sample database with realistic financial data for testing Leggen."""
|
||||
|
||||
|
||||
# Determine database path
|
||||
if database:
|
||||
db_path = database
|
||||
else:
|
||||
# Use development database by default to avoid overwriting production data
|
||||
import os
|
||||
|
||||
env_path = os.environ.get("LEGGEN_DATABASE_PATH")
|
||||
if env_path:
|
||||
db_path = Path(env_path)
|
||||
else:
|
||||
# Default to development database in config directory
|
||||
db_path = path_manager.get_config_dir() / "leggen-dev.db"
|
||||
|
||||
|
||||
# Check if database exists and ask for confirmation
|
||||
if db_path.exists() and not force:
|
||||
click.echo(f"⚠️ Database already exists: {db_path}")
|
||||
if not click.confirm("Do you want to overwrite it?"):
|
||||
click.echo("Aborted.")
|
||||
return
|
||||
|
||||
|
||||
# Generate the sample database
|
||||
generator = SampleDataGenerator(db_path)
|
||||
generator.generate_sample_database(accounts, transactions)
|
||||
|
||||
|
||||
# Show usage instructions
|
||||
click.echo(f"\n🚀 Usage instructions:")
|
||||
click.echo(f"To use this sample database with leggen commands:")
|
||||
click.echo("\n🚀 Usage instructions:")
|
||||
click.echo("To use this sample database with leggen commands:")
|
||||
click.echo(f" export LEGGEN_DATABASE_PATH={db_path}")
|
||||
click.echo(f" leggen transactions")
|
||||
click.echo(f"")
|
||||
click.echo(f"To use this sample database with leggend API:")
|
||||
click.echo(f" leggend --database {db_path}")
|
||||
click.echo(" leggen transactions")
|
||||
click.echo("")
|
||||
click.echo("To use this sample database with leggen server:")
|
||||
click.echo(f" leggen server --database {db_path}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
"""Pytest configuration and shared fixtures."""
|
||||
|
||||
import pytest
|
||||
import tempfile
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from leggend.main import create_app
|
||||
from leggend.config import Config
|
||||
from leggen.commands.server import create_app
|
||||
from leggen.utils.config import Config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -87,11 +88,11 @@ def api_client(fastapi_app):
|
||||
def mock_db_path(temp_db_path):
|
||||
"""Mock the database path to use temporary database for testing."""
|
||||
from leggen.utils.paths import path_manager
|
||||
|
||||
|
||||
# Set the path manager to use the temporary database
|
||||
original_database_path = path_manager._database_path
|
||||
path_manager.set_database_path(temp_db_path)
|
||||
|
||||
|
||||
try:
|
||||
yield temp_db_path
|
||||
finally:
|
||||
|
||||
139 tests/unit/test_analytics_fix.py (new file)
@@ -0,0 +1,139 @@
|
||||
"""Tests for analytics fixes to ensure all transactions are used in statistics."""
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from unittest.mock import AsyncMock, Mock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from leggen.commands.server import create_app
|
||||
from leggen.services.database_service import DatabaseService
|
||||
|
||||
|
||||
class TestAnalyticsFix:
|
||||
"""Test analytics fixes for transaction limits"""
|
||||
|
||||
@pytest.fixture
|
||||
def client(self):
|
||||
app = create_app()
|
||||
return TestClient(app)
|
||||
|
||||
@pytest.fixture
|
||||
def mock_database_service(self):
|
||||
return Mock(spec=DatabaseService)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transaction_stats_uses_all_transactions(self, mock_database_service):
|
||||
"""Test that transaction stats endpoint uses all transactions (not limited to 100)"""
|
||||
# Mock data for 600 transactions (simulating the issue)
|
||||
mock_transactions = []
|
||||
for i in range(600):
|
||||
mock_transactions.append(
|
||||
{
|
||||
"transactionId": f"txn-{i}",
|
||||
"transactionDate": (
|
||||
datetime.now() - timedelta(days=i % 365)
|
||||
).isoformat(),
|
||||
"description": f"Transaction {i}",
|
||||
"transactionValue": 10.0 if i % 2 == 0 else -5.0,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": f"account-{i % 3}",
|
||||
}
|
||||
)
|
||||
|
||||
mock_database_service.get_transactions_from_db = AsyncMock(
|
||||
return_value=mock_transactions
|
||||
)
|
||||
|
||||
# Test that the endpoint calls get_transactions_from_db with limit=None
|
||||
with patch(
|
||||
"leggen.api.routes.transactions.database_service", mock_database_service
|
||||
):
|
||||
app = create_app()
|
||||
client = TestClient(app)
|
||||
|
||||
response = client.get("/api/v1/transactions/stats?days=365")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Verify that limit=None was passed to get all transactions
|
||||
mock_database_service.get_transactions_from_db.assert_called_once()
|
||||
call_args = mock_database_service.get_transactions_from_db.call_args
|
||||
assert call_args.kwargs.get("limit") is None, (
|
||||
"Stats endpoint should pass limit=None to get all transactions"
|
||||
)
|
||||
|
||||
# Verify that the response contains stats for all 600 transactions
|
||||
assert data["success"] is True
|
||||
stats = data["data"]
|
||||
assert stats["total_transactions"] == 600, (
|
||||
"Should process all 600 transactions, not just 100"
|
||||
)
|
||||
|
||||
# Verify calculations are correct for all transactions
|
||||
expected_income = sum(
|
||||
txn["transactionValue"]
|
||||
for txn in mock_transactions
|
||||
if txn["transactionValue"] > 0
|
||||
)
|
||||
expected_expenses = sum(
|
||||
abs(txn["transactionValue"])
|
||||
for txn in mock_transactions
|
||||
if txn["transactionValue"] < 0
|
||||
)
|
||||
|
||||
assert stats["total_income"] == expected_income
|
||||
assert stats["total_expenses"] == expected_expenses
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_endpoint_returns_all_transactions(
|
||||
self, mock_database_service
|
||||
):
|
||||
"""Test that the new analytics endpoint returns all transactions without pagination"""
|
||||
# Mock data for 600 transactions
|
||||
mock_transactions = []
|
||||
for i in range(600):
|
||||
mock_transactions.append(
|
||||
{
|
||||
"transactionId": f"txn-{i}",
|
||||
"transactionDate": (
|
||||
datetime.now() - timedelta(days=i % 365)
|
||||
).isoformat(),
|
||||
"description": f"Transaction {i}",
|
||||
"transactionValue": 10.0 if i % 2 == 0 else -5.0,
|
||||
"transactionCurrency": "EUR",
|
||||
"transactionStatus": "booked",
|
||||
"accountId": f"account-{i % 3}",
|
||||
}
|
||||
)
|
||||
|
||||
mock_database_service.get_transactions_from_db = AsyncMock(
|
||||
return_value=mock_transactions
|
||||
)
|
||||
|
||||
with patch(
|
||||
"leggen.api.routes.transactions.database_service", mock_database_service
|
||||
):
|
||||
app = create_app()
|
||||
client = TestClient(app)
|
||||
|
||||
response = client.get("/api/v1/transactions/analytics?days=365")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Verify that limit=None was passed to get all transactions
|
||||
mock_database_service.get_transactions_from_db.assert_called_once()
|
||||
call_args = mock_database_service.get_transactions_from_db.call_args
|
||||
assert call_args.kwargs.get("limit") is None, (
|
||||
"Analytics endpoint should pass limit=None"
|
||||
)
|
||||
|
||||
# Verify that all 600 transactions are returned
|
||||
assert data["success"] is True
|
||||
transactions_data = data["data"]
|
||||
assert len(transactions_data) == 600, (
|
||||
"Analytics endpoint should return all 600 transactions"
|
||||
)
|
||||
@@ -1,8 +1,9 @@
|
||||
"""Tests for accounts API endpoints."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
class TestAccountsAPI:
|
||||
@@ -43,13 +44,13 @@ class TestAccountsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_accounts_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_accounts_from_db",
|
||||
return_value=mock_accounts,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_balances_from_db",
|
||||
return_value=mock_balances,
|
||||
),
|
||||
):
|
||||
@@ -98,13 +99,13 @@ class TestAccountsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_account_details_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_account_details_from_db",
|
||||
return_value=mock_account,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_balances_from_db",
|
||||
return_value=mock_balances,
|
||||
),
|
||||
):
|
||||
@@ -148,9 +149,9 @@ class TestAccountsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_balances_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_balances_from_db",
|
||||
return_value=mock_balances,
|
||||
),
|
||||
):
|
||||
@@ -191,13 +192,13 @@ class TestAccountsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
@@ -243,13 +244,13 @@ class TestAccountsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
@@ -273,9 +274,9 @@ class TestAccountsAPI:
|
||||
):
|
||||
"""Test handling of non-existent account."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.accounts.database_service.get_account_details_from_db",
|
||||
"leggen.api.routes.accounts.database_service.get_account_details_from_db",
|
||||
return_value=None,
|
||||
),
|
||||
):
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
"""Tests for banks API endpoints."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
import respx
|
||||
import httpx
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
@@ -27,7 +28,7 @@ class TestBanksAPI:
|
||||
return_value=httpx.Response(200, json=sample_bank_data)
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
with patch("leggen.utils.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions?country=PT")
|
||||
|
||||
assert response.status_code == 200
|
||||
@@ -52,7 +53,7 @@ class TestBanksAPI:
|
||||
return_value=httpx.Response(200, json=[])
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
with patch("leggen.utils.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions?country=XX")
|
||||
|
||||
# Should still work but return empty or filtered results
|
||||
@@ -86,7 +87,7 @@ class TestBanksAPI:
|
||||
"redirect_url": "http://localhost:8000/",
|
||||
}
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
with patch("leggen.utils.config.config", mock_config):
|
||||
response = api_client.post("/api/v1/banks/connect", json=request_data)
|
||||
|
||||
assert response.status_code == 200
|
||||
@@ -122,7 +123,7 @@ class TestBanksAPI:
|
||||
return_value=httpx.Response(200, json=requisitions_data)
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
with patch("leggen.utils.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/status")
|
||||
|
||||
assert response.status_code == 200
|
||||
@@ -155,7 +156,7 @@ class TestBanksAPI:
|
||||
return_value=httpx.Response(401, json={"detail": "Invalid credentials"})
|
||||
)
|
||||
|
||||
with patch("leggend.config.config", mock_config):
|
||||
with patch("leggen.utils.config.config", mock_config):
|
||||
response = api_client.get("/api/v1/banks/institutions")
|
||||
|
||||
assert response.status_code == 500
|
||||
|
||||
@@ -1,20 +1,21 @@
|
||||
"""Tests for CLI API client."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import requests_mock
|
||||
from unittest.mock import patch
|
||||
|
||||
from leggen.api_client import LeggendAPIClient
|
||||
from leggen.api_client import LeggenAPIClient
|
||||
|
||||
|
||||
@pytest.mark.cli
|
||||
class TestLeggendAPIClient:
|
||||
class TestLeggenAPIClient:
|
||||
"""Test the CLI API client."""
|
||||
|
||||
def test_health_check_success(self):
|
||||
"""Test successful health check."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/health", json={"status": "healthy"})
|
||||
@@ -24,7 +25,7 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_health_check_failure(self):
|
||||
"""Test health check failure."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get("http://localhost:8000/health", status_code=500)
|
||||
@@ -34,7 +35,7 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_get_institutions_success(self, sample_bank_data):
|
||||
"""Test getting institutions via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
@@ -51,7 +52,7 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_get_accounts_success(self, sample_account_data):
|
||||
"""Test getting accounts via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
@@ -68,7 +69,7 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_trigger_sync_success(self):
|
||||
"""Test triggering sync via API client."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
@@ -84,14 +85,14 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_connection_error_handling(self):
|
||||
"""Test handling of connection errors."""
|
||||
client = LeggendAPIClient("http://localhost:9999") # Non-existent service
|
||||
client = LeggenAPIClient("http://localhost:9999") # Non-existent service
|
||||
|
||||
with pytest.raises((requests.ConnectionError, requests.RequestException)):
|
||||
client.get_accounts()
|
||||
|
||||
def test_http_error_handling(self):
|
||||
"""Test handling of HTTP errors."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
with requests_mock.Mocker() as m:
|
||||
m.get(
|
||||
@@ -106,19 +107,19 @@ class TestLeggendAPIClient:
|
||||
def test_custom_api_url(self):
|
||||
"""Test using custom API URL."""
|
||||
custom_url = "http://custom-host:9000"
|
||||
client = LeggendAPIClient(custom_url)
|
||||
client = LeggenAPIClient(custom_url)
|
||||
|
||||
assert client.base_url == custom_url
|
||||
|
||||
def test_environment_variable_url(self):
|
||||
"""Test using environment variable for API URL."""
|
||||
with patch.dict("os.environ", {"LEGGEND_API_URL": "http://env-host:7000"}):
|
||||
client = LeggendAPIClient()
|
||||
with patch.dict("os.environ", {"LEGGEN_API_URL": "http://env-host:7000"}):
|
||||
client = LeggenAPIClient()
|
||||
assert client.base_url == "http://env-host:7000"
|
||||
|
||||
def test_sync_with_options(self):
|
||||
"""Test sync with various options."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
@@ -135,7 +136,7 @@ class TestLeggendAPIClient:
|
||||
|
||||
def test_get_scheduler_config(self):
|
||||
"""Test getting scheduler configuration."""
|
||||
client = LeggendAPIClient("http://localhost:8000")
|
||||
client = LeggenAPIClient("http://localhost:8000")
|
||||
|
||||
api_response = {
|
||||
"success": True,
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
"""Tests for transactions API endpoints."""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch
|
||||
from datetime import datetime
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.api
|
||||
@@ -43,13 +44,13 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=2,
|
||||
),
|
||||
):
|
||||
@@ -90,13 +91,13 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
@@ -135,13 +136,13 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=1,
|
||||
),
|
||||
):
|
||||
@@ -178,13 +179,13 @@ class TestTransactionsAPI:
|
||||
):
|
||||
"""Test getting transactions when database returns empty result."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=[],
|
||||
),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transaction_count_from_db",
|
||||
return_value=0,
|
||||
),
|
||||
):
|
||||
@@ -203,9 +204,9 @@ class TestTransactionsAPI:
|
||||
):
|
||||
"""Test handling database error when getting transactions."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
side_effect=Exception("Database connection failed"),
|
||||
),
|
||||
):
|
||||
@@ -243,9 +244,9 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
),
|
||||
):
|
||||
@@ -284,9 +285,9 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
):
|
||||
@@ -306,9 +307,9 @@ class TestTransactionsAPI:
|
||||
):
|
||||
"""Test getting stats when no transactions match criteria."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=[],
|
||||
),
|
||||
):
|
||||
@@ -331,9 +332,9 @@ class TestTransactionsAPI:
|
||||
):
|
||||
"""Test handling database error when getting stats."""
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
side_effect=Exception("Database connection failed"),
|
||||
),
|
||||
):
|
||||
@@ -357,9 +358,9 @@ class TestTransactionsAPI:
|
||||
]
|
||||
|
||||
with (
|
||||
patch("leggend.config.config", mock_config),
|
||||
patch("leggen.utils.config.config", mock_config),
|
||||
patch(
|
||||
"leggend.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
"leggen.api.routes.transactions.database_service.get_transactions_from_db",
|
||||
return_value=mock_transactions,
|
||||
) as mock_get_transactions,
|
||||
):
|
||||
|
||||
@@ -1,9 +1,10 @@
"""Tests for configuration management."""

import pytest
from unittest.mock import patch

from leggend.config import Config
import pytest

from leggen.utils.config import Config

@pytest.mark.unit
@@ -37,10 +38,14 @@ class TestConfig:
# Reset singleton state for testing
config._config = None
config._config_path = None
config._config_model = None

result = config.load_config(str(config_file))

assert result == config_data
# Result should contain validated config data
assert result["gocardless"]["key"] == "test-key"
assert result["gocardless"]["secret"] == "test-secret"
assert result["database"]["sqlite"] is True
assert config.gocardless_config["key"] == "test-key"
assert config.database_config["sqlite"] is True

@@ -54,11 +59,19 @@ class TestConfig:

def test_save_config_success(self, temp_config_dir):
"""Test successful configuration saving."""
config_data = {"gocardless": {"key": "new-key", "secret": "new-secret"}}
config_data = {
"gocardless": {
"key": "new-key",
"secret": "new-secret",
"url": "https://bankaccountdata.gocardless.com/api/v2",
},
"database": {"sqlite": True},
}

config_file = temp_config_dir / "new_config.toml"
config = Config()
config._config = None
config._config_model = None

config.save_config(config_data, str(config_file))

@@ -70,12 +83,18 @@ class TestConfig:
with open(config_file, "rb") as f:
saved_data = tomllib.load(f)

assert saved_data == config_data
assert saved_data["gocardless"]["key"] == "new-key"
assert saved_data["gocardless"]["secret"] == "new-secret"
assert saved_data["database"]["sqlite"] is True

def test_update_config_success(self, temp_config_dir):
"""Test updating configuration values."""
initial_config = {
"gocardless": {"key": "old-key"},
"gocardless": {
"key": "old-key",
"secret": "old-secret",
"url": "https://bankaccountdata.gocardless.com/api/v2",
},
"database": {"sqlite": True},
}

@@ -87,6 +106,7 @@ class TestConfig:

config = Config()
config._config = None
config._config_model = None
config.load_config(str(config_file))

config.update_config("gocardless", "key", "new-key")
@@ -102,7 +122,14 @@ class TestConfig:

def test_update_section_success(self, temp_config_dir):
"""Test updating entire configuration section."""
initial_config = {"database": {"sqlite": True}}
initial_config = {
"gocardless": {
"key": "test-key",
"secret": "test-secret",
"url": "https://bankaccountdata.gocardless.com/api/v2",
},
"database": {"sqlite": True},
}

config_file = temp_config_dir / "config.toml"
with open(config_file, "wb") as f:
@@ -112,12 +139,13 @@ class TestConfig:

config = Config()
config._config = None
config._config_model = None
config.load_config(str(config_file))

new_db_config = {"sqlite": False, "path": "./custom.db"}
new_db_config = {"sqlite": False}
config.update_section("database", new_db_config)

assert config.database_config == new_db_config
assert config.database_config["sqlite"] is False

def test_scheduler_config_defaults(self):
"""Test scheduler configuration with defaults."""

@@ -1,18 +1,14 @@
"""Integration tests for configurable paths."""

import pytest
import tempfile
import os
import tempfile
from pathlib import Path
from unittest.mock import patch

import pytest

from leggen.services.database_service import DatabaseService
from leggen.utils.paths import path_manager
from leggen.database.sqlite import persist_balances, get_balances

class MockContext:
"""Mock context for testing."""
pass

@pytest.mark.unit
@@ -24,15 +20,15 @@ class TestConfigurablePaths:
# Reset path manager
original_config = path_manager._config_dir
original_db = path_manager._database_path

try:
path_manager._config_dir = None
path_manager._database_path = None

# Test defaults
config_dir = path_manager.get_config_dir()
db_path = path_manager.get_database_path()

assert config_dir == Path.home() / ".config" / "leggen"
assert db_path == Path.home() / ".config" / "leggen" / "leggen.db"
finally:
@@ -44,22 +40,25 @@ class TestConfigurablePaths:
with tempfile.TemporaryDirectory() as tmpdir:
test_config_dir = Path(tmpdir) / "test-config"
test_db_path = Path(tmpdir) / "test.db"

with patch.dict(os.environ, {
'LEGGEN_CONFIG_DIR': str(test_config_dir),
'LEGGEN_DATABASE_PATH': str(test_db_path)
}):

with patch.dict(
os.environ,
{
"LEGGEN_CONFIG_DIR": str(test_config_dir),
"LEGGEN_DATABASE_PATH": str(test_db_path),
},
):
# Reset path manager to pick up environment variables
original_config = path_manager._config_dir
original_db = path_manager._database_path

try:
path_manager._config_dir = None
path_manager._database_path = None

config_dir = path_manager.get_config_dir()
db_path = path_manager.get_database_path()

assert config_dir == test_config_dir
assert db_path == test_db_path
finally:
@@ -71,20 +70,25 @@ class TestConfigurablePaths:
with tempfile.TemporaryDirectory() as tmpdir:
test_config_dir = Path(tmpdir) / "explicit-config"
test_db_path = Path(tmpdir) / "explicit.db"

# Save original paths
original_config = path_manager._config_dir
original_db = path_manager._database_path

try:
# Set explicit paths
path_manager.set_config_dir(test_config_dir)
path_manager.set_database_path(test_db_path)

assert path_manager.get_config_dir() == test_config_dir
assert path_manager.get_database_path() == test_db_path
assert path_manager.get_config_file_path() == test_config_dir / "config.toml"
assert path_manager.get_auth_file_path() == test_config_dir / "auth.json"
assert (
path_manager.get_config_file_path()
== test_config_dir / "config.toml"
)
assert (
path_manager.get_auth_file_path() == test_config_dir / "auth.json"
)
finally:
# Restore original paths
path_manager._config_dir = original_config
@@ -94,40 +98,47 @@ class TestConfigurablePaths:
"""Test that database operations work with custom paths."""
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp_file:
test_db_path = Path(tmp_file.name)

# Save original database path
original_db = path_manager._database_path

try:
# Set custom database path
path_manager.set_database_path(test_db_path)

# Test database operations
ctx = MockContext()
balance = {
"account_id": "test-account",
"bank": "TEST_BANK",
"status": "active",

# Test database operations using DatabaseService
database_service = DatabaseService()
balance_data = {
"balances": [
{
"balanceAmount": {"amount": "1000.0", "currency": "EUR"},
"balanceType": "available",
}
],
"institution_id": "TEST_BANK",
"account_status": "active",
"iban": "TEST_IBAN",
"amount": 1000.0,
"currency": "EUR",
"type": "available",
"timestamp": "2023-01-01T00:00:00",
}

# Persist balance
persist_balances(ctx, balance)

# Use the internal balance persistence method since the test needs direct database access
import asyncio

asyncio.run(
database_service._persist_balance_sqlite("test-account", balance_data)
)

# Retrieve balances
balances = get_balances()

balances = asyncio.run(
database_service.get_balances_from_db("test-account")
)

assert len(balances) == 1
assert balances[0]["account_id"] == "test-account"
assert balances[0]["amount"] == 1000.0

# Verify database file exists at custom location
assert test_db_path.exists()

finally:
# Restore original path and cleanup
path_manager._database_path = original_db
@@ -139,24 +150,24 @@ class TestConfigurablePaths:
with tempfile.TemporaryDirectory() as tmpdir:
test_config_dir = Path(tmpdir) / "new" / "config" / "dir"
test_db_path = Path(tmpdir) / "new" / "db" / "dir" / "test.db"

# Save original paths
original_config = path_manager._config_dir
original_db = path_manager._database_path

try:
# Set paths to non-existent directories
path_manager.set_config_dir(test_config_dir)
path_manager.set_database_path(test_db_path)

# Ensure directories are created
path_manager.ensure_config_dir_exists()
path_manager.ensure_database_dir_exists()

assert test_config_dir.exists()
assert test_db_path.parent.exists()

finally:
# Restore original paths
path_manager._config_dir = original_config
path_manager._database_path = original_db
path_manager._database_path = original_db

@@ -1,10 +1,11 @@
"""Tests for database service."""

import pytest
from unittest.mock import patch
from datetime import datetime
from unittest.mock import patch

from leggend.services.database_service import DatabaseService
import pytest

from leggen.services.database_service import DatabaseService

@pytest.fixture
@@ -83,7 +84,9 @@ class TestDatabaseService:
self, database_service, sample_transactions_db_format
):
"""Test successful retrieval of transactions from database."""
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
with patch.object(
database_service, "_get_transactions"
) as mock_get_transactions:
mock_get_transactions.return_value = sample_transactions_db_format

result = await database_service.get_transactions_from_db(
@@ -107,7 +110,9 @@ class TestDatabaseService:
self, database_service, sample_transactions_db_format
):
"""Test retrieving transactions with filters."""
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
with patch.object(
database_service, "_get_transactions"
) as mock_get_transactions:
mock_get_transactions.return_value = sample_transactions_db_format

result = await database_service.get_transactions_from_db(
@@ -143,7 +148,9 @@ class TestDatabaseService:

async def test_get_transactions_from_db_error(self, database_service):
"""Test handling error when getting transactions."""
with patch("leggen.database.sqlite.get_transactions") as mock_get_transactions:
with patch.object(
database_service, "_get_transactions"
) as mock_get_transactions:
mock_get_transactions.side_effect = Exception("Database error")

result = await database_service.get_transactions_from_db()
@@ -152,7 +159,7 @@ class TestDatabaseService:

async def test_get_transaction_count_from_db_success(self, database_service):
"""Test successful retrieval of transaction count."""
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
with patch.object(database_service, "_get_transaction_count") as mock_get_count:
mock_get_count.return_value = 42

result = await database_service.get_transaction_count_from_db(
@@ -164,7 +171,7 @@ class TestDatabaseService:

async def test_get_transaction_count_from_db_with_filters(self, database_service):
"""Test getting transaction count with filters."""
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
with patch.object(database_service, "_get_transaction_count") as mock_get_count:
mock_get_count.return_value = 15

result = await database_service.get_transaction_count_from_db(
@@ -194,7 +201,7 @@ class TestDatabaseService:

async def test_get_transaction_count_from_db_error(self, database_service):
"""Test handling error when getting count."""
with patch("leggen.database.sqlite.get_transaction_count") as mock_get_count:
with patch.object(database_service, "_get_transaction_count") as mock_get_count:
mock_get_count.side_effect = Exception("Database error")

result = await database_service.get_transaction_count_from_db()
@@ -205,7 +212,7 @@ class TestDatabaseService:
self, database_service, sample_balances_db_format
):
"""Test successful retrieval of balances from database."""
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
with patch.object(database_service, "_get_balances") as mock_get_balances:
mock_get_balances.return_value = sample_balances_db_format

result = await database_service.get_balances_from_db(
@@ -227,7 +234,7 @@ class TestDatabaseService:

async def test_get_balances_from_db_error(self, database_service):
"""Test handling error when getting balances."""
with patch("leggen.database.sqlite.get_balances") as mock_get_balances:
with patch.object(database_service, "_get_balances") as mock_get_balances:
mock_get_balances.side_effect = Exception("Database error")

result = await database_service.get_balances_from_db()
@@ -242,7 +249,7 @@ class TestDatabaseService:
"iban": "LT313250081177977789",
}

with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
with patch.object(database_service, "_get_account_summary") as mock_get_summary:
mock_get_summary.return_value = mock_summary

result = await database_service.get_account_summary_from_db(
@@ -262,7 +269,7 @@ class TestDatabaseService:

async def test_get_account_summary_from_db_error(self, database_service):
"""Test handling error when getting summary."""
with patch("leggen.database.sqlite.get_account_summary") as mock_get_summary:
with patch.object(database_service, "_get_account_summary") as mock_get_summary:
mock_get_summary.side_effect = Exception("Database error")

result = await database_service.get_account_summary_from_db(

@@ -1,10 +1,11 @@
"""Tests for background scheduler."""

import pytest
from unittest.mock import patch, AsyncMock, MagicMock
from datetime import datetime
from unittest.mock import AsyncMock, MagicMock, patch

from leggend.background.scheduler import BackgroundScheduler
import pytest

from leggen.background.scheduler import BackgroundScheduler

@pytest.mark.unit
@@ -20,8 +21,8 @@ class TestBackgroundScheduler:
def scheduler(self):
"""Create scheduler instance for testing."""
with (
patch("leggend.background.scheduler.SyncService"),
patch("leggend.background.scheduler.config") as mock_config,
patch("leggen.background.scheduler.SyncService"),
patch("leggen.background.scheduler.config") as mock_config,
):
mock_config.scheduler_config = {
"sync": {"enabled": True, "hour": 3, "minute": 0}
@@ -37,7 +38,7 @@ class TestBackgroundScheduler:

def test_scheduler_start_default_config(self, scheduler, mock_config):
"""Test starting scheduler with default configuration."""
with patch("leggend.config.config") as mock_config_obj:
with patch("leggen.utils.config.config") as mock_config_obj:
mock_config_obj.scheduler_config = mock_config

# Mock the job that gets added
@@ -58,7 +59,7 @@ class TestBackgroundScheduler:

with (
patch.object(scheduler, "scheduler") as mock_scheduler,
patch("leggend.background.scheduler.config") as mock_config_obj,
patch("leggen.background.scheduler.config") as mock_config_obj,
):
mock_config_obj.scheduler_config = disabled_config
mock_scheduler.running = False
@@ -79,7 +80,7 @@ class TestBackgroundScheduler:
}
}

with patch("leggend.config.config") as mock_config_obj:
with patch("leggen.utils.config.config") as mock_config_obj:
mock_config_obj.scheduler_config = cron_config

scheduler.start()
@@ -97,7 +98,7 @@ class TestBackgroundScheduler:

with (
patch.object(scheduler, "scheduler") as mock_scheduler,
patch("leggend.background.scheduler.config") as mock_config_obj,
patch("leggen.background.scheduler.config") as mock_config_obj,
):
mock_config_obj.scheduler_config = invalid_cron_config
mock_scheduler.running = False
@@ -187,7 +188,7 @@ class TestBackgroundScheduler:

def test_scheduler_job_max_instances(self, scheduler, mock_config):
"""Test that sync jobs have max_instances=1."""
with patch("leggend.config.config") as mock_config_obj:
with patch("leggen.utils.config.config") as mock_config_obj:
mock_config_obj.scheduler_config = mock_config
scheduler.start()

@@ -1,364 +0,0 @@
"""Tests for SQLite database functions."""

import pytest
import tempfile
from pathlib import Path
from unittest.mock import patch
from datetime import datetime

import leggen.database.sqlite as sqlite_db

@pytest.fixture
def temp_db_path():
"""Create a temporary database file for testing."""
import uuid

with tempfile.TemporaryDirectory() as tmpdir:
db_path = Path(tmpdir) / f"test_{uuid.uuid4().hex}.db"
yield db_path

@pytest.fixture
def mock_home_db_path(temp_db_path):
"""Mock the database path to use temp file."""
from leggen.utils.paths import path_manager

# Set the path manager to use the temporary database
original_database_path = path_manager._database_path
path_manager.set_database_path(temp_db_path)

try:
yield temp_db_path
finally:
# Restore original path
path_manager._database_path = original_database_path

@pytest.fixture
def sample_transactions():
"""Sample transaction data for testing."""
return [
{
"transactionId": "bank-txn-001", # NEW: stable bank-provided ID
"internalTransactionId": "txn-001",
"institutionId": "REVOLUT_REVOLT21",
"iban": "LT313250081177977789",
"transactionDate": datetime(2025, 9, 1, 9, 30),
"description": "Coffee Shop Payment",
"transactionValue": -10.50,
"transactionCurrency": "EUR",
"transactionStatus": "booked",
"accountId": "test-account-123",
"rawTransaction": {"transactionId": "bank-txn-001", "some": "data"},
},
{
"transactionId": "bank-txn-002", # NEW: stable bank-provided ID
"internalTransactionId": "txn-002",
"institutionId": "REVOLUT_REVOLT21",
"iban": "LT313250081177977789",
"transactionDate": datetime(2025, 9, 2, 14, 15),
"description": "Grocery Store",
"transactionValue": -45.30,
"transactionCurrency": "EUR",
"transactionStatus": "booked",
"accountId": "test-account-123",
"rawTransaction": {"transactionId": "bank-txn-002", "other": "data"},
},
]

@pytest.fixture
def sample_balance():
"""Sample balance data for testing."""
return {
"account_id": "test-account-123",
"bank": "REVOLUT_REVOLT21",
"status": "active",
"iban": "LT313250081177977789",
"amount": 1000.00,
"currency": "EUR",
"type": "interimAvailable",
"timestamp": datetime.now(),
}

class MockContext:
"""Mock context for testing."""

class TestSQLiteDatabase:
"""Test SQLite database operations."""

def test_persist_transactions(self, mock_home_db_path, sample_transactions):
"""Test persisting transactions to database."""
ctx = MockContext()

# Persist transactions
new_transactions = sqlite_db.persist_transactions(
ctx, "test-account-123", sample_transactions
)

# Should return all transactions as new
assert len(new_transactions) == 2
assert new_transactions[0]["internalTransactionId"] == "txn-001"

def test_persist_transactions_duplicates(
self, mock_home_db_path, sample_transactions
):
"""Test handling duplicate transactions."""
ctx = MockContext()

# Insert transactions twice
new_transactions_1 = sqlite_db.persist_transactions(
ctx, "test-account-123", sample_transactions
)
new_transactions_2 = sqlite_db.persist_transactions(
ctx, "test-account-123", sample_transactions
)

# First time should return all as new
assert len(new_transactions_1) == 2
# Second time should also return all (INSERT OR REPLACE behavior with composite key)
assert len(new_transactions_2) == 2

def test_get_transactions_all(self, mock_home_db_path, sample_transactions):
"""Test retrieving all transactions."""
ctx = MockContext()

# Insert test data
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Get all transactions
transactions = sqlite_db.get_transactions()

assert len(transactions) == 2
assert (
transactions[0]["internalTransactionId"] == "txn-002"
) # Ordered by date DESC
assert transactions[1]["internalTransactionId"] == "txn-001"

def test_get_transactions_filtered_by_account(
self, mock_home_db_path, sample_transactions
):
"""Test filtering transactions by account ID."""
ctx = MockContext()

# Add transaction for different account
other_account_transaction = sample_transactions[0].copy()
other_account_transaction["internalTransactionId"] = "txn-003"
other_account_transaction["accountId"] = "other-account"

all_transactions = sample_transactions + [other_account_transaction]

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", all_transactions)

# Filter by account
transactions = sqlite_db.get_transactions(account_id="test-account-123")

assert len(transactions) == 2
for txn in transactions:
assert txn["accountId"] == "test-account-123"

def test_get_transactions_with_pagination(
self, mock_home_db_path, sample_transactions
):
"""Test transaction pagination."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Get first page
transactions_page1 = sqlite_db.get_transactions(limit=1, offset=0)
assert len(transactions_page1) == 1

# Get second page
transactions_page2 = sqlite_db.get_transactions(limit=1, offset=1)
assert len(transactions_page2) == 1

# Should be different transactions
assert (
transactions_page1[0]["internalTransactionId"]
!= transactions_page2[0]["internalTransactionId"]
)

def test_get_transactions_with_amount_filter(
self, mock_home_db_path, sample_transactions
):
"""Test filtering transactions by amount."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Filter by minimum amount (should exclude coffee shop payment)
transactions = sqlite_db.get_transactions(min_amount=-20.0)
assert len(transactions) == 1
assert transactions[0]["transactionValue"] == -10.50

def test_get_transactions_with_search(self, mock_home_db_path, sample_transactions):
"""Test searching transactions by description."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Search for "Coffee"
transactions = sqlite_db.get_transactions(search="Coffee")
assert len(transactions) == 1
assert "Coffee" in transactions[0]["description"]

def test_get_transactions_empty_database(self, mock_home_db_path):
"""Test getting transactions from empty database."""
with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

transactions = sqlite_db.get_transactions()
assert transactions == []

def test_get_transactions_nonexistent_database(self):
"""Test getting transactions when database doesn't exist."""
with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = Path("/nonexistent")

transactions = sqlite_db.get_transactions()
assert transactions == []

def test_persist_balances(self, mock_home_db_path, sample_balance):
"""Test persisting balance data."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

result = sqlite_db.persist_balances(ctx, sample_balance)

# Should return the balance data
assert result["account_id"] == "test-account-123"

def test_get_balances(self, mock_home_db_path, sample_balance):
"""Test retrieving balances."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

# Insert test balance
sqlite_db.persist_balances(ctx, sample_balance)

# Get balances
balances = sqlite_db.get_balances()

assert len(balances) == 1
assert balances[0]["account_id"] == "test-account-123"
assert balances[0]["amount"] == 1000.00

def test_get_balances_filtered_by_account(self, mock_home_db_path, sample_balance):
"""Test filtering balances by account ID."""
ctx = MockContext()

# Create balance for different account
other_balance = sample_balance.copy()
other_balance["account_id"] = "other-account"

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_balances(ctx, sample_balance)
sqlite_db.persist_balances(ctx, other_balance)

# Filter by account
balances = sqlite_db.get_balances(account_id="test-account-123")

assert len(balances) == 1
assert balances[0]["account_id"] == "test-account-123"

def test_get_account_summary(self, mock_home_db_path, sample_transactions):
"""Test getting account summary from transactions."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

summary = sqlite_db.get_account_summary("test-account-123")

assert summary is not None
assert summary["accountId"] == "test-account-123"
assert summary["institutionId"] == "REVOLUT_REVOLT21"
assert summary["iban"] == "LT313250081177977789"

def test_get_account_summary_nonexistent(self, mock_home_db_path):
"""Test getting summary for nonexistent account."""
with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

summary = sqlite_db.get_account_summary("nonexistent")
assert summary is None

def test_get_transaction_count(self, mock_home_db_path, sample_transactions):
"""Test getting transaction count."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Get total count
count = sqlite_db.get_transaction_count()
assert count == 2

# Get count for specific account
count_filtered = sqlite_db.get_transaction_count(
account_id="test-account-123"
)
assert count_filtered == 2

# Get count for nonexistent account
count_none = sqlite_db.get_transaction_count(account_id="nonexistent")
assert count_none == 0

def test_get_transaction_count_with_filters(
self, mock_home_db_path, sample_transactions
):
"""Test getting transaction count with filters."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Filter by search
count = sqlite_db.get_transaction_count(search="Coffee")
assert count == 1

# Filter by amount
count = sqlite_db.get_transaction_count(min_amount=-20.0)
assert count == 1

def test_database_indexes_created(self, mock_home_db_path, sample_transactions):
"""Test that database indexes are created properly."""
ctx = MockContext()

with patch("pathlib.Path.home") as mock_home:
mock_home.return_value = mock_home_db_path.parent / ".."

# Persist transactions to create tables and indexes
sqlite_db.persist_transactions(ctx, "test-account-123", sample_transactions)

# Get transactions to ensure we can query the table (indexes working)
transactions = sqlite_db.get_transactions(account_id="test-account-123")
assert len(transactions) == 2

2
uv.lock
generated
@@ -229,6 +229,7 @@ dependencies = [
{ name = "fastapi" },
{ name = "httpx" },
{ name = "loguru" },
{ name = "pydantic" },
{ name = "requests" },
{ name = "tabulate" },
{ name = "tomli-w" },
@@ -257,6 +258,7 @@ requires-dist = [
{ name = "fastapi", specifier = ">=0.104.0,<1" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "loguru", specifier = ">=0.7.2,<0.8" },
{ name = "pydantic", specifier = ">=2.0.0,<3" },
{ name = "requests", specifier = ">=2.31.0,<3" },
{ name = "tabulate", specifier = ">=0.9.0,<0.10" },
{ name = "tomli-w", specifier = ">=1.0.0,<2" },