refactor: Migrate database service to SQLModel and Alembic

- Add SQLModel for type-safe database models
- Implement Alembic for schema migration management
- Create 7 migrations covering all existing schema changes
- Add automatic migration system that runs on startup
- Maintain backward compatibility with existing raw SQL queries
- Remove old manual migration system
- All tests pass (109 tests)

Benefits:
- Full type safety with Pydantic validation
- Version-controlled schema changes
- Automatic migration detection and application
- Better developer experience with typed models
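The startup migration system mentioned above is not visible in this excerpt (it lands in the truncated DatabaseService hunk at the end). As a rough sketch only, not the commit's actual wiring, running pending Alembic migrations at startup can be done through Alembic's Python API; the config path here is an assumption:

from pathlib import Path

from alembic import command
from alembic.config import Config


def run_migrations_on_startup() -> None:
    # Hypothetical location: point Alembic at the repository's alembic.ini.
    cfg = Config(str(Path(__file__).parent / "alembic.ini"))
    # Apply any pending revisions in order, equivalent to `alembic upgrade head`.
    command.upgrade(cfg, "head")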
alembic.ini (new file, 148 lines)
@@ -0,0 +1,148 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
#    "version_path_separator" key, which if absent then falls back to the legacy
#    behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
#    behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
# Note: The actual URL is configured programmatically in env.py
# sqlalchemy.url = driver://user:pass@localhost/dbname

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
alembic/README (new file, 1 line)
@@ -0,0 +1 @@
Generic single-database configuration.
alembic/env.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context
from leggen.models.database import SQLModel
from leggen.services.database import get_database_url

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Set the database URL from our configuration
config.set_main_option("sqlalchemy.url", get_database_url())

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
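Because env.py wires target_metadata to SQLModel.metadata, Alembic can autogenerate future revisions by diffing the models against the live schema. A hypothetical example of that workflow through Alembic's Python API (the revision message is invented; the CLI equivalent is `alembic revision --autogenerate -m "..."`):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
# Compare SQLModel.metadata against the database and emit a new
# versions/<rev>_<slug>.py file rendered from script.py.mako.
command.revision(cfg, message="add example column", autogenerate=True)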
alembic/script.py.mako (new file, 28 lines)
@@ -0,0 +1,28 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
alembic/versions/1ba02efe481c_migrate_to_composite_key.py (new file, 102 lines)
@@ -0,0 +1,102 @@
"""migrate_to_composite_key

Migrate transactions table to use composite primary key (accountId, transactionId).

Revision ID: 1ba02efe481c
Revises: bf30246cb723
Create Date: 2025-09-30 23:16:34.637762

"""

from typing import Sequence, Union

from sqlalchemy import text

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "1ba02efe481c"
down_revision: Union[str, Sequence[str], None] = "bf30246cb723"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Migrate to composite primary key."""
    conn = op.get_bind()

    # Check if migration is needed
    result = conn.execute(
        text(
            """
            SELECT name FROM sqlite_master
            WHERE type='table' AND name='transactions'
            """
        )
    )

    if not result.fetchone():
        return

    # Create temporary table with new schema
    op.execute("""
        CREATE TABLE transactions_temp (
            accountId TEXT NOT NULL,
            transactionId TEXT NOT NULL,
            internalTransactionId TEXT,
            institutionId TEXT NOT NULL,
            iban TEXT,
            transactionDate DATETIME,
            description TEXT,
            transactionValue REAL,
            transactionCurrency TEXT,
            transactionStatus TEXT,
            rawTransaction JSON NOT NULL,
            PRIMARY KEY (accountId, transactionId)
        )
    """)

    # Insert deduplicated data (keep most recent duplicate)
    op.execute("""
        INSERT INTO transactions_temp
        SELECT
            accountId,
            json_extract(rawTransaction, '$.transactionId') as transactionId,
            internalTransactionId,
            institutionId,
            iban,
            transactionDate,
            description,
            transactionValue,
            transactionCurrency,
            transactionStatus,
            rawTransaction
        FROM (
            SELECT *,
                ROW_NUMBER() OVER (
                    PARTITION BY accountId, json_extract(rawTransaction, '$.transactionId')
                    ORDER BY transactionDate DESC, rowid DESC
                ) as rn
            FROM transactions
            WHERE json_extract(rawTransaction, '$.transactionId') IS NOT NULL
              AND accountId IS NOT NULL
        ) WHERE rn = 1
    """)

    # Replace tables
    op.execute("DROP TABLE transactions")
    op.execute("ALTER TABLE transactions_temp RENAME TO transactions")

    # Recreate indexes
    op.create_index(
        "idx_transactions_internal_id", "transactions", ["internalTransactionId"]
    )
    op.create_index("idx_transactions_date", "transactions", ["transactionDate"])
    op.create_index(
        "idx_transactions_account_date",
        "transactions",
        ["accountId", "transactionDate"],
    )
    op.create_index("idx_transactions_amount", "transactions", ["transactionValue"])


def downgrade() -> None:
    """Not implemented - would require changing primary key back."""
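SQLite cannot change a table's primary key in place, hence the create-temp/copy/swap dance above. The deduplication window keeps exactly one row per (accountId, transactionId), preferring the latest transactionDate and, on ties, the highest rowid. A toy Python illustration of that "keep newest" rule, with made-up rows (not part of the commit):

from itertools import groupby

rows = [
    {"accountId": "a1", "transactionId": "t1", "transactionDate": "2025-01-01", "rowid": 1},
    {"accountId": "a1", "transactionId": "t1", "transactionDate": "2025-02-01", "rowid": 2},
    {"accountId": "a1", "transactionId": "t2", "transactionDate": "2025-01-15", "rowid": 3},
]


def key(r):
    return (r["accountId"], r["transactionId"])


# Equivalent to ROW_NUMBER() ... ORDER BY transactionDate DESC, rowid DESC, rn = 1:
deduped = [
    max(group, key=lambda r: (r["transactionDate"], r["rowid"]))
    for _, group in groupby(sorted(rows, key=key), key=key)
]
# Keeps rowid 2 for ("a1", "t1") and rowid 3 for ("a1", "t2").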
alembic/versions/4819c868ebc1_add_transaction_enrichments_table.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""add_transaction_enrichments_table

Add transaction_enrichments table for storing enriched transaction data.

Revision ID: 4819c868ebc1
Revises: dd9f6a55604c
Create Date: 2025-09-30 23:20:00.969614

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "4819c868ebc1"
down_revision: Union[str, Sequence[str], None] = "dd9f6a55604c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create transaction_enrichments table."""
    op.create_table(
        "transaction_enrichments",
        sa.Column("accountId", sa.String(), nullable=False),
        sa.Column("transactionId", sa.String(), nullable=False),
        sa.Column("clean_name", sa.String(), nullable=True),
        sa.Column("category", sa.String(), nullable=True),
        sa.Column("logo_url", sa.String(), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["accountId", "transactionId"],
            ["transactions.accountId", "transactions.transactionId"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("accountId", "transactionId"),
    )

    # Create indexes
    op.create_index(
        "idx_transaction_enrichments_category", "transaction_enrichments", ["category"]
    )
    op.create_index(
        "idx_transaction_enrichments_clean_name",
        "transaction_enrichments",
        ["clean_name"],
    )


def downgrade() -> None:
    """Drop transaction_enrichments table."""
    op.drop_table("transaction_enrichments")
alembic/versions/be8d5807feca_add_display_name_column.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""add_display_name_column

Add display_name column to accounts table.

Revision ID: be8d5807feca
Revises: 1ba02efe481c
Create Date: 2025-09-30 23:16:34.929968

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "be8d5807feca"
down_revision: Union[str, Sequence[str], None] = "1ba02efe481c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add display_name column to accounts table."""
    with op.batch_alter_table("accounts", schema=None) as batch_op:
        batch_op.add_column(sa.Column("display_name", sa.String(), nullable=True))


def downgrade() -> None:
    """Remove display_name column."""
    with op.batch_alter_table("accounts", schema=None) as batch_op:
        batch_op.drop_column("display_name")
alembic/versions/bf30246cb723_migrate_balance_timestamps.py (new file, 62 lines)
@@ -0,0 +1,62 @@
"""migrate_balance_timestamps

Convert Unix timestamps to datetime strings in balances table.

Revision ID: bf30246cb723
Revises: de8bfb1169d4
Create Date: 2025-09-30 23:14:03.128959

"""

from datetime import datetime
from typing import Sequence, Union

from sqlalchemy import text

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "bf30246cb723"
down_revision: Union[str, Sequence[str], None] = "de8bfb1169d4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Convert all Unix timestamps to datetime strings."""
    conn = op.get_bind()

    # Get all balances with REAL timestamps
    result = conn.execute(
        text(
            """
            SELECT id, timestamp
            FROM balances
            WHERE typeof(timestamp) = 'real'
            ORDER BY id
            """
        )
    )

    unix_records = result.fetchall()

    if not unix_records:
        return

    # Convert and update each record; skip any value that cannot be parsed
    for record_id, unix_timestamp in unix_records:
        try:
            # Convert Unix timestamp to datetime string
            dt_string = datetime.fromtimestamp(float(unix_timestamp)).isoformat()

            # Update the record
            conn.execute(
                text("UPDATE balances SET timestamp = :dt WHERE id = :id"),
                {"dt": dt_string, "id": record_id},
            )
        except Exception:
            continue

    conn.commit()


def downgrade() -> None:
    """Not implemented - converting back would lose precision."""
alembic/versions/dd9f6a55604c_add_logo_column.py (new file, 33 lines)
@@ -0,0 +1,33 @@
"""add_logo_column

Add logo column to accounts table.

Revision ID: dd9f6a55604c
Revises: f854fd498a6e
Create Date: 2025-09-30 23:16:35.530858

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "dd9f6a55604c"
down_revision: Union[str, Sequence[str], None] = "f854fd498a6e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add logo column to accounts table."""
    with op.batch_alter_table("accounts", schema=None) as batch_op:
        batch_op.add_column(sa.Column("logo", sa.String(), nullable=True))


def downgrade() -> None:
    """Remove logo column."""
    with op.batch_alter_table("accounts", schema=None) as batch_op:
        batch_op.drop_column("logo")
alembic/versions/de8bfb1169d4_create_initial_tables.py (new file, 95 lines)
@@ -0,0 +1,95 @@
"""create_initial_tables

Revision ID: de8bfb1169d4
Revises:
Create Date: 2025-09-30 23:09:24.255875

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "de8bfb1169d4"
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create initial database tables."""
    # Create accounts table
    op.create_table(
        "accounts",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("institution_id", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("iban", sa.String(), nullable=True),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("currency", sa.String(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("last_accessed", sa.DateTime(), nullable=True),
        sa.Column("last_updated", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("idx_accounts_institution_id", "accounts", ["institution_id"])
    op.create_index("idx_accounts_status", "accounts", ["status"])

    # Create balances table
    op.create_table(
        "balances",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("account_id", sa.String(), nullable=False),
        sa.Column("bank", sa.String(), nullable=False),
        sa.Column("status", sa.String(), nullable=False),
        sa.Column("iban", sa.String(), nullable=False),
        sa.Column("amount", sa.Float(), nullable=False),
        sa.Column("currency", sa.String(), nullable=False),
        sa.Column("type", sa.String(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index("idx_balances_account_id", "balances", ["account_id"])
    op.create_index("idx_balances_timestamp", "balances", ["timestamp"])
    op.create_index(
        "idx_balances_account_type_timestamp",
        "balances",
        ["account_id", "type", "timestamp"],
    )

    # Create transactions table (old schema with internalTransactionId as PK)
    op.create_table(
        "transactions",
        sa.Column("accountId", sa.String(), nullable=False),
        sa.Column("transactionId", sa.String(), nullable=False),
        sa.Column("internalTransactionId", sa.String(), nullable=True),
        sa.Column("institutionId", sa.String(), nullable=False),
        sa.Column("iban", sa.String(), nullable=True),
        sa.Column("transactionDate", sa.DateTime(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("transactionValue", sa.Float(), nullable=True),
        sa.Column("transactionCurrency", sa.String(), nullable=True),
        sa.Column("transactionStatus", sa.String(), nullable=True),
        sa.Column("rawTransaction", sa.JSON(), nullable=False),
        sa.PrimaryKeyConstraint("internalTransactionId"),
    )
    op.create_index(
        "idx_transactions_internal_id", "transactions", ["internalTransactionId"]
    )
    op.create_index("idx_transactions_date", "transactions", ["transactionDate"])
    op.create_index(
        "idx_transactions_account_date",
        "transactions",
        ["accountId", "transactionDate"],
    )
    op.create_index("idx_transactions_amount", "transactions", ["transactionValue"])


def downgrade() -> None:
    """Drop initial tables."""
    op.drop_table("transactions")
    op.drop_table("balances")
    op.drop_table("accounts")
alembic/versions/f854fd498a6e_add_sync_operations_table.py (new file, 59 lines)
@@ -0,0 +1,59 @@
"""add_sync_operations_table

Add sync_operations table for tracking synchronization operations.

Revision ID: f854fd498a6e
Revises: be8d5807feca
Create Date: 2025-09-30 23:16:35.229062

"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "f854fd498a6e"
down_revision: Union[str, Sequence[str], None] = "be8d5807feca"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create sync_operations table."""
    op.create_table(
        "sync_operations",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("started_at", sa.DateTime(), nullable=False),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("success", sa.Boolean(), nullable=True),
        sa.Column(
            "accounts_processed", sa.Integer(), nullable=False, server_default="0"
        ),
        sa.Column(
            "transactions_added", sa.Integer(), nullable=False, server_default="0"
        ),
        sa.Column(
            "transactions_updated", sa.Integer(), nullable=False, server_default="0"
        ),
        sa.Column("balances_updated", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("duration_seconds", sa.Float(), nullable=True),
        sa.Column("errors", sa.String(), nullable=True),
        sa.Column("logs", sa.String(), nullable=True),
        sa.Column("trigger_type", sa.String(), nullable=False, server_default="manual"),
        sa.PrimaryKeyConstraint("id"),
    )

    # Create indexes
    op.create_index("idx_sync_operations_started_at", "sync_operations", ["started_at"])
    op.create_index("idx_sync_operations_success", "sync_operations", ["success"])
    op.create_index(
        "idx_sync_operations_trigger_type", "sync_operations", ["trigger_type"]
    )


def downgrade() -> None:
    """Drop sync_operations table."""
    op.drop_table("sync_operations")
leggen/models/database.py (new file, 93 lines)
@@ -0,0 +1,93 @@
"""SQLModel database models for Leggen."""

from datetime import datetime
from typing import Optional

from sqlmodel import JSON, Column, Field, SQLModel


class Account(SQLModel, table=True):
    """Account model."""

    __tablename__ = "accounts"

    id: str = Field(primary_key=True)
    institution_id: str = Field(index=True)
    status: str = Field(index=True)
    iban: Optional[str] = None
    name: Optional[str] = None
    currency: Optional[str] = None
    created: datetime
    last_accessed: Optional[datetime] = None
    last_updated: Optional[datetime] = None
    display_name: Optional[str] = None
    logo: Optional[str] = None


class Balance(SQLModel, table=True):
    """Balance model."""

    __tablename__ = "balances"

    id: Optional[int] = Field(default=None, primary_key=True)
    account_id: str = Field(index=True)
    bank: str
    status: str
    iban: str
    amount: float
    currency: str
    type: str
    timestamp: datetime = Field(index=True)


class Transaction(SQLModel, table=True):
    """Transaction model."""

    __tablename__ = "transactions"

    accountId: str = Field(primary_key=True)
    transactionId: str = Field(primary_key=True)
    internalTransactionId: Optional[str] = Field(default=None, index=True)
    institutionId: str
    iban: Optional[str] = None
    transactionDate: Optional[datetime] = Field(default=None, index=True)
    description: Optional[str] = None
    transactionValue: Optional[float] = Field(default=None, index=True)
    transactionCurrency: Optional[str] = None
    transactionStatus: Optional[str] = None
    rawTransaction: dict = Field(sa_column=Column(JSON))


class TransactionEnrichment(SQLModel, table=True):
    """Transaction enrichment model."""

    __tablename__ = "transaction_enrichments"

    accountId: str = Field(primary_key=True, foreign_key="transactions.accountId")
    transactionId: str = Field(
        primary_key=True, foreign_key="transactions.transactionId"
    )
    clean_name: Optional[str] = Field(default=None, index=True)
    category: Optional[str] = Field(default=None, index=True)
    logo_url: Optional[str] = None
    created_at: datetime
    updated_at: datetime


class SyncOperation(SQLModel, table=True):
    """Sync operation model."""

    __tablename__ = "sync_operations"

    id: Optional[int] = Field(default=None, primary_key=True)
    started_at: datetime = Field(index=True)
    completed_at: Optional[datetime] = None
    success: Optional[bool] = Field(default=None, index=True)
    accounts_processed: int = Field(default=0)
    transactions_added: int = Field(default=0)
    transactions_updated: int = Field(default=0)
    balances_updated: int = Field(default=0)
    duration_seconds: Optional[float] = None
    errors: Optional[str] = None
    logs: Optional[str] = None
    trigger_type: str = Field(default="manual", index=True)
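These table models turn reads into typed queries instead of raw SQL. A brief usage sketch, not part of the commit (the "READY" status value is an assumption), using the session helper defined in the next file:

from sqlmodel import select

from leggen.models.database import Account
from leggen.services.database import get_session

with get_session() as session:
    # session.exec() yields validated Account instances rather than raw rows.
    accounts = session.exec(select(Account).where(Account.status == "READY")).all()
    for account in accounts:
        print(account.id, account.display_name or account.name)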
leggen/services/database.py (new file, 65 lines)
@@ -0,0 +1,65 @@
"""Database connection and session management using SQLModel."""

from contextlib import contextmanager
from typing import Generator

from loguru import logger
from sqlalchemy import create_engine
from sqlalchemy.pool import StaticPool
from sqlmodel import Session, SQLModel

from leggen.utils.paths import path_manager

_engine = None


def get_database_url() -> str:
    """Get the database URL for SQLAlchemy."""
    db_path = path_manager.get_database_path()
    return f"sqlite:///{db_path}"


def get_engine():
    """Get or create the database engine."""
    global _engine
    if _engine is None:
        db_url = get_database_url()
        _engine = create_engine(
            db_url,
            connect_args={"check_same_thread": False},
            poolclass=StaticPool,
            echo=False,
        )
    return _engine


def create_db_and_tables():
    """Create all database tables."""
    engine = get_engine()
    SQLModel.metadata.create_all(engine)
    logger.info("Database tables created/verified")


@contextmanager
def get_session() -> Generator[Session, None, None]:
    """Get a database session context manager.

    Usage:
        with get_session() as session:
            result = session.exec(select(Account)).all()
    """
    session = Session(get_engine())
    try:
        yield session
        session.commit()
    except Exception as e:
        session.rollback()
        logger.error(f"Database session error: {e}")
        raise
    finally:
        session.close()


def init_database():
    """Initialize the database with tables."""
    create_db_and_tables()
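StaticPool combined with check_same_thread=False shares one SQLite connection across threads and leans on SQLite's own locking, a reasonable fit for a single-process service. A write-path sketch through this helper (illustrative only; the field values are invented):

from datetime import datetime

from leggen.models.database import SyncOperation
from leggen.services.database import get_session, init_database

init_database()  # create any missing tables; Alembic owns schema changes thereafter

with get_session() as session:
    # Commit on success and rollback on error are handled by the context manager.
    session.add(SyncOperation(started_at=datetime.now(), trigger_type="manual"))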
leggen/services/database_helpers.py (modified)
@@ -1,4 +1,4 @@
-"""Database helper utilities for Leggen."""
+"""Database helper utilities for Leggen - Compatibility layer."""
 
 import sqlite3
 from contextlib import contextmanager
|||||||
@@ -1,657 +0,0 @@
|
|||||||
"""Database migration functions for Leggen."""
|
|
||||||
|
|
||||||
import sqlite3
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from loguru import logger
|
|
||||||
|
|
||||||
|
|
||||||
def run_all_migrations(db_path: Path) -> None:
|
|
||||||
"""Run all necessary database migrations."""
|
|
||||||
if not db_path.exists():
|
|
||||||
logger.info("Database file not found, skipping migrations")
|
|
||||||
return
|
|
||||||
|
|
||||||
migrate_balance_timestamps_if_needed(db_path)
|
|
||||||
migrate_null_transaction_ids_if_needed(db_path)
|
|
||||||
migrate_to_composite_key_if_needed(db_path)
|
|
||||||
migrate_add_display_name_if_needed(db_path)
|
|
||||||
migrate_add_sync_operations_if_needed(db_path)
|
|
||||||
migrate_add_logo_if_needed(db_path)
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_balance_timestamps_if_needed(db_path: Path) -> None:
|
|
||||||
"""Check and migrate balance timestamps if needed."""
|
|
||||||
try:
|
|
||||||
if _check_balance_timestamp_migration_needed(db_path):
|
|
||||||
logger.info("Balance timestamp migration needed, starting...")
|
|
||||||
_migrate_balance_timestamps(db_path)
|
|
||||||
logger.info("Balance timestamp migration completed")
|
|
||||||
else:
|
|
||||||
logger.info("Balance timestamps are already consistent")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Balance timestamp migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def _check_balance_timestamp_migration_needed(db_path: Path) -> bool:
|
|
||||||
"""Check if balance timestamps need migration."""
|
|
||||||
if not db_path.exists():
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Check for mixed timestamp types
|
|
||||||
cursor.execute("""
|
|
||||||
SELECT typeof(timestamp) as type, COUNT(*) as count
|
|
||||||
FROM balances
|
|
||||||
GROUP BY typeof(timestamp)
|
|
||||||
""")
|
|
||||||
|
|
||||||
types = cursor.fetchall()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
# If we have both 'real' and 'text' types, migration is needed
|
|
||||||
type_names = [row[0] for row in types]
|
|
||||||
return "real" in type_names and "text" in type_names
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to check migration status: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _migrate_balance_timestamps(db_path: Path) -> None:
|
|
||||||
"""Convert all Unix timestamps to datetime strings."""
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Get all balances with REAL timestamps
|
|
||||||
cursor.execute("""
|
|
||||||
SELECT id, timestamp
|
|
||||||
FROM balances
|
|
||||||
WHERE typeof(timestamp) = 'real'
|
|
||||||
ORDER BY id
|
|
||||||
""")
|
|
||||||
|
|
||||||
unix_records = cursor.fetchall()
|
|
||||||
total_records = len(unix_records)
|
|
||||||
|
|
||||||
if total_records == 0:
|
|
||||||
logger.info("No Unix timestamps found to migrate")
|
|
||||||
conn.close()
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f"Migrating {total_records} balance records from Unix to datetime format"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Convert and update in batches
|
|
||||||
batch_size = 100
|
|
||||||
migrated_count = 0
|
|
||||||
|
|
||||||
for i in range(0, total_records, batch_size):
|
|
||||||
batch = unix_records[i : i + batch_size]
|
|
||||||
|
|
||||||
for record_id, unix_timestamp in batch:
|
|
||||||
try:
|
|
||||||
# Convert Unix timestamp to datetime string
|
|
||||||
dt_string = _unix_to_datetime_string(float(unix_timestamp))
|
|
||||||
|
|
||||||
# Update the record
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
UPDATE balances
|
|
||||||
SET timestamp = ?
|
|
||||||
WHERE id = ?
|
|
||||||
""",
|
|
||||||
(dt_string, record_id),
|
|
||||||
)
|
|
||||||
|
|
||||||
migrated_count += 1
|
|
||||||
|
|
||||||
if migrated_count % 100 == 0:
|
|
||||||
logger.info(
|
|
||||||
f"Migrated {migrated_count}/{total_records} balance records"
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to migrate record {record_id}: {e}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Commit batch
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
conn.close()
|
|
||||||
logger.info(f"Successfully migrated {migrated_count} balance records")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Balance timestamp migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_null_transaction_ids_if_needed(db_path: Path) -> None:
|
|
||||||
"""Check and migrate null transaction IDs if needed."""
|
|
||||||
try:
|
|
||||||
if _check_null_transaction_ids_migration_needed(db_path):
|
|
||||||
logger.info("Null transaction IDs migration needed, starting...")
|
|
||||||
_migrate_null_transaction_ids(db_path)
|
|
||||||
logger.info("Null transaction IDs migration completed")
|
|
||||||
else:
|
|
||||||
logger.info("No null transaction IDs found to migrate")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Null transaction IDs migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def _check_null_transaction_ids_migration_needed(db_path: Path) -> bool:
|
|
||||||
"""Check if null transaction IDs need migration."""
|
|
||||||
if not db_path.exists():
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Check for transactions with null or empty internalTransactionId
|
|
||||||
cursor.execute("""
|
|
||||||
SELECT COUNT(*)
|
|
||||||
FROM transactions
|
|
||||||
WHERE (internalTransactionId IS NULL OR internalTransactionId = '')
|
|
||||||
AND json_extract(rawTransaction, '$.transactionId') IS NOT NULL
|
|
||||||
""")
|
|
||||||
|
|
||||||
count = cursor.fetchone()[0]
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
return count > 0
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to check null transaction IDs migration status: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _migrate_null_transaction_ids(db_path: Path) -> None:
|
|
||||||
"""Populate null internalTransactionId fields using transactionId from raw data."""
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Get all transactions with null/empty internalTransactionId but valid transactionId in raw data
|
|
||||||
cursor.execute("""
|
|
||||||
SELECT rowid, json_extract(rawTransaction, '$.transactionId') as transactionId
|
|
||||||
FROM transactions
|
|
||||||
WHERE (internalTransactionId IS NULL OR internalTransactionId = '')
|
|
||||||
AND json_extract(rawTransaction, '$.transactionId') IS NOT NULL
|
|
||||||
ORDER BY rowid
|
|
||||||
""")
|
|
||||||
|
|
||||||
null_records = cursor.fetchall()
|
|
||||||
total_records = len(null_records)
|
|
||||||
|
|
||||||
if total_records == 0:
|
|
||||||
logger.info("No null transaction IDs found to migrate")
|
|
||||||
conn.close()
|
|
||||||
return
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
f"Migrating {total_records} transaction records with null internalTransactionId"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Update in batches
|
|
||||||
batch_size = 100
|
|
||||||
migrated_count = 0
|
|
||||||
skipped_duplicates = 0
|
|
||||||
|
|
||||||
for i in range(0, total_records, batch_size):
|
|
||||||
batch = null_records[i : i + batch_size]
|
|
||||||
|
|
||||||
for rowid, transaction_id in batch:
|
|
||||||
try:
|
|
||||||
# Check if this transactionId is already used by another record
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT COUNT(*) FROM transactions WHERE internalTransactionId = ?",
|
|
||||||
(str(transaction_id),),
|
|
||||||
)
|
|
||||||
existing_count = cursor.fetchone()[0]
|
|
||||||
|
|
||||||
if existing_count > 0:
|
|
||||||
# Generate a unique ID to avoid constraint violation
|
|
||||||
unique_id = f"{str(transaction_id)}_{uuid.uuid4().hex[:8]}"
|
|
||||||
logger.debug(
|
|
||||||
f"Generated unique ID for duplicate transactionId: {unique_id}"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# Use the original transactionId
|
|
||||||
unique_id = str(transaction_id)
|
|
||||||
|
|
||||||
# Update the record
|
|
||||||
cursor.execute(
|
|
||||||
"""
|
|
||||||
UPDATE transactions
|
|
||||||
SET internalTransactionId = ?
|
|
||||||
WHERE rowid = ?
|
|
||||||
""",
|
|
||||||
(unique_id, rowid),
|
|
||||||
)
|
|
||||||
|
|
||||||
migrated_count += 1
|
|
||||||
|
|
||||||
if migrated_count % 100 == 0:
|
|
||||||
logger.info(
|
|
||||||
f"Migrated {migrated_count}/{total_records} transaction records"
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to migrate record {rowid}: {e}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Commit batch
|
|
||||||
conn.commit()
|
|
||||||
|
|
||||||
conn.close()
|
|
||||||
logger.info(f"Successfully migrated {migrated_count} transaction records")
|
|
||||||
if skipped_duplicates > 0:
|
|
||||||
logger.info(
|
|
||||||
f"Generated unique IDs for {skipped_duplicates} duplicate transactionIds"
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Null transaction IDs migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_to_composite_key_if_needed(db_path: Path) -> None:
|
|
||||||
"""Check and migrate to composite primary key if needed."""
|
|
||||||
try:
|
|
||||||
if _check_composite_key_migration_needed(db_path):
|
|
||||||
logger.info("Composite key migration needed, starting...")
|
|
||||||
_migrate_to_composite_key(db_path)
|
|
||||||
logger.info("Composite key migration completed")
|
|
||||||
else:
|
|
||||||
logger.info("Composite key migration not needed")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Composite key migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def _check_composite_key_migration_needed(db_path: Path) -> bool:
|
|
||||||
"""Check if composite key migration is needed."""
|
|
||||||
if not db_path.exists():
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Check if transactions table exists
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='transactions'"
|
|
||||||
)
|
|
||||||
if not cursor.fetchone():
|
|
||||||
conn.close()
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Check if transactions table has the old primary key structure
|
|
||||||
cursor.execute("PRAGMA table_info(transactions)")
|
|
||||||
columns = cursor.fetchall()
|
|
||||||
|
|
||||||
# Check if internalTransactionId is the primary key (old structure)
|
|
||||||
internal_transaction_id_is_pk = any(
|
|
||||||
col[1] == "internalTransactionId" and col[5] == 1 # col[5] is pk flag
|
|
||||||
for col in columns
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if we have the new composite primary key structure
|
|
||||||
has_composite_key = any(
|
|
||||||
col[1] in ["accountId", "transactionId"]
|
|
||||||
and col[5] == 1 # col[5] is pk flag
|
|
||||||
for col in columns
|
|
||||||
)
|
|
||||||
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
# Migration is needed if:
|
|
||||||
# 1. internalTransactionId is still the primary key (old structure), OR
|
|
||||||
# 2. We don't have the new composite key structure yet
|
|
||||||
return internal_transaction_id_is_pk or not has_composite_key
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to check composite key migration status: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _migrate_to_composite_key(db_path: Path) -> None:
|
|
||||||
"""Migrate transactions table to use composite primary key (accountId, transactionId)."""
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
logger.info("Starting composite key migration...")
|
|
||||||
|
|
||||||
# Step 1: Create temporary table with new schema
|
|
||||||
logger.info("Creating temporary table with composite primary key...")
|
|
||||||
cursor.execute("DROP TABLE IF EXISTS transactions_temp")
|
|
||||||
cursor.execute("""
|
|
||||||
CREATE TABLE transactions_temp (
|
|
||||||
accountId TEXT NOT NULL,
|
|
||||||
transactionId TEXT NOT NULL,
|
|
||||||
internalTransactionId TEXT,
|
|
||||||
institutionId TEXT,
|
|
||||||
iban TEXT,
|
|
||||||
transactionDate DATETIME,
|
|
||||||
description TEXT,
|
|
||||||
transactionValue REAL,
|
|
||||||
transactionCurrency TEXT,
|
|
||||||
transactionStatus TEXT,
|
|
||||||
rawTransaction JSON,
|
|
||||||
PRIMARY KEY (accountId, transactionId)
|
|
||||||
)
|
|
||||||
""")
|
|
||||||
|
|
||||||
# Step 2: Insert deduplicated data (keep most recent duplicate)
|
|
||||||
logger.info("Inserting deduplicated data...")
|
|
||||||
cursor.execute("""
|
|
||||||
INSERT INTO transactions_temp
|
|
||||||
SELECT
|
|
||||||
accountId,
|
|
||||||
json_extract(rawTransaction, '$.transactionId') as transactionId,
|
|
||||||
internalTransactionId,
|
|
||||||
institutionId,
|
|
||||||
iban,
|
|
||||||
transactionDate,
|
|
||||||
description,
|
|
||||||
transactionValue,
|
|
||||||
transactionCurrency,
|
|
||||||
transactionStatus,
|
|
||||||
rawTransaction
|
|
||||||
FROM (
|
|
||||||
SELECT *,
|
|
||||||
ROW_NUMBER() OVER (
|
|
||||||
PARTITION BY accountId, json_extract(rawTransaction, '$.transactionId')
|
|
||||||
ORDER BY transactionDate DESC, rowid DESC
|
|
||||||
) as rn
|
|
||||||
FROM transactions
|
|
||||||
WHERE json_extract(rawTransaction, '$.transactionId') IS NOT NULL
|
|
||||||
AND accountId IS NOT NULL
|
|
||||||
) WHERE rn = 1
|
|
||||||
""")
|
|
||||||
|
|
||||||
# Get counts for reporting
|
|
||||||
cursor.execute("SELECT COUNT(*) FROM transactions")
|
|
||||||
old_count = cursor.fetchone()[0]
|
|
||||||
|
|
||||||
cursor.execute("SELECT COUNT(*) FROM transactions_temp")
|
|
||||||
new_count = cursor.fetchone()[0]
|
|
||||||
|
|
||||||
duplicates_removed = old_count - new_count
|
|
||||||
logger.info(
|
|
||||||
f"Migration stats: {old_count} → {new_count} records ({duplicates_removed} duplicates removed)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Step 3: Replace tables
|
|
||||||
logger.info("Replacing tables...")
|
|
||||||
cursor.execute("ALTER TABLE transactions RENAME TO transactions_old")
|
|
||||||
cursor.execute("ALTER TABLE transactions_temp RENAME TO transactions")
|
|
||||||
|
|
||||||
# Step 4: Recreate indexes
|
|
||||||
logger.info("Recreating indexes...")
|
|
||||||
cursor.execute(
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_transactions_internal_id ON transactions(internalTransactionId)"
|
|
||||||
)
|
|
||||||
cursor.execute(
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_transactions_date ON transactions(transactionDate)"
|
|
||||||
)
|
|
||||||
cursor.execute(
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_transactions_account_date ON transactions(accountId, transactionDate)"
|
|
||||||
)
|
|
||||||
cursor.execute(
|
|
||||||
"CREATE INDEX IF NOT EXISTS idx_transactions_amount ON transactions(transactionValue)"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Step 5: Cleanup
|
|
||||||
logger.info("Cleaning up...")
|
|
||||||
cursor.execute("DROP TABLE transactions_old")
|
|
||||||
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
logger.info("Composite key migration completed successfully")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Composite key migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_add_display_name_if_needed(db_path: Path) -> None:
|
|
||||||
"""Check and add display_name column to accounts table if needed."""
|
|
||||||
try:
|
|
||||||
if _check_display_name_migration_needed(db_path):
|
|
||||||
logger.info("Display name column migration needed, starting...")
|
|
||||||
_migrate_add_display_name(db_path)
|
|
||||||
logger.info("Display name column migration completed")
|
|
||||||
else:
|
|
||||||
logger.info("Display name column already exists")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Display name column migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def _check_display_name_migration_needed(db_path: Path) -> bool:
|
|
||||||
"""Check if display_name column needs to be added to accounts table."""
|
|
||||||
if not db_path.exists():
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Check if accounts table exists
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='accounts'"
|
|
||||||
)
|
|
||||||
if not cursor.fetchone():
|
|
||||||
conn.close()
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Check if display_name column exists
|
|
||||||
cursor.execute("PRAGMA table_info(accounts)")
|
|
||||||
columns = cursor.fetchall()
|
|
||||||
|
|
||||||
# Check if display_name column exists
|
|
||||||
has_display_name = any(col[1] == "display_name" for col in columns)
|
|
||||||
|
|
||||||
conn.close()
|
|
||||||
return not has_display_name
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to check display_name migration status: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _migrate_add_display_name(db_path: Path) -> None:
|
|
||||||
"""Add display_name column to accounts table."""
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
logger.info("Adding display_name column to accounts table...")
|
|
||||||
|
|
||||||
# Add the display_name column
|
|
||||||
cursor.execute("""
|
|
||||||
ALTER TABLE accounts
|
|
||||||
ADD COLUMN display_name TEXT
|
|
||||||
""")
|
|
||||||
|
|
||||||
conn.commit()
|
|
||||||
conn.close()
|
|
||||||
|
|
||||||
logger.info("Display name column migration completed successfully")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Display name column migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def migrate_add_sync_operations_if_needed(db_path: Path) -> None:
|
|
||||||
"""Check and add sync_operations table if needed."""
|
|
||||||
try:
|
|
||||||
if _check_sync_operations_migration_needed(db_path):
|
|
||||||
logger.info("Sync operations table migration needed, starting...")
|
|
||||||
_migrate_add_sync_operations(db_path)
|
|
||||||
logger.info("Sync operations table migration completed")
|
|
||||||
else:
|
|
||||||
logger.info("Sync operations table already exists")
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Sync operations table migration failed: {e}")
|
|
||||||
raise
|
|
||||||
|
|
||||||
|
|
||||||
def _check_sync_operations_migration_needed(db_path: Path) -> bool:
|
|
||||||
"""Check if sync_operations table needs to be created."""
|
|
||||||
if not db_path.exists():
|
|
||||||
return False
|
|
||||||
|
|
||||||
try:
|
|
||||||
conn = sqlite3.connect(str(db_path))
|
|
||||||
cursor = conn.cursor()
|
|
||||||
|
|
||||||
# Check if sync_operations table exists
|
|
||||||
cursor.execute(
|
|
||||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='sync_operations'"
|
|
||||||
)
|
|
||||||
table_exists = cursor.fetchone() is not None
|
|
||||||
|
|
||||||
conn.close()
|
|
||||||
return not table_exists
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Failed to check sync_operations migration status: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||


def _migrate_add_sync_operations(db_path: Path) -> None:
    """Add sync_operations table."""
    try:
        conn = sqlite3.connect(str(db_path))
        cursor = conn.cursor()

        logger.info("Creating sync_operations table...")

        # Create the sync_operations table
        cursor.execute("""
            CREATE TABLE sync_operations (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                started_at DATETIME NOT NULL,
                completed_at DATETIME,
                success BOOLEAN,
                accounts_processed INTEGER DEFAULT 0,
                transactions_added INTEGER DEFAULT 0,
                transactions_updated INTEGER DEFAULT 0,
                balances_updated INTEGER DEFAULT 0,
                duration_seconds REAL,
                errors TEXT,
                logs TEXT,
                trigger_type TEXT DEFAULT 'manual'
            )
        """)

        # Create indexes for better performance
        cursor.execute(
            "CREATE INDEX IF NOT EXISTS idx_sync_operations_started_at ON sync_operations(started_at)"
        )
        cursor.execute(
            "CREATE INDEX IF NOT EXISTS idx_sync_operations_success ON sync_operations(success)"
        )
        cursor.execute(
            "CREATE INDEX IF NOT EXISTS idx_sync_operations_trigger_type ON sync_operations(trigger_type)"
        )

        conn.commit()
        conn.close()

        logger.info("Sync operations table migration completed successfully")

    except Exception as e:
        logger.error(f"Sync operations table migration failed: {e}")
        raise
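
A hypothetical usage sketch, not part of this commit, showing how one completed sync run could be recorded in the sync_operations table defined above (record_sync_run and its parameters are illustrative names):

import sqlite3
from datetime import datetime
from pathlib import Path

def record_sync_run(db_path: Path, accounts_processed: int, transactions_added: int) -> None:
    # Columns not listed here keep the defaults declared in the CREATE TABLE above
    conn = sqlite3.connect(str(db_path))
    conn.execute(
        "INSERT INTO sync_operations "
        "(started_at, completed_at, success, accounts_processed, transactions_added, trigger_type) "
        "VALUES (?, ?, ?, ?, ?, ?)",
        (
            datetime.now().isoformat(),
            datetime.now().isoformat(),
            True,
            accounts_processed,
            transactions_added,
            "manual",
        ),
    )
    conn.commit()
    conn.close()

Queries that filter or sort on started_at, success, or trigger_type benefit from the three indexes created above.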

def migrate_add_logo_if_needed(db_path: Path) -> None:
    """Check and add logo column to accounts table if needed."""
    try:
        if _check_logo_migration_needed(db_path):
            logger.info("Logo column migration needed, starting...")
            _migrate_add_logo(db_path)
            logger.info("Logo column migration completed")
        else:
            logger.info("Logo column already exists")
    except Exception as e:
        logger.error(f"Logo column migration failed: {e}")
        raise


def _check_logo_migration_needed(db_path: Path) -> bool:
    """Check if logo column needs to be added to accounts table."""
    if not db_path.exists():
        return False

    try:
        conn = sqlite3.connect(str(db_path))
        cursor = conn.cursor()

        # Check if accounts table exists
        cursor.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='accounts'"
        )
        if not cursor.fetchone():
            conn.close()
            return False

        # Check if logo column exists
        cursor.execute("PRAGMA table_info(accounts)")
        columns = cursor.fetchall()
        has_logo = any(col[1] == "logo" for col in columns)

        conn.close()
        return not has_logo

    except Exception as e:
        logger.error(f"Failed to check logo migration status: {e}")
        return False


def _migrate_add_logo(db_path: Path) -> None:
    """Add logo column to accounts table."""
    try:
        conn = sqlite3.connect(str(db_path))
        cursor = conn.cursor()

        logger.info("Adding logo column to accounts table...")

        # Add the logo column
        cursor.execute("""
            ALTER TABLE accounts
            ADD COLUMN logo TEXT
        """)

        conn.commit()
        conn.close()

        logger.info("Logo column migration completed successfully")

    except Exception as e:
        logger.error(f"Logo column migration failed: {e}")
        raise
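
The display_name and logo migrations share the same check-then-ALTER shape. A minimal consolidation sketch, hypothetical and not part of this commit (_ensure_column is an illustrative name; it assumes trusted table and column identifiers, since SQLite DDL cannot be parameterized):

def _ensure_column(db_path: Path, table: str, column: str, col_type: str = "TEXT") -> None:
    conn = sqlite3.connect(str(db_path))
    cursor = conn.cursor()
    # PRAGMA table_info returns one row per column; index 1 holds the column name
    cursor.execute(f"PRAGMA table_info({table})")
    if not any(col[1] == column for col in cursor.fetchall()):
        cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} {col_type}")
        conn.commit()
    conn.close()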

def _unix_to_datetime_string(unix_timestamp: float) -> str:
    """Convert Unix timestamp to datetime string."""
    dt = datetime.fromtimestamp(unix_timestamp)
    return dt.isoformat()
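
Note that datetime.fromtimestamp() interprets the timestamp in the machine's local timezone, so for example _unix_to_datetime_string(0) yields '1970-01-01T00:00:00' only on a host configured for UTC. A timezone-aware variant, an assumption rather than part of this commit, would be datetime.fromtimestamp(unix_timestamp, tz=timezone.utc).isoformat(), with timezone imported from the datetime module.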
@@ -4,8 +4,8 @@ from typing import Any, Dict, List, Optional
 
 from loguru import logger
 
+from leggen.services.database import init_database
 from leggen.services.database_helpers import get_db_connection
-from leggen.services.database_migrations import run_all_migrations
 from leggen.services.transaction_processor import TransactionProcessor
 from leggen.utils.config import config
 from leggen.utils.paths import path_manager
@@ -208,13 +208,73 @@ class DatabaseService:
         return None
 
     async def run_migrations_if_needed(self):
-        """Run all necessary database migrations"""
+        """Run all necessary database migrations using Alembic"""
         if not self.sqlite_enabled:
             logger.info("SQLite database disabled, skipping migrations")
             return
 
         db_path = path_manager.get_database_path()
-        run_all_migrations(db_path)
+
+        # Initialize SQLModel tables (creates if not exists)
+        init_database()
+
+        # Run Alembic migrations
+        import os
+
+        from alembic.config import Config
+
+        from alembic import command
+
+        # Get the alembic.ini path (project root)
+        alembic_ini_path = os.path.join(
+            os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "alembic.ini"
+        )
+
+        if not os.path.exists(alembic_ini_path):
+            logger.warning(f"Alembic config not found at {alembic_ini_path}")
+            return
+
+        alembic_cfg = Config(alembic_ini_path)
+
+        try:
+            # Check if database already has all tables (existing database)
+            # If so, stamp it with the latest revision without running migrations
+            import sqlite3
+
+            conn = sqlite3.connect(str(db_path))
+            cursor = conn.cursor()
+
+            # Check if alembic_version table exists
+            cursor.execute("""
+                SELECT name FROM sqlite_master
+                WHERE type='table' AND name='alembic_version'
+            """)
+            has_alembic = cursor.fetchone() is not None
+
+            # Check if all main tables exist
+            cursor.execute("""
+                SELECT name FROM sqlite_master
+                WHERE type='table' AND name IN ('accounts', 'transactions', 'balances', 'sync_operations')
+            """)
+            existing_tables = [row[0] for row in cursor.fetchall()]
+            conn.close()
+
+            if not has_alembic and len(existing_tables) >= 4:
+                # This is an existing database without Alembic tracking
+                # Stamp it with the latest revision
+                logger.info("Marking existing database with current Alembic revision")
+                command.stamp(alembic_cfg, "head")
+            else:
+                # Run migrations normally
+                logger.info("Running Alembic migrations")
+                command.upgrade(alembic_cfg, "head")
+
+            logger.info("Database migrations completed successfully")
+        except Exception as e:
+            logger.error(f"Failed to run Alembic migrations: {e}")
+            raise
+
+        logger.info("Database migrations completed")
 
     async def _persist_balance_sqlite(
         self, account_id: str, balance_data: Dict[str, Any]
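
For reference, the stamp-versus-upgrade branch above mirrors the standard Alembic CLI workflow: "alembic stamp head" records the current schema as already applied without executing migration scripts, while "alembic upgrade head" applies all pending migrations. A minimal sketch of checking whether a database is already at the latest revision, using public Alembic APIs (is_at_head is an illustrative name; for this project the URL would be something like f"sqlite:///{db_path}"):

from alembic.config import Config
from alembic.runtime.migration import MigrationContext
from alembic.script import ScriptDirectory
from sqlalchemy import create_engine

def is_at_head(alembic_ini: str, db_url: str) -> bool:
    cfg = Config(alembic_ini)
    head = ScriptDirectory.from_config(cfg).get_current_head()
    engine = create_engine(db_url)
    with engine.connect() as conn:
        # Returns None when the alembic_version table is absent (untracked database)
        current = MigrationContext.configure(conn).get_current_revision()
    return current == head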
@@ -36,6 +36,8 @@ dependencies = [
     "httpx>=0.28.1",
     "pydantic>=2.0.0,<3",
     "boto3>=1.35.0,<2",
+    "sqlmodel>=0.0.25",
+    "alembic>=1.16.5",
 ]
 
 [project.urls]
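
In a uv-managed project these two dependencies would typically be added with a command like "uv add sqlmodel alembic", which updates pyproject.toml and regenerates the uv.lock shown below, pulling in transitive dependencies such as sqlalchemy, greenlet, mako, and markupsafe.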
111
uv.lock
generated
@@ -2,6 +2,20 @@ version = 1
 revision = 3
 requires-python = "==3.13.*"
 
+[[package]]
+name = "alembic"
+version = "1.16.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "mako" },
+    { name = "sqlalchemy" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9a/ca/4dc52902cf3491892d464f5265a81e9dff094692c8a049a3ed6a05fe7ee8/alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e", size = 1969868, upload-time = "2025-08-27T18:02:05.668Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" },
+]
+
 [[package]]
 name = "annotated-types"
 version = "0.7.0"
@@ -167,6 +181,23 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
 ]
 
+[[package]]
+name = "greenlet"
+version = "3.2.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" },
+    { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" },
+    { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" },
+    { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" },
+]
+
 [[package]]
 name = "h11"
 version = "0.16.0"
@@ -260,6 +291,7 @@ name = "leggen"
 version = "2025.9.26"
 source = { editable = "." }
 dependencies = [
+    { name = "alembic" },
     { name = "apscheduler" },
     { name = "boto3" },
     { name = "click" },
@@ -269,6 +301,7 @@ dependencies = [
     { name = "loguru" },
     { name = "pydantic" },
     { name = "requests" },
+    { name = "sqlmodel" },
     { name = "tabulate" },
     { name = "tomli-w" },
     { name = "uvicorn", extra = ["standard"] },
@@ -290,6 +323,7 @@ dev = [
 
 [package.metadata]
 requires-dist = [
+    { name = "alembic", specifier = ">=1.16.5" },
     { name = "apscheduler", specifier = ">=3.10.0,<4" },
     { name = "boto3", specifier = ">=1.35.0,<2" },
     { name = "click", specifier = ">=8.1.7,<9" },
@@ -299,6 +333,7 @@ requires-dist = [
     { name = "loguru", specifier = ">=0.7.2,<0.8" },
     { name = "pydantic", specifier = ">=2.0.0,<3" },
     { name = "requests", specifier = ">=2.31.0,<3" },
+    { name = "sqlmodel", specifier = ">=0.0.25" },
     { name = "tabulate", specifier = ">=0.9.0,<0.10" },
     { name = "tomli-w", specifier = ">=1.0.0,<2" },
     { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0,<1" },
@@ -331,6 +366,48 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
 ]
 
+[[package]]
+name = "mako"
+version = "1.3.10"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "markupsafe" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" },
+]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
+    { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
+    { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
+    { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
+    { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
+    { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
+    { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
+    { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
+    { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
+    { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
+    { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
+    { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
+    { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
+]
+
 [[package]]
 name = "mypy"
 version = "1.17.1"
@@ -646,6 +723,40 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
 ]
 
+[[package]]
+name = "sqlalchemy"
+version = "2.0.43"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" },
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = "2025-08-11T14:24:58.438Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/1c/a7260bd47a6fae7e03768bf66451437b36451143f36b285522b865987ced/sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3", size = 2130598, upload-time = "2025-08-11T15:51:15.903Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/84/8a337454e82388283830b3586ad7847aa9c76fdd4f1df09cdd1f94591873/sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa", size = 2118415, upload-time = "2025-08-11T15:51:17.256Z" },
+    { url = "https://files.pythonhosted.org/packages/cf/ff/22ab2328148492c4d71899d62a0e65370ea66c877aea017a244a35733685/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9", size = 3248707, upload-time = "2025-08-11T15:52:38.444Z" },
+    { url = "https://files.pythonhosted.org/packages/dc/29/11ae2c2b981de60187f7cbc84277d9d21f101093d1b2e945c63774477aba/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f", size = 3253602, upload-time = "2025-08-11T15:56:37.348Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/61/987b6c23b12c56d2be451bc70900f67dd7d989d52b1ee64f239cf19aec69/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738", size = 3183248, upload-time = "2025-08-11T15:52:39.865Z" },
+    { url = "https://files.pythonhosted.org/packages/86/85/29d216002d4593c2ce1c0ec2cec46dda77bfbcd221e24caa6e85eff53d89/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164", size = 3219363, upload-time = "2025-08-11T15:56:39.11Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/e4/bd78b01919c524f190b4905d47e7630bf4130b9f48fd971ae1c6225b6f6a/sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d", size = 2096718, upload-time = "2025-08-11T15:55:05.349Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/a5/ca2f07a2a201f9497de1928f787926613db6307992fe5cda97624eb07c2f/sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197", size = 2123200, upload-time = "2025-08-11T15:55:07.932Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" },
+]
+
+[[package]]
+name = "sqlmodel"
+version = "0.0.25"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pydantic" },
+    { name = "sqlalchemy" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ea/80/d9c098a88724ee4554907939cf39590cf67e10c6683723216e228d3315f7/sqlmodel-0.0.25.tar.gz", hash = "sha256:56548c2e645975b1ed94d6c53f0d13c85593f57926a575e2bf566650b2243fa4", size = 117075, upload-time = "2025-09-17T21:44:41.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/57/cf/5d175ce8de07fe694ec4e3d4d65c2dd06cc30f6c79599b31f9d2f6dd2830/sqlmodel-0.0.25-py3-none-any.whl", hash = "sha256:c98234cda701fb77e9dcbd81688c23bb251c13bb98ce1dd8d4adc467374d45b7", size = 28893, upload-time = "2025-09-17T21:44:39.764Z" },
+]
+
 [[package]]
 name = "starlette"
 version = "0.47.3"