Compare commits

14 Commits: ef9ca1d1ea ... bdd7cac4be

| SHA1 |
|---|
| bdd7cac4be |
| 13c783ca0e |
| da44ba5306 |
| 94322ae542 |
| 78ff6faa12 |
| 9f39836083 |
| ad62317e56 |
| e7abae0d17 |
| 9c7c06c181 |
| d6779b40ea |
| 761720b712 |
| 541295ef05 |
| 14131f50f9 |
| c4d2d5a378 |
```diff
@@ -55,7 +55,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
 # are written from script.py.mako
 # output_encoding = utf-8

-sqlalchemy.url = postgresql://pf-budget:muster-neutron-omega@database.lan/pf-budget
+sqlalchemy.url = postgresql://pf-budget:muster-neutron-omega@database.home.arpa/pf-budget


 [post_write_hooks]
```
```diff
@@ -27,7 +27,7 @@ target_metadata = Base.metadata

 def include_name(name, type_, parent_names):
     if type_ == "schema":
-        return name in ["bank", "category", "tag", "transactions"]
+        return name == "pfbudget"
     else:
         return True

```
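Note (not part of the diff): `include_name` is Alembic's autogenerate filter hook; after this change only the consolidated `pfbudget` schema is reflected. A minimal sketch of how such a hook is typically wired up in `env.py`, assuming the standard online-migration template (the `engine_from_config`/`context.configure` boilerplate below comes from that template, not from this PR):

```python
from alembic import context
from sqlalchemy import engine_from_config, pool


def run_migrations_online() -> None:
    # Standard Alembic template: build the engine from alembic.ini's sqlalchemy.url.
    connectable = engine_from_config(
        context.config.get_section(context.config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            include_schemas=True,       # reflect schema-qualified tables
            include_name=include_name,  # now keeps only the "pfbudget" schema
        )
        with context.begin_transaction():
            context.run_migrations()
```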
alembic/versions/ce68ee15e5d2_change_bank_constraints.py (new file, +95 lines)

```python
"""Change bank constraints

Revision ID: ce68ee15e5d2
Revises: d7f0401c1fd3
Create Date: 2023-04-29 15:53:24.753628+00:00

"""
from alembic import op


# revision identifiers, used by Alembic.
revision = "ce68ee15e5d2"
down_revision = "d7f0401c1fd3"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.drop_constraint(
        "fk_transactions_bank_banks",
        "transactions",
        type_="foreignkey",
        schema="pfbudget",
    )
    op.drop_constraint(
        "fk_banks_nordigen_name_banks",
        "banks_nordigen",
        type_="foreignkey",
        schema="pfbudget",
    )
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint("uq_banks_name", "banks", schema="pfbudget", type_="unique")
    # ### end Alembic commands ###

    op.execute("ALTER TABLE pfbudget.banks DROP CONSTRAINT pk_banks")
    op.create_primary_key("pk_banks", "banks", ["name"], schema="pfbudget")
    op.create_foreign_key(
        "fk_transactions_bank_banks",
        "transactions",
        "banks",
        ["bank"],
        ["name"],
        source_schema="pfbudget",
        referent_schema="pfbudget",
    )
    op.create_foreign_key(
        "fk_banks_nordigen_name_banks",
        "banks_nordigen",
        "banks",
        ["name"],
        ["name"],
        source_schema="pfbudget",
        referent_schema="pfbudget",
    )


def downgrade() -> None:
    op.drop_constraint(
        "fk_transactions_bank_banks",
        "transactions",
        type_="foreignkey",
        schema="pfbudget",
    )
    op.drop_constraint(
        "fk_banks_nordigen_name_banks",
        "banks_nordigen",
        type_="foreignkey",
        schema="pfbudget",
    )

    op.execute("ALTER TABLE pfbudget.banks DROP CONSTRAINT pk_banks")
    op.create_primary_key("pk_banks", "banks", ["BIC", "type"], schema="pfbudget")

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint("uq_banks_name", "banks", ["name"], schema="pfbudget")
    # ### end Alembic commands ###

    op.create_foreign_key(
        "fk_transactions_bank_banks",
        "transactions",
        "banks",
        ["bank"],
        ["name"],
        source_schema="pfbudget",
        referent_schema="pfbudget",
    )
    op.create_foreign_key(
        "fk_banks_nordigen_name_banks",
        "banks_nordigen",
        "banks",
        ["name"],
        ["name"],
        source_schema="pfbudget",
        referent_schema="pfbudget",
    )
```
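Note (not part of the diff): the revision above is applied with the usual Alembic workflow. A sketch of the programmatic equivalent of `alembic upgrade head` / `alembic downgrade -1`, assuming `alembic.ini` sits in the working directory:

```python
# Sketch only: programmatic equivalent of the alembic CLI calls.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")   # applies d7f0401c1fd3 and then ce68ee15e5d2
command.downgrade(cfg, "-1")   # steps back one revision
```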
alembic/versions/d7f0401c1fd3_unified_schemas.py (new file, +594 lines)

```python
"""unified schemas

Revision ID: d7f0401c1fd3
Revises: 952de57a3c43
Create Date: 2023-04-27 16:30:08.514985+00:00

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "d7f0401c1fd3"
down_revision = "952de57a3c43"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "banks",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("BIC", sa.String(length=8), nullable=False),
        sa.Column(
            "type",
            sa.Enum(
                "checking",
                "savings",
                "investment",
                "mealcard",
                "VISA",
                "MASTERCARD",
                name="accounttype",
                schema="pfbudget",
                inherit_schema=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("BIC", "type", name=op.f("pk_banks")),
        sa.UniqueConstraint("name", name=op.f("uq_banks_name")),
        schema="pfbudget",
    )
    op.create_table(
        "category_groups",
        sa.Column("name", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("name", name=op.f("pk_category_groups")),
        schema="pfbudget",
    )
    op.create_table(
        "rules",
        sa.Column(
            "id", sa.BigInteger().with_variant(sa.Integer(), "sqlite"), nullable=False
        ),
        sa.Column("start", sa.Date(), nullable=True),
        sa.Column("end", sa.Date(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("regex", sa.String(), nullable=True),
        sa.Column("bank", sa.String(), nullable=True),
        sa.Column("min", sa.Numeric(precision=16, scale=2), nullable=True),
        sa.Column("max", sa.Numeric(precision=16, scale=2), nullable=True),
        sa.Column("type", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_rules")),
        schema="pfbudget",
    )
    op.create_table(
        "tags",
        sa.Column("name", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("name", name=op.f("pk_tags")),
        schema="pfbudget",
    )
    op.create_table(
        "banks_nordigen",
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("bank_id", sa.String(), nullable=True),
        sa.Column("requisition_id", sa.String(), nullable=True),
        sa.Column("invert", sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(
            ["name"], ["pfbudget.banks.name"], name=op.f("fk_banks_nordigen_name_banks")
        ),
        sa.PrimaryKeyConstraint("name", name=op.f("pk_banks_nordigen")),
        schema="pfbudget",
    )
    op.create_table(
        "categories",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("group", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["group"],
            ["pfbudget.category_groups.name"],
            name=op.f("fk_categories_group_category_groups"),
        ),
        sa.PrimaryKeyConstraint("name", name=op.f("pk_categories")),
        schema="pfbudget",
    )
    op.create_table(
        "tag_rules",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("tag", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.rules.id"],
            name=op.f("fk_tag_rules_id_rules"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["tag"],
            ["pfbudget.tags.name"],
            name=op.f("fk_tag_rules_tag_tags"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_tag_rules")),
        schema="pfbudget",
    )
    op.create_table(
        "transactions",
        sa.Column(
            "id", sa.BigInteger().with_variant(sa.Integer(), "sqlite"), nullable=False
        ),
        sa.Column("date", sa.Date(), nullable=False),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("amount", sa.Numeric(precision=16, scale=2), nullable=False),
        sa.Column("split", sa.Boolean(), nullable=False),
        sa.Column("type", sa.String(), nullable=False),
        sa.Column("bank", sa.Text(), nullable=True),
        sa.Column("original", sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(
            ["bank"], ["pfbudget.banks.name"], name=op.f("fk_transactions_bank_banks")
        ),
        sa.ForeignKeyConstraint(
            ["original"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_transactions_original_transactions"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_transactions")),
        schema="pfbudget",
    )
    op.create_table(
        "category_rules",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.rules.id"],
            name=op.f("fk_category_rules_id_rules"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["name"],
            ["pfbudget.categories.name"],
            name=op.f("fk_category_rules_name_categories"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_category_rules")),
        schema="pfbudget",
    )
    op.create_table(
        "category_schedules",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column(
            "period",
            sa.Enum(
                "daily",
                "weekly",
                "monthly",
                "yearly",
                name="period",
                schema="pfbudget",
                inherit_schema=True,
            ),
            nullable=True,
        ),
        sa.Column("period_multiplier", sa.Integer(), nullable=True),
        sa.Column("amount", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["name"],
            ["pfbudget.categories.name"],
            name=op.f("fk_category_schedules_name_categories"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("name", name=op.f("pk_category_schedules")),
        schema="pfbudget",
    )
    op.create_table(
        "links",
        sa.Column("original", sa.BigInteger(), nullable=False),
        sa.Column("link", sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ["link"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_links_link_transactions"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["original"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_links_original_transactions"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("original", "link", name=op.f("pk_links")),
        schema="pfbudget",
    )
    op.create_table(
        "notes",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("note", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_notes_id_transactions"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_notes")),
        schema="pfbudget",
    )
    op.create_table(
        "transactions_categorized",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_transactions_categorized_id_transactions"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["name"],
            ["pfbudget.categories.name"],
            name=op.f("fk_transactions_categorized_name_categories"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_transactions_categorized")),
        schema="pfbudget",
    )
    op.create_table(
        "transactions_tagged",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("tag", sa.String(), nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.transactions.id"],
            name=op.f("fk_transactions_tagged_id_transactions"),
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["tag"],
            ["pfbudget.tags.name"],
            name=op.f("fk_transactions_tagged_tag_tags"),
        ),
        sa.PrimaryKeyConstraint("id", "tag", name=op.f("pk_transactions_tagged")),
        schema="pfbudget",
    )
    op.create_table(
        "category_selectors",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column(
            "selector",
            sa.Enum(
                "unknown",
                "nullifier",
                "vacations",
                "rules",
                "algorithm",
                "manual",
                name="selector_t",
                schema="pfbudget",
                inherit_schema=True,
            ),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["id"],
            ["pfbudget.transactions_categorized.id"],
            name=op.f("fk_category_selectors_id_transactions_categorized"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_category_selectors")),
        schema="pfbudget",
    )
    op.drop_table("notes", schema="transactions")
    op.drop_table("rules", schema="tag")
    op.drop_table("tagged", schema="transactions")
    op.drop_table("available", schema="tag")
    op.drop_table("nordigen", schema="bank")
    op.drop_table("links", schema="transactions")
    op.drop_table("selector", schema="transactions")
    op.drop_table("categorized", schema="transactions")
    op.drop_table("transactions", schema="transactions")
    op.drop_table("banks", schema="bank")
    op.drop_table("rules", schema="category")
    op.drop_table("schedules", schema="category")
    op.drop_table("rules", schema="transactions")
    op.drop_table("available", schema="category")
    op.drop_table("groups", schema="category")
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "transactions",
        sa.Column(
            "id",
            sa.BIGINT(),
            server_default=sa.text(
                "nextval('transactions.originals_id_seq'::regclass)"
            ),
            autoincrement=True,
            nullable=False,
        ),
        sa.Column("date", sa.DATE(), autoincrement=False, nullable=False),
        sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column("bank", sa.TEXT(), autoincrement=False, nullable=True),
        sa.Column(
            "amount",
            sa.NUMERIC(precision=16, scale=2),
            autoincrement=False,
            nullable=False,
        ),
        sa.Column("type", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column("split", sa.BOOLEAN(), autoincrement=False, nullable=False),
        sa.Column("original", sa.BIGINT(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ["bank"], ["bank.banks.name"], name="fk_originals_bank_banks"
        ),
        sa.ForeignKeyConstraint(
            ["original"],
            ["transactions.transactions.id"],
            name="fk_originals_original_originals",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_originals"),
        schema="transactions",
        postgresql_ignore_search_path=False,
    )
    op.create_table(
        "nordigen",
        sa.Column("name", sa.TEXT(), autoincrement=False, nullable=False),
        sa.Column("bank_id", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column("requisition_id", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column("invert", sa.BOOLEAN(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ["name"], ["bank.banks.name"], name="fk_nordigen_name_banks"
        ),
        sa.PrimaryKeyConstraint("name", name="pk_nordigen"),
        schema="bank",
    )
    op.create_table(
        "tagged",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("tag", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.transactions.id"],
            name="fk_tags_id_originals",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["tag"], ["tag.available.name"], name="fk_tags_tag_available"
        ),
        sa.PrimaryKeyConstraint("id", "tag", name="pk_tags"),
        schema="transactions",
    )
    op.create_table(
        "available",
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column("group", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ["group"], ["category.groups.name"], name="fk_available_group_groups"
        ),
        sa.PrimaryKeyConstraint("name", name="pk_available"),
        schema="category",
        postgresql_ignore_search_path=False,
    )
    op.create_table(
        "rules",
        sa.Column(
            "id",
            sa.BIGINT(),
            server_default=sa.text("nextval('transactions.rules_id_seq'::regclass)"),
            autoincrement=True,
            nullable=False,
        ),
        sa.Column("end", sa.DATE(), autoincrement=False, nullable=True),
        sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column("regex", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column("bank", sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column(
            "min", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        sa.Column(
            "max", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        sa.Column("type", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column("start", sa.DATE(), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint("id", name="pk_rules"),
        schema="transactions",
        postgresql_ignore_search_path=False,
    )
    op.create_table(
        "groups",
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint("name", name="pk_groups"),
        schema="category",
        postgresql_ignore_search_path=False,
    )
    op.create_table(
        "rules",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("tag", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.rules.id"],
            name="fk_rules_id_rules",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["tag"],
            ["tag.available.name"],
            name="fk_rules_tag_available",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_rules"),
        schema="tag",
    )
    op.create_table(
        "schedules",
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column(
            "period",
            postgresql.ENUM(
                "daily", "weekly", "monthly", "yearly", name="period", schema="category"
            ),
            autoincrement=False,
            nullable=True,
        ),
        sa.Column(
            "period_multiplier", sa.INTEGER(), autoincrement=False, nullable=True
        ),
        sa.Column("amount", sa.INTEGER(), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(
            ["name"],
            ["category.available.name"],
            name="fk_schedules_name_available",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("name", name="pk_schedules"),
        schema="category",
    )
    op.create_table(
        "rules",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.rules.id"],
            name="fk_rules_id_rules",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["name"],
            ["category.available.name"],
            name="fk_rules_name_available",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_rules"),
        schema="category",
    )
    op.create_table(
        "links",
        sa.Column("original", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("link", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["link"],
            ["transactions.transactions.id"],
            name="fk_links_link_originals",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["original"],
            ["transactions.transactions.id"],
            name="fk_links_original_originals",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("original", "link", name="pk_links"),
        schema="transactions",
    )
    op.create_table(
        "selector",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column(
            "selector",
            postgresql.ENUM(
                "unknown",
                "nullifier",
                "vacations",
                "rules",
                "algorithm",
                "manual",
                name="selector_t",
                schema="transactions",
            ),
            autoincrement=False,
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.categorized.id"],
            name="fk_selector_id_categorized",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_selector"),
        schema="transactions",
    )
    op.create_table(
        "banks",
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.Column("BIC", sa.VARCHAR(length=8), autoincrement=False, nullable=False),
        sa.Column(
            "type",
            postgresql.ENUM(
                "checking",
                "savings",
                "investment",
                "mealcard",
                "VISA",
                "MASTERCARD",
                name="accounttype",
                schema="bank",
            ),
            autoincrement=False,
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("BIC", "type", name="pk_banks"),
        sa.UniqueConstraint("name", name="uq_banks_name"),
        schema="bank",
        postgresql_ignore_search_path=False,
    )
    op.create_table(
        "available",
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint("name", name="pk_available"),
        schema="tag",
    )
    op.create_table(
        "notes",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("note", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.transactions.id"],
            name="fk_notes_id_originals",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_notes"),
        schema="transactions",
    )
    op.create_table(
        "categorized",
        sa.Column("id", sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ["id"],
            ["transactions.transactions.id"],
            name="fk_categorized_id_originals",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["name"],
            ["category.available.name"],
            name="fk_categorized_name_available",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_categorized"),
        schema="transactions",
    )
    op.drop_table("category_selectors", schema="pfbudget")
    op.drop_table("transactions_tagged", schema="pfbudget")
    op.drop_table("transactions_categorized", schema="pfbudget")
    op.drop_table("notes", schema="pfbudget")
    op.drop_table("links", schema="pfbudget")
    op.drop_table("category_schedules", schema="pfbudget")
    op.drop_table("category_rules", schema="pfbudget")
    op.drop_table("transactions", schema="pfbudget")
    op.drop_table("tag_rules", schema="pfbudget")
    op.drop_table("categories", schema="pfbudget")
    op.drop_table("banks_nordigen", schema="pfbudget")
    op.drop_table("tags", schema="pfbudget")
    op.drop_table("rules", schema="pfbudget")
    op.drop_table("category_groups", schema="pfbudget")
    op.drop_table("banks", schema="pfbudget")
    # ### end Alembic commands ###
```
```diff
@@ -53,7 +53,7 @@ if __name__ == "__main__":
             if not args["all"]:
                 params.append(args["banks"])
             else:
-                params.append([])
+                params.append(None)

         case Operation.BankAdd:
             keys = {"bank", "bic", "type"}
@@ -130,12 +130,12 @@ if __name__ == "__main__":
            keys = {"category", "group"}
            assert args.keys() >= keys, f"missing {args.keys() - keys}"

-            params = [type.Category(cat) for cat in args["category"]]
-            params.append(args["group"])
+            params = [{"name": cat, "group": args["group"]} for cat in args["category"]]

        case Operation.CategoryRemove:
            assert "category" in args, "argparser ill defined"
-            params = [type.Category(cat) for cat in args["category"]]
+            params = args["category"]

        case Operation.CategorySchedule:
            keys = {"category", "period", "frequency"}
@@ -246,7 +246,7 @@ if __name__ == "__main__":

        case Operation.GroupRemove:
            assert "group" in args, "argparser ill defined"
-            params = [type.CategoryGroup(group) for group in args["group"]]
+            params = args["group"]

        case Operation.Forge | Operation.Dismantle:
            keys = {"original", "links"}
```
```diff
@@ -67,7 +67,7 @@ def argparser() -> argparse.ArgumentParser:

    pimport = subparsers.add_parser("import")
    pimport.set_defaults(op=Operation.Import)
-    pimport.add_argument("file", nargs=1, type=str)
+    file_options(pimport)

    # Parse from .csv
    parse = subparsers.add_parser("parse")
```
```diff
@@ -57,7 +57,7 @@ class Interactive:

            case "split":
                new = self.split(next)
-                session.add(new)
+                session.insert(new)

            case other:
                if not other:
@@ -84,7 +84,7 @@ class Interactive:
                )
                for tag in tags:
                    if tag not in [t.name for t in self.tags]:
-                        session.add([Tag(tag)])
+                        session.insert([Tag(tag)])
                        self.tags = session.get(Tag)

                    next.tags.add(TransactionTag(tag))
```
```diff
@@ -1,10 +1,12 @@
 import csv
+import json
 from pathlib import Path
 import pickle
+from typing import Optional
 import webbrowser

 from pfbudget.common.types import Operation
-from pfbudget.db.client import DbClient
+from pfbudget.db.client import Client
 from pfbudget.db.model import (
     Bank,
     BankTransaction,
@@ -27,6 +29,7 @@ from pfbudget.db.model import (
 from pfbudget.extract.nordigen import NordigenClient, NordigenCredentialsManager
 from pfbudget.extract.parsers import parse_data
 from pfbudget.extract.psd2 import PSD2Extractor
+from pfbudget.load.database import DatabaseLoader
 from pfbudget.transform.categorizer import Categorizer
 from pfbudget.transform.nullifier import Nullifier
 from pfbudget.transform.tagger import Tagger
@@ -35,6 +38,7 @@ from pfbudget.transform.tagger import Tagger
 class Manager:
     def __init__(self, db: str, verbosity: int = 0):
         self._db = db
+        self._database: Optional[Client] = None
         self._verbosity = verbosity

     def action(self, op: Operation, params=None):
@@ -49,10 +53,7 @@ class Manager:
                 pass

             case Operation.Transactions:
-                with self.db.session() as session:
-                    transactions = session.get(Transaction)
-                    ret = [t.format for t in transactions]
-                    return ret
+                return [t.format for t in self.database.select(Transaction)]

             case Operation.Parse:
                 # Adapter for the parse_data method. Can be refactored.
```
```diff
@@ -71,37 +72,36 @@ class Manager:
                     len(transactions) > 0
                     and input(f"{transactions[:5]}\nCommit? (y/n)") == "y"
                 ):
-                    with self.db.session() as session:
-                        session.add(sorted(transactions))
+                    self.database.insert(sorted(transactions))

             case Operation.Download:
-                client = Manager.nordigen_client()
-                with self.db.session() as session:
-                    if len(params[3]) == 0:
-                        banks = session.get(Bank, Bank.nordigen)
-                    else:
-                        banks = session.get(Bank, Bank.name, params[3])
-                    session.expunge_all()
+                if params[3]:
+                    values = params[3]
+                    banks = self.database.select(Bank, lambda: Bank.name.in_(values))
+                else:
+                    banks = self.database.select(Bank, Bank.nordigen)
+
+                extractor = PSD2Extractor(Manager.nordigen_client())

-                extractor = PSD2Extractor(client)
                 transactions = []
                 for bank in banks:
                     transactions.extend(extractor.extract(bank, params[0], params[1]))

                 # dry-run
-                if not params[2]:
-                    with self.db.session() as session:
-                        session.add(sorted(transactions))
-                else:
+                if params[2]:
                     print(sorted(transactions))
+                    return
+
+                loader = DatabaseLoader(self.database)
+                loader.load(sorted(transactions))

             case Operation.Categorize:
-                with self.db.session() as session:
-                    uncategorized = session.get(
-                        BankTransaction, ~BankTransaction.category.has()
+                with self.database.session as session:
+                    uncategorized = session.select(
+                        BankTransaction, lambda: ~BankTransaction.category.has()
                     )
-                    categories = session.get(Category)
-                    tags = session.get(Tag)
+                    categories = session.select(Category)
+                    tags = session.select(Tag)

                     rules = [cat.rules for cat in categories if cat.name == "null"]
                     Nullifier(rules).transform_inplace(uncategorized)
@@ -113,20 +113,16 @@ class Manager:
                     Tagger(rules).transform_inplace(uncategorized)

             case Operation.BankMod:
-                with self.db.session() as session:
-                    session.update(Bank, params)
+                self.database.update(Bank, params)

             case Operation.PSD2Mod:
-                with self.db.session() as session:
-                    session.update(Nordigen, params)
+                self.database.update(Nordigen, params)

             case Operation.BankDel:
-                with self.db.session() as session:
-                    session.remove_by_name(Bank, params)
+                self.database.delete(Bank, Bank.name, params)

             case Operation.PSD2Del:
-                with self.db.session() as session:
-                    session.remove_by_name(Nordigen, params)
+                self.database.delete(Nordigen, Nordigen.name, params)

             case Operation.Token:
                 Manager.nordigen_client().generate_token()
```
```diff
@@ -143,53 +139,37 @@ class Manager:
             case (
                 Operation.BankAdd
                 | Operation.CategoryAdd
+                | Operation.GroupAdd
                 | Operation.PSD2Add
                 | Operation.RuleAdd
                 | Operation.TagAdd
                 | Operation.TagRuleAdd
             ):
-                with self.db.session() as session:
-                    session.add(params)
+                self.database.insert(params)

             case Operation.CategoryUpdate:
-                with self.db.session() as session:
-                    session.updategroup(*params)
+                self.database.update(Category, params)

             case Operation.CategoryRemove:
-                with self.db.session() as session:
-                    session.remove_by_name(Category, params)
+                self.database.delete(Category, Category.name, params)

             case Operation.CategorySchedule:
-                with self.db.session() as session:
-                    session.updateschedules(params)
+                raise NotImplementedError

             case Operation.RuleRemove:
-                assert all(isinstance(param, int) for param in params)
-                with self.db.session() as session:
-                    session.remove_by_id(CategoryRule, params)
+                self.database.delete(CategoryRule, CategoryRule.id, params)

             case Operation.TagRemove:
-                with self.db.session() as session:
-                    session.remove_by_name(Tag, params)
+                self.database.delete(Tag, Tag.name, params)

             case Operation.TagRuleRemove:
-                assert all(isinstance(param, int) for param in params)
-                with self.db.session() as session:
-                    session.remove_by_id(TagRule, params)
+                self.database.delete(TagRule, TagRule.id, params)

             case Operation.RuleModify | Operation.TagRuleModify:
-                assert all(isinstance(param, dict) for param in params)
-                with self.db.session() as session:
-                    session.update(Rule, params)
-
-            case Operation.GroupAdd:
-                with self.db.session() as session:
-                    session.add(params)
+                self.database.update(Rule, params)

             case Operation.GroupRemove:
-                assert all(isinstance(param, CategoryGroup) for param in params)
-                with self.db.session() as session:
-                    session.remove_by_name(CategoryGroup, params)
+                self.database.delete(CategoryGroup, CategoryGroup.name, params)

             case Operation.Forge:
                 if not (
```
```diff
@@ -198,9 +178,14 @@ class Manager:
                 ):
                     raise TypeError("f{params} are not transaction ids")

-                with self.db.session() as session:
-                    original = session.get(Transaction, Transaction.id, params[0])[0]
-                    links = session.get(Transaction, Transaction.id, params[1])
+                with self.database.session as session:
+                    id = params[0]
+                    original = session.select(
+                        Transaction, lambda: Transaction.id == id
+                    )[0]
+
+                    ids = params[1]
+                    links = session.select(Transaction, lambda: Transaction.id.in_(ids))

                     if not original.category:
                         original.category = self.askcategory(original)
@@ -217,15 +202,10 @@ class Manager:
                         link.category = original.category

                     tobelinked = [Link(original.id, link.id) for link in links]
-                    session.add(tobelinked)
+                    session.insert(tobelinked)

             case Operation.Dismantle:
-                assert all(isinstance(param, Link) for param in params)
-
-                with self.db.session() as session:
-                    original = params[0].original
-                    links = [link.link for link in params]
-                    session.remove_links(original, links)
+                raise NotImplementedError

             case Operation.Split:
                 if len(params) < 1 and not all(
```
```diff
@@ -240,8 +220,10 @@ class Manager:
                         f"{original.amount}€ != {sum(v for v, _ in params[1:])}€"
                     )

-                with self.db.session() as session:
-                    originals = session.get(Transaction, Transaction.id, [original.id])
+                with self.database.session as session:
+                    originals = session.select(
+                        Transaction, lambda: Transaction.id == original.id
+                    )
                     assert len(originals) == 1, ">1 transactions matched {original.id}!"

                     originals[0].split = True
@@ -260,11 +242,13 @@ class Manager:
                         splitted.category = t.category
                         transactions.append(splitted)

-                    session.add(transactions)
+                    session.insert(transactions)

             case Operation.Export:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], sorted(session.get(Transaction)))
+                with self.database.session as session:
+                    self.dump(
+                        params[0], params[1], self.database.select(Transaction, session)
+                    )

             case Operation.Import:
                 transactions = []
```
```diff
@@ -297,12 +281,11 @@ class Manager:
                     transactions.append(transaction)

                 if self.certify(transactions):
-                    with self.db.session() as session:
-                        session.add(transactions)
+                    self.database.insert(transactions)

             case Operation.ExportBanks:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], session.get(Bank))
+                with self.database.session as session:
+                    self.dump(params[0], params[1], self.database.select(Bank, session))

             case Operation.ImportBanks:
                 banks = []
@@ -313,34 +296,39 @@ class Manager:
                     banks.append(bank)

                 if self.certify(banks):
-                    with self.db.session() as session:
-                        session.add(banks)
+                    self.database.insert(banks)

             case Operation.ExportCategoryRules:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], session.get(CategoryRule))
+                with self.database.session as session:
+                    self.dump(
+                        params[0],
+                        params[1],
+                        self.database.select(CategoryRule, session),
+                    )

             case Operation.ImportCategoryRules:
                 rules = [CategoryRule(**row) for row in self.load(params[0], params[1])]

                 if self.certify(rules):
-                    with self.db.session() as session:
-                        session.add(rules)
+                    self.database.insert(rules)

             case Operation.ExportTagRules:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], session.get(TagRule))
+                with self.database.session as session:
+                    self.dump(
+                        params[0], params[1], self.database.select(TagRule, session)
+                    )

             case Operation.ImportTagRules:
                 rules = [TagRule(**row) for row in self.load(params[0], params[1])]

                 if self.certify(rules):
-                    with self.db.session() as session:
-                        session.add(rules)
+                    self.database.insert(rules)

             case Operation.ExportCategories:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], session.get(Category))
+                with self.database.session as session:
+                    self.dump(
+                        params[0], params[1], self.database.select(Category, session)
+                    )

             case Operation.ImportCategories:
                 # rules = [Category(**row) for row in self.load(params[0])]
```
```diff
@@ -359,12 +347,15 @@ class Manager:
                     categories.append(category)

                 if self.certify(categories):
-                    with self.db.session() as session:
-                        session.add(categories)
+                    self.database.insert(categories)

             case Operation.ExportCategoryGroups:
-                with self.db.session() as session:
-                    self.dump(params[0], params[1], session.get(CategoryGroup))
+                with self.database.session as session:
+                    self.dump(
+                        params[0],
+                        params[1],
+                        self.database.select(CategoryGroup, session),
+                    )

             case Operation.ImportCategoryGroups:
                 groups = [
@@ -372,8 +363,7 @@ class Manager:
                 ]

                 if self.certify(groups):
-                    with self.db.session() as session:
-                        session.add(groups)
+                    self.database.insert(groups)

     def parse(self, filename: Path, args: dict):
         return parse_data(filename, args)
@@ -381,8 +371,7 @@ class Manager:
     def askcategory(self, transaction: Transaction):
         selector = CategorySelector(Selector_T.manual)

-        with self.db.session() as session:
-            categories = session.get(Category)
+        categories = self.database.select(Category)

         while True:
             category = input(f"{transaction}: ")
```
```diff
@@ -397,6 +386,9 @@ class Manager:
         elif format == "csv":
             with open(fn, "w", newline="") as f:
                 csv.writer(f).writerows([e.format.values() for e in sequence])
+        elif format == "json":
+            with open(fn, "w", newline="") as f:
+                json.dump([e.format for e in sequence], f, indent=4, default=str)
         else:
             print("format not well specified")

```
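Note (not part of the diff): the new JSON branch leans on `default=str` so values in each exported `format` dict that JSON cannot serialize natively (dates, `Decimal` amounts) are written as strings instead of raising `TypeError`. A standalone illustration with a made-up row:

```python
import datetime
import decimal
import json

# Illustrative row; real rows come from the models' .format properties.
row = {"date": datetime.date(2023, 4, 29), "amount": decimal.Decimal("12.34")}

# default=str renders date/Decimal as "2023-04-29" / "12.34" instead of failing.
print(json.dumps([row], indent=4, default=str))
```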
```diff
@@ -418,12 +410,10 @@ class Manager:
             return False

     @property
-    def db(self) -> DbClient:
-        return DbClient(self._db, self._verbosity > 2)
+    def database(self) -> Client:
+        if not self._database:
+            self._database = Client(self._db, echo=self._verbosity > 2)
+        return self._database

-    @db.setter
-    def db(self, url: str):
-        self._db = url
-
     @staticmethod
     def nordigen_client() -> NordigenClient:
```
```diff
@@ -1,123 +1,69 @@
-from dataclasses import asdict
-from sqlalchemy import create_engine, delete, select, update
-from sqlalchemy.dialects.postgresql import insert
-from sqlalchemy.orm import Session
-from sqlalchemy.sql.expression import false
-from typing import Sequence, Type, TypeVar
+from collections.abc import Sequence
+from copy import deepcopy
+from sqlalchemy import Engine, create_engine, delete, select, update
+from sqlalchemy.orm import Session, sessionmaker
+from typing import Any, Mapping, Optional, Type, TypeVar

-from pfbudget.db.model import (
-    Category,
-    CategoryGroup,
-    CategorySchedule,
-    Link,
-    Transaction,
-)
+# from pfbudget.db.exceptions import InsertError, SelectError


-class DbClient:
-    """
-    General database client using sqlalchemy
-    """
-
-    __sessions: list[Session]
-
-    def __init__(self, url: str, echo=False) -> None:
-        self._engine = create_engine(url, echo=echo)
-
-    @property
-    def engine(self):
-        return self._engine
-
-    class ClientSession:
-        def __init__(self, engine):
-            self.__engine = engine
+class DatabaseSession:
+    def __init__(self, session: Session):
+        self.__session = session

-        def __enter__(self):
-            self.__session = Session(self.__engine)
-            return self
+    def __enter__(self):
+        self.__session.begin()
+        return self

-        def __exit__(self, exc_type, exc_value, exc_tb):
-            self.commit()
-            self.__session.close()
+    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):
+        if exc_type:
+            self.__session.rollback()
+        else:
+            self.__session.commit()
+        self.__session.close()

-        def commit(self):
-            self.__session.commit()
-
-        def expunge_all(self):
-            self.__session.expunge_all()
+    def insert(self, sequence: Sequence[Any]) -> None:
+        self.__session.add_all(sequence)

-        T = TypeVar("T")
+    T = TypeVar("T")

-        def get(self, type: Type[T], column=None, values=None) -> Sequence[T]:
-            if column is not None:
-                if values:
-                    if isinstance(values, Sequence):
-                        stmt = select(type).where(column.in_(values))
-                    else:
-                        stmt = select(type).where(column == values)
-                else:
-                    stmt = select(type).where(column)
-            else:
-                stmt = select(type)
+    def select(self, what: Type[T], exists: Optional[Any] = None) -> Sequence[T]:
+        if exists:
+            stmt = select(what).filter(exists)
+        else:
+            stmt = select(what)

-            return self.__session.scalars(stmt).all()
+        return self.__session.scalars(stmt).all()

-        def uncategorized(self) -> Sequence[Transaction]:
-            """Selects all valid uncategorized transactions
-            At this moment that includes:
-            - Categories w/o category
-            - AND non-split categories
-
-            Returns:
-                Sequence[Transaction]: transactions left uncategorized
-            """
-            stmt = (
-                select(Transaction)
-                .where(~Transaction.category.has())
-                .where(Transaction.split == false())
-            )
-            return self.__session.scalars(stmt).all()
-
-        def add(self, rows: list):
-            self.__session.add_all(rows)
+
+class Client:
+    def __init__(self, url: str, **kwargs: Any):
+        assert url, "Database URL is empty!"
+        self._engine = create_engine(url, **kwargs)
+        self._sessionmaker = sessionmaker(self._engine)

-        def remove_by_name(self, type, rows: list):
-            stmt = delete(type).where(type.name.in_([row.name for row in rows]))
-            self.__session.execute(stmt)
+    def insert(self, sequence: Sequence[Any]) -> None:
+        new = deepcopy(sequence)
+        with self.session as session:
+            session.insert(new)

-        def updategroup(self, categories: list[Category], group: CategoryGroup):
-            stmt = (
-                update(Category)
-                .where(Category.name.in_([cat.name for cat in categories]))
-                .values(group=group)
-            )
-            self.__session.execute(stmt)
+    T = TypeVar("T")

-        def updateschedules(self, schedules: list[CategorySchedule]):
-            stmt = insert(CategorySchedule).values([asdict(s) for s in schedules])
-            stmt = stmt.on_conflict_do_update(
-                index_elements=[CategorySchedule.name],
-                set_=dict(
-                    recurring=stmt.excluded.recurring,
-                    period=stmt.excluded.period,
-                    period_multiplier=stmt.excluded.period_multiplier,
-                ),
-            )
-            self.__session.execute(stmt)
+    def select(self, what: Type[T], exists: Optional[Any] = None) -> Sequence[T]:
+        return self.session.select(what, exists)

-        def remove_by_id(self, type, ids: list[int]):
-            stmt = delete(type).where(type.id.in_(ids))
-            self.__session.execute(stmt)
+    def update(self, what: Type[Any], values: Sequence[Mapping[str, Any]]) -> None:
+        with self._sessionmaker() as session, session.begin():
+            session.execute(update(what), values)

-        def update(self, type, values: list[dict]):
-            print(type, values)
-            self.__session.execute(update(type), values)
+    def delete(self, what: Type[Any], column: Any, values: Sequence[str]) -> None:
+        with self._sessionmaker() as session, session.begin():
+            session.execute(delete(what).where(column.in_(values)))

-        def remove_links(self, original: int, links: list[int]):
-            stmt = delete(Link).where(
-                Link.original == original, Link.link.in_(link for link in links)
-            )
-            self.__session.execute(stmt)
+    @property
+    def engine(self) -> Engine:
+        return self._engine

-        def session(self) -> ClientSession:
-            return self.ClientSession(self.engine)
+    @property
+    def session(self) -> DatabaseSession:
+        return DatabaseSession(self._sessionmaker())
```
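Note (not part of the diff): as reconstructed above, `Client` owns the engine and sessionmaker and exposes `insert`/`select`/`update`/`delete`, while `DatabaseSession` is a thin commit-or-rollback context manager around an ORM `Session`. A hypothetical usage sketch (the connection URL is a placeholder, not a value from this PR):

```python
# Hypothetical usage of the new client API.
from pfbudget.db.client import Client
from pfbudget.db.model import Tag

client = Client("postgresql://user:password@localhost/pf-budget", echo=False)

# Client.insert() deep-copies the sequence and add_all()s it inside one session.
client.insert([Tag("groceries"), Tag("travel")])

# Client.select() opens a DatabaseSession and returns all rows of the mapped class.
tags = client.select(Tag)
print([t.name for t in tags])
```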
pfbudget/db/exceptions.py (new file, +6 lines)

```python
class InsertError(Exception):
    pass


class SelectError(Exception):
    pass
```
```diff
@@ -9,6 +9,7 @@ from sqlalchemy import (
     BigInteger,
     Enum,
     ForeignKey,
+    Integer,
     MetaData,
     Numeric,
     String,
@@ -25,7 +26,7 @@ from sqlalchemy.orm import (

 class Base(MappedAsDataclass, DeclarativeBase):
     metadata = MetaData(
-        schema="transactions",
+        schema="pfbudget",
         naming_convention={
             "ix": "ix_%(column_0_label)s",
             "uq": "uq_%(table_name)s_%(column_0_name)s",
```
```diff
@@ -58,12 +59,11 @@ class Export:


 class Bank(Base, Export):
-    __table_args__ = {"schema": "bank"}
     __tablename__ = "banks"

-    name: Mapped[str] = mapped_column(unique=True)
-    BIC: Mapped[str] = mapped_column(String(8), primary_key=True)
-    type: Mapped[accounttype] = mapped_column(primary_key=True)
+    name: Mapped[str] = mapped_column(primary_key=True)
+    BIC: Mapped[str] = mapped_column(String(8))
+    type: Mapped[accounttype]

     nordigen: Mapped[Optional[Nordigen]] = relationship(lazy="joined", init=False)

```
@ -79,7 +79,14 @@ class Bank(Base, Export):

 bankfk = Annotated[str, mapped_column(Text, ForeignKey(Bank.name))]

-idpk = Annotated[int, mapped_column(BigInteger, primary_key=True, autoincrement=True)]
+idpk = Annotated[
+    int,
+    mapped_column(
+        BigInteger().with_variant(Integer, "sqlite"),
+        primary_key=True,
+        autoincrement=True,
+    ),
+]
 money = Annotated[decimal.Decimal, mapped_column(Numeric(16, 2))]

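Why the with_variant: SQLite only auto-increments INTEGER primary keys, so the BIGINT column used on PostgreSQL has to degrade to INTEGER there. A stand-alone sketch of the same pattern (hypothetical Example model, not from this repo):

    from sqlalchemy import BigInteger, Integer
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Example(Base):
        __tablename__ = "example"
        # BIGINT on PostgreSQL, plain INTEGER on SQLite so autoincrement still works.
        id: Mapped[int] = mapped_column(
            BigInteger().with_variant(Integer, "sqlite"),
            primary_key=True,
            autoincrement=True,
        )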
@ -151,8 +158,7 @@ class SplitTransaction(Transaction):


 class CategoryGroup(Base, Export):
-    __table_args__ = {"schema": "category"}
-    __tablename__ = "groups"
+    __tablename__ = "category_groups"

     name: Mapped[str] = mapped_column(primary_key=True)

@ -162,8 +168,7 @@ class CategoryGroup(Base, Export):


 class Category(Base, Export):
-    __table_args__ = {"schema": "category"}
-    __tablename__ = "available"
+    __tablename__ = "categories"

     name: Mapped[str] = mapped_column(primary_key=True)
     group: Mapped[Optional[str]] = mapped_column(
@ -200,7 +205,7 @@ catfk = Annotated[


 class TransactionCategory(Base, Export):
-    __tablename__ = "categorized"
+    __tablename__ = "transactions_categorized"

     id: Mapped[idfk] = mapped_column(primary_key=True, init=False)
     name: Mapped[catfk]
@ -211,7 +216,9 @@ class TransactionCategory(Base, Export):

     @property
     def format(self):
-        return dict(name=self.name, selector=self.selector.format)
+        return dict(
+            name=self.name, selector=self.selector.format if self.selector else None
+        )


 class Note(Base):
@ -222,8 +229,7 @@ class Note(Base):


 class Nordigen(Base, Export):
-    __table_args__ = {"schema": "bank"}
-    __tablename__ = "nordigen"
+    __tablename__ = "banks_nordigen"

     name: Mapped[bankfk] = mapped_column(primary_key=True)
     bank_id: Mapped[Optional[str]]
@ -241,8 +247,7 @@ class Nordigen(Base, Export):


 class Tag(Base):
-    __table_args__ = {"schema": "tag"}
-    __tablename__ = "available"
+    __tablename__ = "tags"

     name: Mapped[str] = mapped_column(primary_key=True)

@ -252,7 +257,7 @@ class Tag(Base):


 class TransactionTag(Base, Export):
-    __tablename__ = "tagged"
+    __tablename__ = "transactions_tagged"

     id: Mapped[idfk] = mapped_column(primary_key=True, init=False)
     tag: Mapped[str] = mapped_column(ForeignKey(Tag.name), primary_key=True)
@ -281,7 +286,7 @@ categoryselector = Annotated[


 class CategorySelector(Base, Export):
-    __tablename__ = "selector"
+    __tablename__ = "category_selectors"

     id: Mapped[int] = mapped_column(
         BigInteger,
@ -307,8 +312,7 @@ scheduleperiod = Annotated[Selector_T, mapped_column(Enum(Period, inherit_schema


 class CategorySchedule(Base, Export):
-    __table_args__ = {"schema": "category"}
-    __tablename__ = "schedules"
+    __tablename__ = "category_schedules"

     name: Mapped[catfk] = mapped_column(primary_key=True)
     period: Mapped[Optional[scheduleperiod]]
@ -393,8 +397,7 @@ class Rule(Base, Export):


 class CategoryRule(Rule):
-    __table_args__ = {"schema": "category"}
-    __tablename__ = "rules"
+    __tablename__ = "category_rules"

     id: Mapped[int] = mapped_column(
         BigInteger,
@ -417,8 +420,7 @@ class CategoryRule(Rule):


 class TagRule(Rule):
-    __table_args__ = {"schema": "tag"}
-    __tablename__ = "rules"
+    __tablename__ = "tag_rules"

     id: Mapped[int] = mapped_column(
         BigInteger,
@ -1,9 +1,13 @@
 from abc import ABC, abstractmethod
+from datetime import date
+from typing import Sequence

-from pfbudget.db.model import Transaction
+from pfbudget.db.model import Bank, Transaction


-class Extract(ABC):
+class Extractor(ABC):
     @abstractmethod
-    def extract(self) -> list[Transaction]:
-        return NotImplementedError
+    def extract(
+        self, bank: Bank, start: date = date.min, end: date = date.max
+    ) -> Sequence[Transaction]:
+        raise NotImplementedError
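The Extractor ABC now receives the bank and an optional date window. A hypothetical in-memory implementation, sketched here only to show the new contract (it assumes the transaction objects expose a date attribute; none of this is part of the diff):

    from datetime import date
    from typing import Sequence

    from pfbudget.db.model import Bank, BankTransaction, Transaction
    from pfbudget.extract.extract import Extractor

    class InMemoryExtractor(Extractor):
        # Hypothetical extractor serving canned transactions, filtered by the
        # requested window; useful e.g. in tests.
        def __init__(self, canned: Sequence[BankTransaction]):
            self._canned = canned

        def extract(
            self, bank: Bank, start: date = date.min, end: date = date.max
        ) -> Sequence[Transaction]:
            return [t for t in self._canned if start <= t.date <= end]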
@ -1,21 +1,20 @@
-import datetime as dt
-
+from datetime import date
 from typing import Sequence

 from pfbudget.db.model import Bank, BankTransaction
 from pfbudget.utils.converters import convert

 from .exceptions import BankError, DownloadError, ExtractError
-from .extract import Extract
+from .extract import Extractor
 from .nordigen import NordigenClient


-class PSD2Extractor(Extract):
+class PSD2Extractor(Extractor):
     def __init__(self, client: NordigenClient):
         self.__client = client

     def extract(
-        self, bank: Bank, start=dt.date.min, end=dt.date.max
+        self, bank: Bank, start: date = date.min, end: date = date.max
     ) -> Sequence[BankTransaction]:
         if not bank.nordigen:
             raise BankError("Bank doesn't have Nordigen info")
0
pfbudget/load/__init__.py
Normal file
14
pfbudget/load/database.py
Normal file
@ -0,0 +1,14 @@
+from typing import Sequence
+
+from pfbudget.db.client import Client
+from pfbudget.db.model import Transaction
+
+from .load import Loader
+
+
+class DatabaseLoader(Loader):
+    def __init__(self, client: Client) -> None:
+        self.client = client
+
+    def load(self, transactions: Sequence[Transaction]) -> None:
+        self.client.insert(transactions)
10
pfbudget/load/load.py
Normal file
@ -0,0 +1,10 @@
+from abc import ABC, abstractmethod
+from typing import Sequence
+
+from pfbudget.db.model import Transaction
+
+
+class Loader(ABC):
+    @abstractmethod
+    def load(self, transactions: Sequence[Transaction]) -> None:
+        raise NotImplementedError
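Loader is the counterpart abstraction on the load side; DatabaseLoader above is its in-tree implementation. A hypothetical alternative, sketched only to show the contract (the CSV idea and the date/description/amount field names are assumptions, not taken from the model):

    import csv
    from typing import Sequence

    from pfbudget.db.model import Transaction
    from pfbudget.load.load import Loader

    class CsvLoader(Loader):
        # Hypothetical loader writing transactions to a CSV file instead of the DB.
        def __init__(self, path: str) -> None:
            self.path = path

        def load(self, transactions: Sequence[Transaction]) -> None:
            with open(self.path, "w", newline="") as f:
                writer = csv.writer(f)
                for t in transactions:
                    # Attribute names below are assumed for illustration.
                    writer.writerow([t.date, t.description, t.amount])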
@ -1,5 +1,5 @@
 from copy import deepcopy
-from typing import Sequence
+from typing import Iterable, Sequence

 from pfbudget.db.model import (
     CategoryRule,
@ -13,7 +13,7 @@ from .transform import Transformer


 class Categorizer(Transformer):
-    def __init__(self, rules: Sequence[CategoryRule]):
+    def __init__(self, rules: Iterable[CategoryRule]):
         self.rules = rules

     def transform(self, transactions: Sequence[Transaction]) -> Sequence[Transaction]:
@ -1,12 +1,12 @@
 from copy import deepcopy
-from typing import Sequence
+from typing import Iterable, Sequence

 from pfbudget.db.model import TagRule, Transaction, TransactionTag
 from .transform import Transformer


 class Tagger(Transformer):
-    def __init__(self, rules: Sequence[TagRule]):
+    def __init__(self, rules: Iterable[TagRule]):
         self.rules = rules

     def transform(self, transactions: Sequence[Transaction]) -> Sequence[Transaction]:
@ -18,7 +18,7 @@ class Tagger(Transformer):
     def transform_inplace(self, transactions: Sequence[Transaction]) -> None:
         for rule in self.rules:
             for transaction in transactions:
-                if rule.tag in transaction.tags:
+                if rule.tag in [tag.tag for tag in transaction.tags]:
                     continue

                 if not rule.matches(transaction):
@ -6,9 +6,9 @@ from pfbudget.db.model import Transaction

 class Transformer(ABC):
     @abstractmethod
-    def transform(self, _: Sequence[Transaction]) -> Sequence[Transaction]:
+    def transform(self, transactions: Sequence[Transaction]) -> Sequence[Transaction]:
         raise NotImplementedError

     @abstractmethod
-    def transform_inplace(self, _: Sequence[Transaction]) -> None:
+    def transform_inplace(self, transactions: Sequence[Transaction]) -> None:
         raise NotImplementedError
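Both Categorizer and Tagger implement this Transformer interface. A small illustrative helper (hypothetical; module paths are not shown in this diff) chaining several transformers:

    from typing import Protocol, Sequence

    from pfbudget.db.model import Transaction

    class SupportsTransform(Protocol):
        # Structural stand-in for the Transformer ABC above.
        def transform(
            self, transactions: Sequence[Transaction]
        ) -> Sequence[Transaction]: ...

    def run_pipeline(
        steps: Sequence[SupportsTransform], transactions: Sequence[Transaction]
    ) -> Sequence[Transaction]:
        # e.g. a Categorizer followed by a Tagger; the output of one feeds the next.
        for step in steps:
            transactions = step.transform(transactions)
        return transactions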
102
poetry.lock
generated
@ -1,5 +1,25 @@
 # This file is automatically @generated by Poetry and should not be changed by hand.

+[[package]]
+name = "alembic"
+version = "1.10.3"
+description = "A database migration tool for SQLAlchemy."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "alembic-1.10.3-py3-none-any.whl", hash = "sha256:b2e0a6cfd3a8ce936a1168320bcbe94aefa3f4463cd773a968a55071beb3cd37"},
+    {file = "alembic-1.10.3.tar.gz", hash = "sha256:32a69b13a613aeb7e8093f242da60eff9daed13c0df02fff279c1b06c32965d2"},
+]
+
+[package.dependencies]
+Mako = "*"
+SQLAlchemy = ">=1.3.0"
+typing-extensions = ">=4"
+
+[package.extras]
+tz = ["python-dateutil"]
+
 [[package]]
 name = "black"
 version = "23.3.0"
@ -577,6 +597,86 @@ files = [
     {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"},
 ]

+[[package]]
+name = "mako"
+version = "1.2.4"
+description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"},
+    {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=0.9.2"
+
+[package.extras]
+babel = ["Babel"]
+lingua = ["lingua"]
+testing = ["pytest"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+    {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+    {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+    {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+    {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+    {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+    {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
+
 [[package]]
 name = "matplotlib"
 version = "3.7.1"
@ -1258,4 +1358,4 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "cc31ed1ff1c9e446fcadda094494251b015a103df8024c45e442436856977bbb"
+content-hash = "e38058bfbc3f73450af9c72519ab32b96dc12406712ba18688e31deb39e2f2a1"
@ -12,7 +12,7 @@ python = "^3.10"
 codetiming = "^1.4.0"
 matplotlib = "^3.7.1"
 nordigen = "^1.3.1"
-psycopg2 = "^2.9.6"
+psycopg2 = {extras = ["binary"], version = "^2.9.6"}
 python-dateutil = "^2.8.2"
 python-dotenv = "^1.0.0"
 pyyaml = "^6.0"
@ -20,6 +20,7 @@ sqlalchemy = "^2.0.9"


 [tool.poetry.group.dev.dependencies]
+alembic = "^1.10.3"
 black = "^23.3.0"
 flake8 = "^6.0.0"
 mypy = "^1.2.0"
149
tests/test_database.py
Normal file
@ -0,0 +1,149 @@
+from datetime import date
+from decimal import Decimal
+import pytest
+
+from pfbudget.db.client import Client
+from pfbudget.db.model import (
+    AccountType,
+    Bank,
+    Base,
+    CategorySelector,
+    Nordigen,
+    Selector_T,
+    Transaction,
+    TransactionCategory,
+)
+
+
+@pytest.fixture
+def client() -> Client:
+    url = "sqlite://"
+    client = Client(url, execution_options={"schema_translate_map": {"pfbudget": None}})
+    Base.metadata.create_all(client.engine)
+    return client
+
+
+@pytest.fixture
+def banks(client: Client) -> list[Bank]:
+    banks = [
+        Bank("bank", "BANK", AccountType.checking),
+        Bank("broker", "BROKER", AccountType.investment),
+        Bank("creditcard", "CC", AccountType.MASTERCARD),
+    ]
+    banks[0].nordigen = Nordigen("bank", None, "req", None)
+
+    client.insert(banks)
+    return banks
+
+
+@pytest.fixture
+def transactions(client: Client) -> list[Transaction]:
+    transactions = [
+        Transaction(date(2023, 1, 1), "", Decimal("-10")),
+        Transaction(date(2023, 1, 2), "", Decimal("-50")),
+    ]
+    transactions[0].category = TransactionCategory(
+        "name", CategorySelector(Selector_T.algorithm)
+    )
+
+    client.insert(transactions)
+    for i, transaction in enumerate(transactions):
+        transaction.id = i + 1
+        transaction.split = False  # default
+    transactions[0].category.id = 1
+    transactions[0].category.selector.id = 1
+
+    return transactions
+
+
+class TestDatabase:
+    def test_initialization(self, client: Client):
+        pass
+
+    def test_insert_with_session(self, client: Client):
+        transactions = [
+            Transaction(date(2023, 1, 1), "", Decimal("-10")),
+            Transaction(date(2023, 1, 2), "", Decimal("-50")),
+        ]
+
+        with client.session as session:
+            session.insert(transactions)
+            assert session.select(Transaction) == transactions
+
+    def test_insert_transactions(self, client: Client, transactions: list[Transaction]):
+        result = client.select(Transaction)
+        assert result == transactions
+
+    def test_select_transactions_without_category(
+        self, client: Client, transactions: list[Transaction]
+    ):
+        result = client.select(Transaction, lambda: ~Transaction.category.has())
+        assert result == [transactions[1]]
+
+    def test_select_banks(self, client: Client, banks: list[Bank]):
+        result = client.select(Bank)
+        assert result == banks
+
+    def test_select_banks_with_nordigen(self, client: Client, banks: list[Bank]):
+        result = client.select(Bank, Bank.nordigen)
+        assert result == [banks[0]]
+
+    def test_select_banks_by_name(self, client: Client, banks: list[Bank]):
+        name = banks[0].name
+        result = client.select(Bank, lambda: Bank.name == name)
+        assert result == [banks[0]]
+
+        names = [banks[0].name, banks[1].name]
+        result = client.select(Bank, lambda: Bank.name.in_(names))
+        assert result == [banks[0], banks[1]]
+
+    def test_update_bank_with_session(self, client: Client, banks: list[Bank]):
+        with client.session as session:
+            name = banks[0].name
+            bank = session.select(Bank, lambda: Bank.name == name)[0]
+            bank.name = "anotherbank"
+
+        result = client.select(Bank, lambda: Bank.name == "anotherbank")
+        assert len(result) == 1
+
+    def test_update_bank(self, client: Client, banks: list[Bank]):
+        name = banks[0].name
+
+        result = client.select(Bank, lambda: Bank.name == name)
+        assert result[0].type == AccountType.checking
+
+        update = {"name": name, "type": AccountType.savings}
+        client.update(Bank, [update])
+
+        result = client.select(Bank, lambda: Bank.name == name)
+        assert result[0].type == AccountType.savings
+
+    def test_update_nordigen(self, client: Client, banks: list[Bank]):
+        name = banks[0].name
+
+        result = client.select(Nordigen, lambda: Nordigen.name == name)
+        assert result[0].requisition_id == "req"
+
+        update = {"name": name, "requisition_id": "anotherreq"}
+        client.update(Nordigen, [update])
+
+        result = client.select(Nordigen, lambda: Nordigen.name == name)
+        assert result[0].requisition_id == "anotherreq"
+
+        result = client.select(Bank, lambda: Bank.name == name)
+        assert getattr(result[0].nordigen, "requisition_id", None) == "anotherreq"
+
+    def test_remove_bank(self, client: Client, banks: list[Bank]):
+        name = banks[0].name
+
+        result = client.select(Bank)
+        assert len(result) == 3
+
+        client.delete(Bank, Bank.name, [name])
+        result = client.select(Bank)
+        assert len(result) == 2
+
+        names = [banks[1].name, banks[2].name]
+        client.delete(Bank, Bank.name, names)
+        result = client.select(Bank)
+        assert len(result) == 0
38
tests/test_load.py
Normal file
@ -0,0 +1,38 @@
+from datetime import date
+from decimal import Decimal
+from typing import Sequence
+import pytest
+
+from pfbudget.db.client import Client
+from pfbudget.db.model import BankTransaction, Transaction
+from pfbudget.load.database import DatabaseLoader
+from pfbudget.load.load import Loader
+
+
+class FakeDatabaseClient(Client):
+    def __init__(self, url: str) -> None:
+        super().__init__(url)
+
+    def insert(self, transactions: Sequence[Transaction]) -> None:
+        pass
+
+
+@pytest.fixture
+def loader() -> Loader:
+    url = "postgresql://user:pass@127.0.0.1:5432/db"
+    client = FakeDatabaseClient(url)
+    return DatabaseLoader(client)
+
+
+class TestDatabaseLoad:
+    def test_empty_url(self):
+        with pytest.raises(AssertionError):
+            _ = FakeDatabaseClient("")
+
+    def test_insert(self, loader: Loader):
+        transactions = [
+            BankTransaction(date(2023, 1, 1), "", Decimal("-500"), "Bank#1"),
+            BankTransaction(date(2023, 1, 2), "", Decimal("500"), "Bank#2"),
+        ]
+
+        loader.load(transactions)
@ -1,30 +1,32 @@
 import datetime as dt
 from decimal import Decimal
+from typing import Any, Optional
 import pytest
 import requests

 import mocks.nordigen as mock

-from pfbudget.db.model import Bank, BankTransaction, Nordigen
+from pfbudget.db.model import AccountType, Bank, BankTransaction, Nordigen
 from pfbudget.extract.exceptions import BankError, CredentialsError
+from pfbudget.extract.extract import Extractor
 from pfbudget.extract.nordigen import NordigenClient, NordigenCredentials
 from pfbudget.extract.psd2 import PSD2Extractor


 class MockGet:
-    def __init__(self, mock_exception=None):
+    def __init__(self, mock_exception: Optional[Exception] = None):
         self._status_code = 200
         self._mock_exception = mock_exception

-    def __call__(self, *args, **kwargs):
+    def __call__(self, *args: Any, **kwargs: Any):
         if self._mock_exception:
             raise self._mock_exception

-        self._headers = kwargs["headers"]
+        self._headers: dict[str, str] = kwargs["headers"]
         if "Authorization" not in self._headers or not self._headers["Authorization"]:
             self._status_code = 401

-        self.url = kwargs["url"]
+        self.url: str = kwargs["url"]
         return self

     @property
@ -47,7 +49,7 @@ class MockGet:


 @pytest.fixture(autouse=True)
-def mock_requests(monkeypatch):
+def mock_requests(monkeypatch: pytest.MonkeyPatch):
     monkeypatch.setattr("requests.get", MockGet())
     monkeypatch.delattr("requests.post")
     monkeypatch.delattr("requests.put")
@ -55,14 +57,14 @@ def mock_requests(monkeypatch):


 @pytest.fixture
-def extractor() -> NordigenClient:
+def extractor() -> Extractor:
     credentials = NordigenCredentials("ID", "KEY", "TOKEN")
     return PSD2Extractor(NordigenClient(credentials))


 @pytest.fixture
-def bank() -> list[Bank]:
-    bank = Bank("Bank#1", "", "")
+def bank() -> Bank:
+    bank = Bank("Bank#1", "", AccountType.checking)
     bank.nordigen = Nordigen("", "", mock.id, False)
     return bank

@ -73,18 +75,20 @@ class TestExtractPSD2:
         with pytest.raises(CredentialsError):
             NordigenClient(cred)

-    def test_no_psd2_bank(self, extractor):
+    def test_no_psd2_bank(self, extractor: Extractor):
         with pytest.raises(BankError):
-            extractor.extract(Bank("", "", ""))
+            extractor.extract(Bank("", "", AccountType.checking))

-    def test_timeout(self, monkeypatch, extractor, bank):
+    def test_timeout(
+        self, monkeypatch: pytest.MonkeyPatch, extractor: Extractor, bank: Bank
+    ):
         monkeypatch.setattr(
-            "requests.get", MockGet(mock_exception=requests.ReadTimeout)
+            "requests.get", MockGet(mock_exception=requests.ReadTimeout())
         )
         with pytest.raises(requests.Timeout):
             extractor.extract(bank)

-    def test_extract(self, extractor, bank):
+    def test_extract(self, extractor: Extractor, bank: Bank):
         assert extractor.extract(bank) == [
             BankTransaction(
                 dt.date(2023, 1, 14), "string", Decimal("328.18"), "Bank#1"
@ -4,7 +4,6 @@ from decimal import Decimal
 import mocks.categories as mock

 from pfbudget.db.model import (
-    Bank,
     BankTransaction,
     CategoryRule,
     CategorySelector,
@ -102,7 +101,7 @@ class TestTransform:
         assert not t.category

         categorizer: Transformer = Categorizer(mock.category1.rules)
-        transactions: Transformer = categorizer.transform(transactions)
+        transactions = categorizer.transform(transactions)

         for t in transactions:
             assert t.category == TransactionCategory(