Compare commits

..

No commits in common. "fd6793b4f426a52b40fde385243a5ddfe6878ad8" and "1cce7d421e8a18f2614b06e7347df94275e0639f" have entirely different histories.

15 changed files with 280 additions and 503 deletions

View File

@ -1,152 +0,0 @@
"""Rule inheritance
Revision ID: 6b293f78cc97
Revises: 37d80de801a7
Create Date: 2023-01-22 20:05:32.887092+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "6b293f78cc97"
down_revision = "37d80de801a7"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Introduce rule inheritance: create a shared ``rules`` parent table.

    The common rule attributes (date, description, regex, bank, min, max,
    type) move out of the two child tables (``categories_rules`` and
    ``tag_rules``) into the new ``transactions.rules`` table; each child
    keeps only its ``id``, which becomes a foreign key onto ``rules.id``.

    NOTE(review): the child-table rows are not backfilled into ``rules``
    here, so any existing rule data in the dropped columns is lost by this
    migration — presumably acceptable at this point in the schema's life;
    confirm before running against a populated database.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Parent table holding the attributes shared by every kind of rule.
    # "type" is the polymorphic discriminator (e.g. category vs tag rule).
    op.create_table(
        "rules",
        sa.Column("id", sa.BigInteger(), autoincrement=True, nullable=False),
        sa.Column("date", sa.Date(), nullable=True),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("regex", sa.String(), nullable=True),
        sa.Column("bank", sa.String(), nullable=True),
        sa.Column("min", sa.Numeric(precision=16, scale=2), nullable=True),
        sa.Column("max", sa.Numeric(precision=16, scale=2), nullable=True),
        sa.Column("type", sa.String(), nullable=False),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_rules")),
        schema="transactions",
    )
    # categories_rules.id now references rules.id; deleting a parent rule
    # cascades to the child row.
    op.create_foreign_key(
        op.f("fk_categories_rules_id_rules"),
        "categories_rules",
        "rules",
        ["id"],
        ["id"],
        source_schema="transactions",
        referent_schema="transactions",
        ondelete="CASCADE",
    )
    # Drop the columns that have been hoisted into the parent table.
    op.drop_column("categories_rules", "bank", schema="transactions")
    op.drop_column("categories_rules", "min", schema="transactions")
    op.drop_column("categories_rules", "date", schema="transactions")
    op.drop_column("categories_rules", "regex", schema="transactions")
    op.drop_column("categories_rules", "description", schema="transactions")
    op.drop_column("categories_rules", "max", schema="transactions")
    # Same treatment for tag_rules: FK onto rules.id with cascade delete.
    op.create_foreign_key(
        op.f("fk_tag_rules_id_rules"),
        "tag_rules",
        "rules",
        ["id"],
        ["id"],
        source_schema="transactions",
        referent_schema="transactions",
        ondelete="CASCADE",
    )
    op.drop_column("tag_rules", "bank", schema="transactions")
    op.drop_column("tag_rules", "min", schema="transactions")
    op.drop_column("tag_rules", "date", schema="transactions")
    op.drop_column("tag_rules", "regex", schema="transactions")
    op.drop_column("tag_rules", "description", schema="transactions")
    op.drop_column("tag_rules", "max", schema="transactions")
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert rule inheritance: restore per-table rule columns.

    Re-adds the attribute columns (max, description, regex, date, min, bank)
    to ``tag_rules`` and ``categories_rules``, drops the foreign keys onto
    the shared ``rules`` table, and finally drops ``rules`` itself.

    NOTE(review): data in ``rules`` is not copied back into the re-created
    child columns, so this downgrade loses rule attributes — confirm that
    is acceptable before downgrading a populated database.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore tag_rules' own attribute columns (uppercase dialect types,
    # as Alembic reflected them from PostgreSQL).
    op.add_column(
        "tag_rules",
        sa.Column(
            "max", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        schema="transactions",
    )
    op.add_column(
        "tag_rules",
        sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "tag_rules",
        sa.Column("regex", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "tag_rules",
        sa.Column("date", sa.DATE(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "tag_rules",
        sa.Column(
            "min", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        schema="transactions",
    )
    op.add_column(
        "tag_rules",
        sa.Column("bank", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    # Detach tag_rules from the shared parent table.
    op.drop_constraint(
        op.f("fk_tag_rules_id_rules"),
        "tag_rules",
        schema="transactions",
        type_="foreignkey",
    )
    # Restore categories_rules' own attribute columns.
    op.add_column(
        "categories_rules",
        sa.Column(
            "max", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        schema="transactions",
    )
    op.add_column(
        "categories_rules",
        sa.Column("description", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "categories_rules",
        sa.Column("regex", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "categories_rules",
        sa.Column("date", sa.DATE(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    op.add_column(
        "categories_rules",
        sa.Column(
            "min", sa.NUMERIC(precision=16, scale=2), autoincrement=False, nullable=True
        ),
        schema="transactions",
    )
    op.add_column(
        "categories_rules",
        sa.Column("bank", sa.VARCHAR(), autoincrement=False, nullable=True),
        schema="transactions",
    )
    # Detach categories_rules from the shared parent table.
    op.drop_constraint(
        op.f("fk_categories_rules_id_rules"),
        "categories_rules",
        schema="transactions",
        type_="foreignkey",
    )
    # Both FKs are gone; the parent table can now be dropped safely.
    op.drop_table("rules", schema="transactions")
    # ### end Alembic commands ###

View File

@ -9,4 +9,4 @@ from pfbudget.cli.runnable import argparser
from pfbudget.input.parsers import parse_data
from pfbudget.utils.utils import parse_args_period
import pfbudget.db.model as t
import pfbudget.db.model as types

View File

@ -14,7 +14,7 @@ if __name__ == "__main__":
assert "verbose" in args, "No verbose level specified"
verbosity = args.pop("verbose")
params = []
params = None
match (op):
case pfbudget.Operation.Parse:
keys = {"path", "bank", "creditcard"}
@ -45,7 +45,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.Bank(
pfbudget.types.Bank(
args["bank"][0],
args["bic"][0],
args["type"][0],
@ -73,7 +73,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.Nordigen(
pfbudget.types.Nordigen(
args["bank"][0],
args["bank_id"][0] if args["bank_id"] else None,
args["requisition_id"][0] if args["requisition_id"] else None,
@ -110,27 +110,27 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.Category(cat, args["group"]) for cat in args["category"]
pfbudget.types.Category(cat, args["group"]) for cat in args["category"]
]
case pfbudget.Operation.CategoryUpdate:
keys = {"category", "group"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [pfbudget.t.Category(cat) for cat in args["category"]]
params = [pfbudget.types.Category(cat) for cat in args["category"]]
params.append(args["group"])
case pfbudget.Operation.CategoryRemove:
assert "category" in args, "argparser ill defined"
params = [pfbudget.t.Category(cat) for cat in args["category"]]
params = [pfbudget.types.Category(cat) for cat in args["category"]]
case pfbudget.Operation.CategorySchedule:
keys = {"category", "period", "frequency"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.CategorySchedule(
cat, args["period"][0], args["frequency"][0], None
pfbudget.types.CategorySchedule(
cat, True, args["period"][0], args["frequency"][0]
)
for cat in args["category"]
]
@ -140,7 +140,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.CategoryRule(
pfbudget.types.CategoryRule(
args["date"][0] if args["date"] else None,
args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None,
@ -184,14 +184,14 @@ if __name__ == "__main__":
keys = {"tag"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [pfbudget.t.Tag(tag) for tag in args["tag"]]
params = [pfbudget.types.Tag(tag) for tag in args["tag"]]
case pfbudget.Operation.TagRuleAdd:
keys = {"tag", "date", "description", "bank", "min", "max"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.TagRule(
pfbudget.types.TagRule(
args["date"][0] if args["date"] else None,
args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None,
@ -218,21 +218,31 @@ if __name__ == "__main__":
case pfbudget.Operation.GroupAdd:
assert "group" in args, "argparser ill defined"
params = [pfbudget.t.CategoryGroup(group) for group in args["group"]]
params = [pfbudget.types.CategoryGroup(group) for group in args["group"]]
case pfbudget.Operation.GroupRemove:
assert "group" in args, "argparser ill defined"
params = [pfbudget.t.CategoryGroup(group) for group in args["group"]]
params = [pfbudget.types.CategoryGroup(group) for group in args["group"]]
case pfbudget.Operation.Forge | pfbudget.Operation.Dismantle:
keys = {"original", "links"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.t.Link(args["original"][0], link) for link in args["links"]
pfbudget.types.Link(args["original"][0], link) for link in args["links"]
]
case pfbudget.Operation.Export | pfbudget.Operation.Import | pfbudget.Operation.ExportCategoryRules | pfbudget.Operation.ImportCategoryRules | pfbudget.Operation.ExportTagRules | pfbudget.Operation.ImportTagRules:
case pfbudget.Operation.Export:
keys = {"interval", "start", "end", "year", "all", "banks", "file"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
start, end = pfbudget.parse_args_period(args)
params = [start, end]
if not args["all"]:
params.append(args["banks"])
params.append(args["file"][0])
case pfbudget.Operation.Import:
keys = {"file"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"

View File

@ -1,13 +1,12 @@
from dotenv import load_dotenv
import argparse
import datetime as dt
import decimal
from dotenv import load_dotenv
import os
import re
from pfbudget.common.types import Operation
from pfbudget.db.model import AccountType, Period
from pfbudget.db.sqlite import DatabaseClient
import pfbudget.reporting.graph
import pfbudget.reporting.report
@ -39,43 +38,50 @@ def argparser() -> argparse.ArgumentParser:
help="select current database",
default=DEFAULT_DB,
)
universal.add_argument("-v", "--verbose", action="count", default=0)
period = argparse.ArgumentParser(add_help=False)
period_group = period.add_mutually_exclusive_group()
period_group.add_argument(
period = argparse.ArgumentParser(add_help=False).add_mutually_exclusive_group()
period.add_argument(
"--interval", type=str, nargs=2, help="graph interval", metavar=("START", "END")
)
period_group.add_argument("--start", type=str, nargs=1, help="graph start date")
period_group.add_argument("--end", type=str, nargs=1, help="graph end date")
period_group.add_argument("--year", type=str, nargs=1, help="graph year")
period.add_argument("--start", type=str, nargs=1, help="graph start date")
period.add_argument("--end", type=str, nargs=1, help="graph end date")
period.add_argument("--year", type=str, nargs=1, help="graph year")
parser = argparse.ArgumentParser(
description="does cool finance stuff",
parents=[universal],
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
if version := re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]', open("pfbudget/__init__.py").read()
):
parser.add_argument(
"--version",
action="version",
version=version.group(1),
version=re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
open("pfbudget/__init__.py").read(),
).group(1),
)
subparsers = parser.add_subparsers(required=True)
# TODO Init
# init = subparsers.add_parser("init")
# init.set_defaults(op=Operation.Init)
"""
Init
"""
p_init = subparsers.add_parser(
"init",
description="Initializes the SQLite3 database",
parents=[universal],
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
p_init.set_defaults(command=Operation.Init)
# Exports transactions to .csv file
export = subparsers.add_parser("export")
export = subparsers.add_parser("export", parents=[period])
export.set_defaults(op=Operation.Export)
export_args(export)
export.add_argument("file", nargs=1, type=str)
export_banks = export.add_mutually_exclusive_group()
export_banks.add_argument("--all", action="store_true")
export_banks.add_argument("--banks", nargs="+", type=str)
pimport = subparsers.add_parser("import")
pimport.set_defaults(op=Operation.Import)
@ -203,6 +209,11 @@ def report(args):
pfbudget.reporting.report.detailed(DatabaseClient(args.database), start, end)
# def nordigen_banks(manager: Manager, args):
# input = NordigenInput(manager)
# input.list(vars(args)["country"][0])
def bank(parser: argparse.ArgumentParser):
commands = parser.add_subparsers(required=True)
@ -310,14 +321,6 @@ def category_rule(parser: argparse.ArgumentParser):
rules(modify)
modify.add_argument("--remove", nargs="*", default=[], type=str)
export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportCategoryRules)
export_args(export)
pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportCategoryRules)
export_args(pimport)
def tags(parser: argparse.ArgumentParser):
commands = parser.add_subparsers(required=True)
@ -352,14 +355,6 @@ def tag_rule(parser: argparse.ArgumentParser):
modify.add_argument("--tag", nargs=1, type=str)
rules(modify)
export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportTagRules)
export_args(export)
pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportTagRules)
export_args(pimport)
def rules(parser: argparse.ArgumentParser):
parser.add_argument("--date", nargs=1, type=dt.date.fromisoformat)
@ -382,7 +377,3 @@ def link(parser: argparse.ArgumentParser):
dismantle.set_defaults(op=Operation.Dismantle)
dismantle.add_argument("original", nargs=1, type=int)
dismantle.add_argument("links", nargs="+", type=int)
def export_args(parser: argparse.ArgumentParser):
parser.add_argument("file", nargs=1, type=str)

View File

@ -37,10 +37,6 @@ class Operation(Enum):
NordigenCountryBanks = auto()
Export = auto()
Import = auto()
ExportCategoryRules = auto()
ImportCategoryRules = auto()
ExportTagRules = auto()
ImportTagRules = auto()
class TransactionError(Exception):

View File

@ -1,8 +1,17 @@
from pfbudget.db.model import (
Category,
CategorySelector,
Selector,
Tag,
Transaction,
TransactionCategory,
TransactionTag,
)
from codetiming import Timer
from datetime import timedelta
from typing import Sequence
import pfbudget.db.model as t
Transactions = list[Transaction]
class Categorizer:
@ -13,9 +22,9 @@ class Categorizer:
def rules(
self,
transactions: Sequence[t.BankTransaction],
categories: Sequence[t.Category],
tags: Sequence[t.Tag],
transactions: Transactions,
categories: list[Category],
tags: list[Tag],
):
"""Overarching categorization tool
@ -23,9 +32,9 @@ class Categorizer:
to the rules defined for each category
Args:
transactions (Sequence[BankTransaction]): uncategorized transactions
categories (Sequence[Category]): available categories
tags (Sequence[Tag]): currently available tags
transactions (list[Transaction]): uncategorized transactions
categories (list[Category]): available categories
tags (list[Tag]): currently available tags
"""
self._nullify(transactions)
@ -35,21 +44,21 @@ class Categorizer:
def manual(
self,
transactions: Sequence[t.Transaction],
categories: Sequence[t.Category],
tags: Sequence[t.Tag],
transactions: Transactions,
categories: list[Category],
tags: list[Tag],
):
"""Manual categorization input
Args:
transactions (Sequence[Transaction]): uncategorized transactions
categories (Sequence[Category]): available categories
tags (Sequence[Tag]): currently available tags
transactions (list[Transaction]): uncategorized transactions
categories (list[Category]): available categories
tags (list[Tag]): currently available tags
"""
self._manual(transactions)
@Timer(name="nullify")
def _nullify(self, transactions: Sequence[t.BankTransaction]):
def _nullify(self, transactions: Transactions):
count = 0
matching = []
for transaction in transactions:
@ -67,13 +76,11 @@ class Categorizer:
and cancel.amount == -transaction.amount
)
):
transaction.category = t.TransactionCategory(
name="null",
selector=t.CategorySelector(t.Selector.nullifier),
transaction.category = TransactionCategory(
name="null", selector=CategorySelector(Selector.nullifier)
)
cancel.category = t.TransactionCategory(
name="null",
selector=t.CategorySelector(t.Selector.nullifier),
cancel.category = TransactionCategory(
name="null", selector=CategorySelector(Selector.nullifier)
)
matching.extend([transaction, cancel])
count += 2
@ -83,9 +90,7 @@ class Categorizer:
@Timer(name="categoryrules")
def _rule_based_categories(
self,
transactions: Sequence[t.BankTransaction],
categories: Sequence[t.Category],
self, transactions: Transactions, categories: list[Category]
):
d = {}
for category in [c for c in categories if c.rules]:
@ -109,10 +114,10 @@ class Categorizer:
== "y"
):
transaction.category.name = category.name
transaction.category.selector.selector = t.Selector.rules
transaction.category.selector.selector = Selector.rules
else:
transaction.category = t.TransactionCategory(
category.name, t.CategorySelector(t.Selector.rules)
transaction.category = TransactionCategory(
category.name, CategorySelector(Selector.rules)
)
if rule in d:
@ -124,11 +129,9 @@ class Categorizer:
print(f"{v}: {k}")
@Timer(name="tagrules")
def _rule_based_tags(
self, transactions: Sequence[t.BankTransaction], tags: Sequence[t.Tag]
):
def _rule_based_tags(self, transactions: Transactions, tags: list[Tag]):
d = {}
for tag in [t for t in tags if len(t.rules) > 0]:
for tag in [t for t in tags if t.rules]:
for rule in tag.rules:
# for transaction in [t for t in transactions if not t.category]:
for transaction in [
@ -140,9 +143,9 @@ class Categorizer:
continue
if not transaction.tags:
transaction.tags = {t.TransactionTag(tag.name)}
transaction.tags = {TransactionTag(tag.name)}
else:
transaction.tags.add(t.TransactionTag(tag.name))
transaction.tags.add(TransactionTag(tag.name))
if rule in d:
d[rule] += 1
@ -152,7 +155,7 @@ class Categorizer:
for k, v in d.items():
print(f"{v}: {k}")
def _manual(self, transactions: Sequence[t.Transaction]):
def _manual(self, transactions: Transactions):
uncategorized = [t for t in transactions if not t.category]
print(f"{len(uncategorized)} transactions left to categorize")
@ -164,8 +167,8 @@ class Categorizer:
if not category:
print("{category} doesn't exist")
continue
transaction.category = t.TransactionCategory(
category, t.CategorySelector(t.Selector.manual)
transaction.category = TransactionCategory(
category, CategorySelector(Selector.manual)
)
break

View File

@ -1,5 +1,4 @@
from pathlib import Path
import pickle
import webbrowser
from pfbudget.common.types import Operation
@ -7,22 +6,19 @@ from pfbudget.core.categorizer import Categorizer
from pfbudget.db.client import DbClient
from pfbudget.db.model import (
Bank,
BankTransaction,
Category,
CategoryGroup,
CategoryRule,
CategorySelector,
Link,
MoneyTransaction,
Nordigen,
Rule,
Tag,
TagRule,
Transaction,
TransactionCategory,
)
from pfbudget.input.nordigen import NordigenInput
from pfbudget.input.parsers import parse_data
from pfbudget.output.csv import CSV
from pfbudget.output.output import Output
class Manager:
@ -42,21 +38,18 @@ class Manager:
# Adapter for the parse_data method. Can be refactored.
args = {"bank": params[1], "creditcard": params[2], "category": None}
transactions = []
for path in [Path(p) for p in params[0]]:
if path.is_dir():
for file in path.iterdir():
for path in params[0]:
if (dir := Path(path)).is_dir():
for file in dir.iterdir():
transactions.extend(self.parse(file, args))
elif path.is_file():
elif Path(path).is_file():
transactions.extend(self.parse(path, args))
else:
raise FileNotFoundError(path)
if (
len(transactions) > 0
and input(f"{transactions[:5]}\nCommit? (y/n)") == "y"
):
with self.db.session() as session:
session.add(sorted(transactions))
print(transactions)
if len(transactions) > 0 and input("Commit? (y/n)") == "y":
self.add_transactions(sorted(transactions))
case Operation.Download:
client = NordigenInput()
@ -72,15 +65,14 @@ class Manager:
# dry-run
if not params[2]:
with self.db.session() as session:
session.add(sorted(transactions))
self.add_transactions(transactions)
else:
print(transactions)
case Operation.Categorize:
with self.db.session() as session:
uncategorized = session.get(
BankTransaction, ~BankTransaction.category.has()
Transaction, ~Transaction.category.has()
)
categories = session.get(Category)
tags = session.get(Tag)
@ -160,7 +152,7 @@ class Manager:
case Operation.GroupAdd:
with self.db.session() as session:
session.add(params)
session.add(CategoryGroup(params))
case Operation.GroupRemove:
assert all(isinstance(param, CategoryGroup) for param in params)
@ -172,8 +164,6 @@ class Manager:
session.add(params)
case Operation.Dismantle:
assert all(isinstance(param, Link) for param in params)
with self.db.session() as session:
original = params[0].original
links = [link.link for link in params]
@ -181,37 +171,20 @@ class Manager:
case Operation.Export:
with self.db.session() as session:
self.dump(params[0], sorted(session.get(Transaction)))
if len(params) < 4:
banks = [bank.name for bank in session.get(Bank)]
transactions = session.transactions(params[0], params[1], banks)
else:
transactions = session.transactions(
params[0], params[1], params[2]
)
csvwriter: Output = CSV(params[-1])
csvwriter.report(transactions)
case Operation.Import:
transactions = []
for row in self.load(params[0]):
match row["type"]:
case "bank":
transaction = BankTransaction(
row["date"],
row["description"],
row["amount"],
row["bank"],
False,
)
case "money":
transaction = MoneyTransaction(
row["date"], row["description"], row["amount"], False
)
# TODO case "split" how to match to original transaction?? also save ids?
case _:
continue
if category := row.pop("category", None):
transaction.category = TransactionCategory(
category["name"],
CategorySelector(category["selector"]["selector"]),
)
transactions.append(transaction)
csvwriter: Output = CSV(params[0]) # Output is strange here
transactions = csvwriter.load()
if (
len(transactions) > 0
@ -223,46 +196,33 @@ class Manager:
with self.db.session() as session:
session.add(transactions)
case Operation.ExportCategoryRules:
with self.db.session() as session:
self.dump(params[0], session.get(CategoryRule))
# def init(self):
# client = DatabaseClient(self.__db)
# client.init()
case Operation.ImportCategoryRules:
rules = [CategoryRule(**row) for row in self.load(params[0])]
# def register(self):
# bank = Bank(self.args["bank"][0], "", self.args["requisition"][0], self.args["invert"])
# client = DatabaseClient(self.__db)
# client.register_bank(convert(bank))
if (
len(rules) > 0
and input(f"{rules[:5]}\nDoes the import seem correct? (y/n)")
== "y"
):
with self.db.session() as session:
session.add(rules)
# def unregister(self):
# client = DatabaseClient(self.__db)
# client.unregister_bank(self.args["bank"][0])
case Operation.ExportTagRules:
with self.db.session() as session:
self.dump(params[0], session.get(TagRule))
case Operation.ImportTagRules:
rules = [TagRule(**row) for row in self.load(params[0])]
if (
len(rules) > 0
and input(f"{rules[:5]}\nDoes the import seem correct? (y/n)")
== "y"
):
with self.db.session() as session:
session.add(rules)
def parse(self, filename: Path, args: dict):
def parse(self, filename: str, args: dict):
return parse_data(filename, args)
def dump(self, fn, sequence):
with open(fn, "wb") as f:
pickle.dump([e.format for e in sequence], f)
# def transactions() -> list[Transaction]:
# pass
def load(self, fn):
with open(fn, "rb") as f:
return pickle.load(f)
def add_transactions(self, transactions):
with self.db.session() as session:
session.add(transactions)
# def get_bank_by(self, key: str, value: str) -> Bank:
# client = DatabaseClient(self.__db)
# bank = client.get_bank(key, value)
# return convert(bank)
@property
def db(self) -> DbClient:

View File

@ -1,16 +1,25 @@
from dataclasses import asdict
from datetime import date
from sqlalchemy import create_engine, delete, select, update
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import Session
from typing import Sequence, Type, TypeVar
from pfbudget.db.model import (
Category,
CategoryGroup,
CategoryRule,
CategorySchedule,
Link,
Tag,
TagRule,
Transaction,
)
# import logging
# logging.basicConfig()
# logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
class DbClient:
"""
@ -44,9 +53,7 @@ class DbClient:
def expunge_all(self):
self.__session.expunge_all()
T = TypeVar("T")
def get(self, type: Type[T], column=None, values=None) -> Sequence[T]:
def get(self, type, column=None, values=None):
if column is not None:
if values:
stmt = select(type).where(column.in_(values))
@ -60,7 +67,7 @@ class DbClient:
def add(self, rows: list):
self.__session.add_all(rows)
def remove_by_name(self, type, rows: list):
def remove_by_name(self, type: Category | Tag | Transaction, rows: list):
stmt = delete(type).where(type.name.in_([row.name for row in rows]))
self.__session.execute(stmt)
@ -84,7 +91,7 @@ class DbClient:
)
self.__session.execute(stmt)
def remove_by_id(self, type, ids: list[int]):
def remove_by_id(self, type: CategoryRule | TagRule, ids: list[int]):
stmt = delete(type).where(type.id.in_(ids))
self.__session.execute(stmt)
@ -92,11 +99,19 @@ class DbClient:
print(type, values)
self.__session.execute(update(type), values)
def remove_links(self, original: int, links: list[int]):
def remove_links(self, original, links: list):
stmt = delete(Link).where(
Link.original == original, Link.link.in_(link for link in links)
)
self.__session.execute(stmt)
def transactions(self, min: date, max: date, banks: list[str]):
stmt = select(Transaction).where(
Transaction.date >= min,
Transaction.date <= max,
Transaction.bank.in_(banks),
)
return self.__session.scalars(stmt).all()
def session(self) -> ClientSession:
return self.ClientSession(self.engine)

View File

@ -1,9 +1,4 @@
from __future__ import annotations
import datetime as dt
import decimal
import enum
import re
from typing import Annotated, Any, Optional
from sqlalchemy import (
BigInteger,
@ -22,6 +17,12 @@ from sqlalchemy.orm import (
relationship,
)
from decimal import Decimal
from typing import Annotated, Optional
import datetime as dt
import enum
import re
class Base(MappedAsDataclass, DeclarativeBase):
__table_args__ = {"schema": "transactions"}
@ -51,12 +52,6 @@ accounttype = Annotated[
]
class Export:
@property
def format(self) -> dict[str, Any]:
raise NotImplementedError
class Bank(Base):
__tablename__ = "banks"
@ -64,16 +59,16 @@ class Bank(Base):
BIC: Mapped[str] = mapped_column(String(8), primary_key=True)
type: Mapped[accounttype] = mapped_column(primary_key=True)
nordigen: Mapped[Optional[Nordigen]] = relationship(lazy="joined", init=False)
nordigen: Mapped[Optional[Nordigen]] = relationship(lazy="joined")
bankfk = Annotated[str, mapped_column(Text, ForeignKey(Bank.name))]
idpk = Annotated[int, mapped_column(BigInteger, primary_key=True, autoincrement=True)]
money = Annotated[decimal.Decimal, mapped_column(Numeric(16, 2))]
money = Annotated[Decimal, mapped_column(Numeric(16, 2))]
class Transaction(Base, Export):
class Transaction(Base):
__tablename__ = "originals"
id: Mapped[idpk] = mapped_column(init=False)
@ -85,22 +80,10 @@ class Transaction(Base, Export):
category: Mapped[Optional[TransactionCategory]] = relationship(init=False)
note: Mapped[Optional[Note]] = relationship(init=False)
tags: Mapped[set[TransactionTag]] = relationship(init=False)
tags: Mapped[Optional[set[TransactionTag]]] = relationship(init=False)
__mapper_args__ = {"polymorphic_on": "type", "polymorphic_identity": "transaction"}
@property
def format(self) -> dict[str, Any]:
return dict(
date=self.date,
description=self.description,
amount=self.amount,
type=self.type,
category=self.category.format if self.category else None,
# TODO note
tags=[tag.format for tag in self.tags] if self.tags else None,
)
def __lt__(self, other: Transaction):
return self.date < other.date
@ -110,20 +93,17 @@ idfk = Annotated[
]
class BankTransaction(Transaction):
bank: Mapped[bankfk] = mapped_column(nullable=True)
class IsSplit:
split: Mapped[bool] = mapped_column(use_existing_column=True, nullable=True)
class BankTransaction(IsSplit, Transaction):
bank: Mapped[bankfk] = mapped_column(nullable=True)
__mapper_args__ = {"polymorphic_identity": "bank", "polymorphic_load": "inline"}
@property
def format(self) -> dict[str, Any]:
return super().format | dict(bank=self.bank)
class MoneyTransaction(Transaction):
split: Mapped[bool] = mapped_column(use_existing_column=True, nullable=True)
class MoneyTransaction(IsSplit, Transaction):
__mapper_args__ = {"polymorphic_identity": "money"}
@ -132,10 +112,6 @@ class SplitTransaction(Transaction):
__mapper_args__ = {"polymorphic_identity": "split", "polymorphic_load": "inline"}
@property
def format(self) -> dict[str, Any]:
return super().format | dict(original=self.original)
class CategoryGroup(Base):
__tablename__ = "categories_groups"
@ -151,11 +127,11 @@ class Category(Base):
ForeignKey(CategoryGroup.name), default=None
)
rules: Mapped[set[CategoryRule]] = relationship(
rules: Mapped[Optional[set[CategoryRule]]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default_factory=set
)
schedule: Mapped[Optional[CategorySchedule]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default=None
schedule: Mapped[CategorySchedule] = relationship(
back_populates="category", default=None
)
def __repr__(self) -> str:
@ -168,19 +144,16 @@ catfk = Annotated[
]
class TransactionCategory(Base, Export):
class TransactionCategory(Base):
__tablename__ = "categorized"
id: Mapped[idfk] = mapped_column(primary_key=True, init=False)
name: Mapped[catfk]
selector: Mapped[CategorySelector] = relationship(
cascade="all, delete-orphan", lazy="joined"
)
selector: Mapped[CategorySelector] = relationship(cascade="all, delete-orphan")
@property
def format(self):
return dict(name=self.name, selector=self.selector.format)
def __repr__(self) -> str:
return f"Category({self.name})"
class Note(Base):
@ -204,21 +177,17 @@ class Tag(Base):
name: Mapped[str] = mapped_column(primary_key=True)
rules: Mapped[set[TagRule]] = relationship(
rules: Mapped[Optional[set[TagRule]]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default_factory=set
)
class TransactionTag(Base, Export):
class TransactionTag(Base):
__tablename__ = "tags"
id: Mapped[idfk] = mapped_column(primary_key=True, init=False)
tag: Mapped[str] = mapped_column(ForeignKey(Tag.name), primary_key=True)
@property
def format(self):
return dict(tag=self.tag)
def __hash__(self):
return hash(self.id)
@ -238,7 +207,7 @@ categoryselector = Annotated[
]
class CategorySelector(Base, Export):
class CategorySelector(Base):
__tablename__ = "categories_selector"
id: Mapped[int] = mapped_column(
@ -249,10 +218,6 @@ class CategorySelector(Base, Export):
)
selector: Mapped[categoryselector]
@property
def format(self):
return dict(selector=self.selector)
class Period(enum.Enum):
daily = "daily"
@ -272,6 +237,8 @@ class CategorySchedule(Base):
period_multiplier: Mapped[Optional[int]]
amount: Mapped[Optional[int]]
category: Mapped[Category] = relationship(back_populates="schedule")
class Link(Base):
__tablename__ = "links"
@ -280,10 +247,7 @@ class Link(Base):
link: Mapped[idfk] = mapped_column(primary_key=True)
class Rule(Base, Export):
__tablename__ = "rules"
id: Mapped[idpk] = mapped_column(init=False)
class Rule:
date: Mapped[Optional[dt.date]]
description: Mapped[Optional[str]]
regex: Mapped[Optional[str]]
@ -291,14 +255,7 @@ class Rule(Base, Export):
min: Mapped[Optional[money]]
max: Mapped[Optional[money]]
type: Mapped[str] = mapped_column(init=False)
__mapper_args__ = {
"polymorphic_identity": "rule",
"polymorphic_on": "type",
}
def matches(self, transaction: BankTransaction) -> bool:
def matches(self, transaction: Transaction) -> bool:
if (
(self.date and self.date < transaction.date)
or (
@ -320,60 +277,22 @@ class Rule(Base, Export):
return False
return True
@property
def format(self) -> dict[str, Any]:
return dict(
date=self.date,
description=self.description,
regex=self.regex,
bank=self.bank,
min=self.min,
max=self.max,
type=self.type,
)
class CategoryRule(Rule):
class CategoryRule(Base, Rule):
__tablename__ = "categories_rules"
id: Mapped[int] = mapped_column(
BigInteger,
ForeignKey(Rule.id, ondelete="CASCADE"),
primary_key=True,
init=False,
)
id: Mapped[idpk] = mapped_column(init=False)
name: Mapped[catfk]
__mapper_args__ = {
"polymorphic_identity": "category_rule",
}
@property
def format(self) -> dict[str, Any]:
return super().format | dict(name=self.name)
def __hash__(self):
return hash(self.id)
class TagRule(Rule):
class TagRule(Base, Rule):
__tablename__ = "tag_rules"
id: Mapped[int] = mapped_column(
BigInteger,
ForeignKey(Rule.id, ondelete="CASCADE"),
primary_key=True,
init=False,
)
id: Mapped[idpk] = mapped_column(init=False)
tag: Mapped[str] = mapped_column(ForeignKey(Tag.name, ondelete="CASCADE"))
__mapper_args__ = {
"polymorphic_identity": "tag_rule",
}
@property
def format(self) -> dict[str, Any]:
    # Extend the base Rule serialization with the tag this rule assigns.
    return super().format | dict(tag=self.tag)
def __hash__(self):
    # Hash on the primary key so instances can live in sets/dict keys.
    return hash(self.id)

View File

@ -1,18 +1,18 @@
import datetime as dt
import dotenv
from datetime import date
from time import sleep
from requests import HTTPError, ReadTimeout
from dotenv import load_dotenv
from nordigen import NordigenClient
from uuid import uuid4
import json
import nordigen
import os
import requests
import time
import uuid
import pfbudget.db.model as t
import pfbudget.utils as utils
from pfbudget.db.model import BankTransaction
from pfbudget.utils import convert
from .input import Input
dotenv.load_dotenv()
load_dotenv()
class NordigenInput(Input):
@ -20,22 +20,16 @@ class NordigenInput(Input):
def __init__(self):
super().__init__()
if not (key := os.environ.get("SECRET_KEY")) or not (
id := os.environ.get("SECRET_ID")
):
raise
self._client = nordigen.NordigenClient(
secret_key=key,
secret_id=id,
self._client = NordigenClient(
secret_key=os.environ.get("SECRET_KEY"),
secret_id=os.environ.get("SECRET_ID"),
)
self._client.token = self.__token()
self._start = dt.date.min
self._end = dt.date.max
self._start = date.min
self._end = date.max
def parse(self) -> list[t.BankTransaction]:
def parse(self) -> list[BankTransaction]:
transactions = []
assert len(self._banks) > 0
@ -55,14 +49,14 @@ class NordigenInput(Input):
try:
downloaded = account.get_transactions()
break
except requests.ReadTimeout:
except ReadTimeout:
retries += 1
print(f"Request #{retries} timed-out, retrying in 1s")
time.sleep(1)
except requests.HTTPError as e:
sleep(1)
except HTTPError as e:
retries += 1
print(f"Request #{retries} failed with {e}, retrying in 1s")
time.sleep(1)
sleep(1)
if not downloaded:
print(f"Couldn't download transactions for {account}")
@ -72,7 +66,7 @@ class NordigenInput(Input):
json.dump(downloaded, f)
converted = [
utils.convert(t, bank) for t in downloaded["transactions"]["booked"]
convert(t, bank) for t in downloaded["transactions"]["booked"]
]
transactions.extend(
@ -88,12 +82,11 @@ class NordigenInput(Input):
def requisition(self, institution: str, country: str = "PT"):
id = self._client.institution.get_institution_id_by_name(country, institution)
requisition = self._client.initialize_session(
return self._client.initialize_session(
redirect_uri=self.redirect_url,
institution_id=id,
reference_id=str(uuid.uuid4()),
reference_id=str(uuid4()),
)
return requisition.link, requisition.requisition_id
def country_banks(self, country: str):
return self._client.institution.get_institutions(country)
@ -132,4 +125,4 @@ class NordigenInput(Input):
else:
token = self._client.generate_token()
print(f"New access token: {token}")
return token["access"]
return token

View File

@ -1,7 +1,6 @@
from collections import namedtuple
from decimal import Decimal
from importlib import import_module
from pathlib import Path
import datetime as dt
import yaml
@ -45,7 +44,7 @@ Options = namedtuple(
)
def parse_data(filename: Path, args: dict) -> list[Transaction]:
def parse_data(filename: str, args: dict) -> list[Transaction]:
cfg: dict = yaml.safe_load(open("parsers.yaml"))
assert (
"Banks" in cfg
@ -86,7 +85,7 @@ def parse_data(filename: Path, args: dict) -> list[Transaction]:
class Parser:
def __init__(self, filename: Path, bank: str, options: dict):
def __init__(self, filename: str, bank: str, options: dict):
self.filename = filename
self.bank = bank

View File

@ -0,0 +1 @@
__all__ = ["csv", "output"]

35
pfbudget/output/csv.py Normal file
View File

@ -0,0 +1,35 @@
from csv import DictReader, writer
from pfbudget.db.model import (
BankTransaction,
MoneyTransaction,
Transaction,
)
from .output import Output
class CSV(Output):
    """Transaction source/sink backed by a delimited text file.

    ``load`` reads a file with a header row (columns: date, description,
    amount, bank) and ``report`` writes transactions back out as
    tab-separated rows.
    """

    def __init__(self, filename: str):
        # File path shared by load() and report().
        self.fn = filename

    def load(self) -> list[Transaction]:
        """Parse the file; rows with a non-empty ``bank`` column become
        BankTransactions, all others MoneyTransactions."""
        loaded: list[Transaction] = []
        with open(self.fn, "r", newline="") as f:
            for record in DictReader(f):
                if record["bank"]:
                    loaded.append(
                        BankTransaction(
                            record["date"],
                            record["description"],
                            record["amount"],
                            False,
                            record["bank"],
                        )
                    )
                else:
                    # NOTE(review): argument order differs from BankTransaction
                    # (flag before amount) — matches the original code; confirm
                    # against the MoneyTransaction constructor.
                    loaded.append(
                        MoneyTransaction(
                            record["date"],
                            record["description"],
                            False,
                            record["amount"],
                        )
                    )
        return loaded

    def report(self, transactions: list[Transaction]):
        """Dump *transactions* as tab-separated (date, description, amount,
        bank) rows, overwriting the file."""
        with open(self.fn, "w", newline="") as f:
            rows = ((t.date, t.description, t.amount, t.bank) for t in transactions)
            writer(f, delimiter="\t").writerows(rows)

View File

@ -0,0 +1,9 @@
from abc import ABC, abstractmethod
from pfbudget.db.model import Transaction
class Output(ABC):
    """Abstract sink for rendering a list of transactions."""

    @abstractmethod
    def report(self, transactions: list[Transaction]):
        """Emit *transactions* to the concrete destination.

        Subclasses must override; the base implementation always raises
        NotImplementedError.
        """
        raise NotImplementedError

View File

@ -1,25 +1,23 @@
import datetime as dt
import functools
from typing import Any
from datetime import date
from functools import singledispatch
from pfbudget.common.types import TransactionError
import pfbudget.db.model as t
from pfbudget.db.model import Bank, BankTransaction
from .utils import parse_decimal
@functools.singledispatch
def convert(t) -> Any:
print("No converter has been found")
@singledispatch
def convert(t):
print("No converter as been found")
pass
@convert.register
def _(json: dict, bank: t.Bank) -> t.BankTransaction | None:
i = -1 if bank.nordigen and bank.nordigen.invert else 1
def _(json: dict, bank: Bank) -> BankTransaction:
i = -1 if bank.nordigen.invert else 1
try:
transaction = t.BankTransaction(
date=dt.date.fromisoformat(json["bookingDate"]),
transaction = BankTransaction(
date=date.fromisoformat(json["bookingDate"]),
description=json["remittanceInformationUnstructured"],
bank=bank.name,
amount=i * parse_decimal(json["transactionAmount"]["amount"]),