Compare commits

..

No commits in common. "ed2dda63e949198e20c7aa672c9cc7954da4194e" and "23eb2c80bdfb61cb05ca35f46ad64821bcb84338" have entirely different histories.

8 changed files with 81 additions and 196 deletions

View File

@@ -1,32 +0,0 @@
"""Start/End date rule
Revision ID: 952de57a3c43
Revises: 18572111d9ff
Create Date: 2023-02-06 21:57:57.545327+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "952de57a3c43"
down_revision = "18572111d9ff"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"rules", sa.Column("start", sa.Date(), nullable=True), schema="transactions"
)
op.alter_column("rules", column_name="date", new_column_name="end", schema="transactions")
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column("rules", column_name="end", new_column_name="date", schema="transactions")
op.drop_column("rules", "start", schema="transactions")
# ### end Alembic commands ###

View File

@@ -1,3 +1,6 @@
from decimal import Decimal
from typing import Sequence
from pfbudget.cli.argparser import argparser from pfbudget.cli.argparser import argparser
from pfbudget.cli.interactive import Interactive from pfbudget.cli.interactive import Interactive
from pfbudget.common.types import Operation from pfbudget.common.types import Operation
@@ -143,13 +146,12 @@ if __name__ == "__main__":
] ]
case Operation.RuleAdd: case Operation.RuleAdd:
keys = {"category", "start", "end", "description", "regex", "bank", "min", "max"} keys = {"category", "date", "description", "bank", "min", "max"}
assert args.keys() >= keys, f"missing {args.keys() - keys}" assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [ params = [
type.CategoryRule( type.CategoryRule(
args["start"][0] if args["start"] else None, args["date"][0] if args["date"] else None,
args["end"][0] if args["end"] else None,
args["description"][0] if args["description"] else None, args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None, args["regex"][0] if args["regex"] else None,
args["bank"][0] if args["bank"] else None, args["bank"][0] if args["bank"] else None,
@@ -195,13 +197,12 @@ if __name__ == "__main__":
params = [type.Tag(tag) for tag in args["tag"]] params = [type.Tag(tag) for tag in args["tag"]]
case Operation.TagRuleAdd: case Operation.TagRuleAdd:
keys = {"tag", "start", "end", "description", "regex", "bank", "min", "max"} keys = {"tag", "date", "description", "bank", "min", "max"}
assert args.keys() >= keys, f"missing {args.keys() - keys}" assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [ params = [
type.TagRule( type.TagRule(
args["start"][0] if args["start"] else None, args["date"][0] if args["date"] else None,
args["end"][0] if args["end"] else None,
args["description"][0] if args["description"] else None, args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None, args["regex"][0] if args["regex"] else None,
args["bank"][0] if args["bank"] else None, args["bank"][0] if args["bank"] else None,
@@ -237,7 +238,7 @@ if __name__ == "__main__":
keys = {"original", "links"} keys = {"original", "links"}
assert args.keys() >= keys, f"missing {args.keys() - keys}" assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [args["original"][0], args["links"]] params = [type.Link(args["original"][0], link) for link in args["links"]]
case ( case (
Operation.Export Operation.Export
@@ -253,9 +254,9 @@ if __name__ == "__main__":
| Operation.ExportCategoryGroups | Operation.ExportCategoryGroups
| Operation.ImportCategoryGroups | Operation.ImportCategoryGroups
): ):
keys = {"file", "format"} keys = {"file"}
assert args.keys() >= keys, f"missing {args.keys() - keys}" assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [args["file"][0], args["format"][0]] params = args["file"]
Manager(db, verbosity).action(op, params) Manager(db, verbosity).action(op, params)

View File

@@ -63,7 +63,7 @@ def argparser() -> argparse.ArgumentParser:
# Exports transactions to .csv file # Exports transactions to .csv file
export = subparsers.add_parser("export") export = subparsers.add_parser("export")
export.set_defaults(op=Operation.Export) export.set_defaults(op=Operation.Export)
file_options(export) export_args(export)
pimport = subparsers.add_parser("import") pimport = subparsers.add_parser("import")
pimport.set_defaults(op=Operation.Import) pimport.set_defaults(op=Operation.Import)
@@ -215,11 +215,11 @@ def bank(parser: argparse.ArgumentParser):
export = commands.add_parser("export") export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportBanks) export.set_defaults(op=Operation.ExportBanks)
file_options(export) export_args(export)
pimport = commands.add_parser("import") pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportBanks) pimport.set_defaults(op=Operation.ImportBanks)
file_options(pimport) export_args(pimport)
def nordigen(parser: argparse.ArgumentParser): def nordigen(parser: argparse.ArgumentParser):
@@ -276,11 +276,11 @@ def category(parser: argparse.ArgumentParser):
export = commands.add_parser("export") export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportCategories) export.set_defaults(op=Operation.ExportCategories)
file_options(export) export_args(export)
pimport = commands.add_parser("import") pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportCategories) pimport.set_defaults(op=Operation.ImportCategories)
file_options(pimport) export_args(pimport)
def category_group(parser: argparse.ArgumentParser): def category_group(parser: argparse.ArgumentParser):
@@ -296,11 +296,11 @@ def category_group(parser: argparse.ArgumentParser):
export = commands.add_parser("export") export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportCategoryGroups) export.set_defaults(op=Operation.ExportCategoryGroups)
file_options(export) export_args(export)
pimport = commands.add_parser("import") pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportCategoryGroups) pimport.set_defaults(op=Operation.ImportCategoryGroups)
file_options(pimport) export_args(pimport)
def category_rule(parser: argparse.ArgumentParser): def category_rule(parser: argparse.ArgumentParser):
@@ -324,11 +324,11 @@ def category_rule(parser: argparse.ArgumentParser):
export = commands.add_parser("export") export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportCategoryRules) export.set_defaults(op=Operation.ExportCategoryRules)
file_options(export) export_args(export)
pimport = commands.add_parser("import") pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportCategoryRules) pimport.set_defaults(op=Operation.ImportCategoryRules)
file_options(pimport) export_args(pimport)
def tags(parser: argparse.ArgumentParser): def tags(parser: argparse.ArgumentParser):
@@ -366,16 +366,15 @@ def tag_rule(parser: argparse.ArgumentParser):
export = commands.add_parser("export") export = commands.add_parser("export")
export.set_defaults(op=Operation.ExportTagRules) export.set_defaults(op=Operation.ExportTagRules)
file_options(export) export_args(export)
pimport = commands.add_parser("import") pimport = commands.add_parser("import")
pimport.set_defaults(op=Operation.ImportTagRules) pimport.set_defaults(op=Operation.ImportTagRules)
file_options(pimport) export_args(pimport)
def rules(parser: argparse.ArgumentParser): def rules(parser: argparse.ArgumentParser):
parser.add_argument("--start", nargs=1, type=dt.date.fromisoformat) parser.add_argument("--date", nargs=1, type=dt.date.fromisoformat)
parser.add_argument("--end", nargs=1, type=dt.date.fromisoformat)
parser.add_argument("--description", nargs=1, type=str) parser.add_argument("--description", nargs=1, type=str)
parser.add_argument("--regex", nargs=1, type=str) parser.add_argument("--regex", nargs=1, type=str)
parser.add_argument("--bank", nargs=1, type=str) parser.add_argument("--bank", nargs=1, type=str)
@@ -397,6 +396,5 @@ def link(parser: argparse.ArgumentParser):
dismantle.add_argument("links", nargs="+", type=int) dismantle.add_argument("links", nargs="+", type=int)
def file_options(parser: argparse.ArgumentParser): def export_args(parser: argparse.ArgumentParser):
parser.add_argument("file", nargs=1, type=str) parser.add_argument("file", nargs=1, type=str)
parser.add_argument("format", nargs=1, default="pickle")

View File

@@ -6,7 +6,6 @@ from enum import Enum, auto
class Operation(Enum): class Operation(Enum):
Init = auto() Init = auto()
Transactions = auto()
Parse = auto() Parse = auto()
Download = auto() Download = auto()
Categorize = auto() Categorize = auto()

View File

@@ -9,7 +9,7 @@ class Categorizer:
options = {} options = {}
def __init__(self): def __init__(self):
self.options["null_days"] = 3 self.options["null_days"] = 4
def rules( def rules(
self, self,
@@ -28,20 +28,14 @@ class Categorizer:
tags (Sequence[Tag]): currently available tags tags (Sequence[Tag]): currently available tags
""" """
try: self._nullify(transactions)
null = next(cat for cat in categories if cat.name == "null")
print("Nullifying")
self._nullify(transactions, null)
categories = [cat for cat in categories if cat.name != "null"]
except StopIteration:
print("Null category not defined")
self._rule_based_categories(transactions, categories) self._rule_based_categories(transactions, categories)
self._rule_based_tags(transactions, tags) self._rule_based_tags(transactions, tags)
@Timer(name="nullify") @Timer(name="nullify")
def _nullify(self, transactions: Sequence[t.BankTransaction], null: t.Category): def _nullify(self, transactions: Sequence[t.BankTransaction]):
print(f"Nullifying {len(transactions)} transactions")
count = 0 count = 0
matching = [] matching = []
for transaction in transactions: for transaction in transactions:
@@ -52,13 +46,11 @@ class Categorizer:
transaction.date - timedelta(days=self.options["null_days"]) transaction.date - timedelta(days=self.options["null_days"])
<= cancel.date <= cancel.date
<= transaction.date + timedelta(days=self.options["null_days"]) <= transaction.date + timedelta(days=self.options["null_days"])
and transaction not in matching
and cancel not in matching
and cancel != transaction and cancel != transaction
and cancel.bank != transaction.bank and cancel.bank != transaction.bank
and cancel.amount == -transaction.amount and cancel.amount == -transaction.amount
and transaction not in matching
and cancel not in matching
and all(r.matches(transaction) for r in null.rules)
and all(r.matches(cancel) for r in null.rules)
) )
): ):
transaction.category = t.TransactionCategory( transaction.category = t.TransactionCategory(
@@ -73,7 +65,7 @@ class Categorizer:
count += 2 count += 2
break break
print(f"Nullified {count} of {len(transactions)} transactions") print(f"Nullified {count} transactions")
@Timer(name="categoryrules") @Timer(name="categoryrules")
def _rule_based_categories( def _rule_based_categories(
@@ -95,14 +87,12 @@ class Categorizer:
continue continue
# passed all conditions, assign category # passed all conditions, assign category
if transaction.category:
if transaction.category.name == category.name:
continue
if ( if (
input( transaction.category
f"Overwrite {transaction} with {category.name}? (y/n)" and transaction.category.name == category.name
) ):
if (
input(f"Overwrite {transaction} with {category}? (y/n)")
== "y" == "y"
): ):
transaction.category.name = category.name transaction.category.name = category.name

View File

@@ -1,4 +1,3 @@
import csv
from pathlib import Path from pathlib import Path
import pickle import pickle
import webbrowser import webbrowser
@@ -18,7 +17,6 @@ from pfbudget.db.model import (
MoneyTransaction, MoneyTransaction,
Nordigen, Nordigen,
Rule, Rule,
Selector_T,
SplitTransaction, SplitTransaction,
Tag, Tag,
TagRule, TagRule,
@@ -34,23 +32,14 @@ class Manager:
self._db = db self._db = db
self._verbosity = verbosity self._verbosity = verbosity
def action(self, op: Operation, params=None): def action(self, op: Operation, params: list):
if self._verbosity > 0: if self._verbosity > 0:
print(f"op={op}, params={params}") print(f"op={op}, params={params}")
if params is None:
params = []
match (op): match (op):
case Operation.Init: case Operation.Init:
pass pass
case Operation.Transactions:
with self.db.session() as session:
transactions = session.get(Transaction)
ret = [t.format for t in transactions]
return ret
case Operation.Parse: case Operation.Parse:
# Adapter for the parse_data method. Can be refactored. # Adapter for the parse_data method. Can be refactored.
args = {"bank": params[1], "creditcard": params[2], "category": None} args = {"bank": params[1], "creditcard": params[2], "category": None}
@@ -179,32 +168,8 @@ class Manager:
session.remove_by_name(CategoryGroup, params) session.remove_by_name(CategoryGroup, params)
case Operation.Forge: case Operation.Forge:
if not (
isinstance(params[0], int)
and all(isinstance(p, int) for p in params[1])
):
raise TypeError("f{params} are not transaction ids")
with self.db.session() as session: with self.db.session() as session:
original = session.get(Transaction, Transaction.id, params[0])[0] session.add(params)
links = session.get(Transaction, Transaction.id, params[1])
if not original.category:
original.category = self.askcategory(original)
for link in links:
if (
not link.category
or link.category.name != original.category.name
):
print(
f"{link} category will change to"
f" {original.category.name}"
)
link.category = original.category
tobelinked = [Link(original.id, link.id) for link in links]
session.add(tobelinked)
case Operation.Dismantle: case Operation.Dismantle:
assert all(isinstance(param, Link) for param in params) assert all(isinstance(param, Link) for param in params)
@@ -237,8 +202,7 @@ class Manager:
if originals[0].date != t.date: if originals[0].date != t.date:
t.date = originals[0].date t.date = originals[0].date
print( print(
f"{t.date} is different from original date" f"{t.date} is different from original date {originals[0].date}, using original"
f" {originals[0].date}, using original"
) )
splitted = SplitTransaction( splitted = SplitTransaction(
@@ -251,11 +215,11 @@ class Manager:
case Operation.Export: case Operation.Export:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], sorted(session.get(Transaction))) self.dump(params[0], sorted(session.get(Transaction)))
case Operation.Import: case Operation.Import:
transactions = [] transactions = []
for row in self.load(params[0], params[1]): for row in self.load(params[0]):
match row["type"]: match row["type"]:
case "bank": case "bank":
transaction = BankTransaction( transaction = BankTransaction(
@@ -288,11 +252,11 @@ class Manager:
case Operation.ExportBanks: case Operation.ExportBanks:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], session.get(Bank)) self.dump(params[0], session.get(Bank))
case Operation.ImportBanks: case Operation.ImportBanks:
banks = [] banks = []
for row in self.load(params[0], params[1]): for row in self.load(params[0]):
bank = Bank(row["name"], row["BIC"], row["type"]) bank = Bank(row["name"], row["BIC"], row["type"])
if row["nordigen"]: if row["nordigen"]:
bank.nordigen = Nordigen(**row["nordigen"]) bank.nordigen = Nordigen(**row["nordigen"])
@@ -304,10 +268,10 @@ class Manager:
case Operation.ExportCategoryRules: case Operation.ExportCategoryRules:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], session.get(CategoryRule)) self.dump(params[0], session.get(CategoryRule))
case Operation.ImportCategoryRules: case Operation.ImportCategoryRules:
rules = [CategoryRule(**row) for row in self.load(params[0], params[1])] rules = [CategoryRule(**row) for row in self.load(params[0])]
if self.certify(rules): if self.certify(rules):
with self.db.session() as session: with self.db.session() as session:
@@ -315,10 +279,10 @@ class Manager:
case Operation.ExportTagRules: case Operation.ExportTagRules:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], session.get(TagRule)) self.dump(params[0], session.get(TagRule))
case Operation.ImportTagRules: case Operation.ImportTagRules:
rules = [TagRule(**row) for row in self.load(params[0], params[1])] rules = [TagRule(**row) for row in self.load(params[0])]
if self.certify(rules): if self.certify(rules):
with self.db.session() as session: with self.db.session() as session:
@@ -326,12 +290,12 @@ class Manager:
case Operation.ExportCategories: case Operation.ExportCategories:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], session.get(Category)) self.dump(params[0], session.get(Category))
case Operation.ImportCategories: case Operation.ImportCategories:
# rules = [Category(**row) for row in self.load(params[0])] # rules = [Category(**row) for row in self.load(params[0])]
categories = [] categories = []
for row in self.load(params[0], params[1]): for row in self.load(params[0]):
category = Category(row["name"], row["group"]) category = Category(row["name"], row["group"])
if len(row["rules"]) > 0: if len(row["rules"]) > 0:
# Only category rules could have been created with a rule # Only category rules could have been created with a rule
@@ -350,12 +314,10 @@ class Manager:
case Operation.ExportCategoryGroups: case Operation.ExportCategoryGroups:
with self.db.session() as session: with self.db.session() as session:
self.dump(params[0], params[1], session.get(CategoryGroup)) self.dump(params[0], session.get(CategoryGroup))
case Operation.ImportCategoryGroups: case Operation.ImportCategoryGroups:
groups = [ groups = [CategoryGroup(**row) for row in self.load(params[0])]
CategoryGroup(**row) for row in self.load(params[0], params[1])
]
if self.certify(groups): if self.certify(groups):
with self.db.session() as session: with self.db.session() as session:
@@ -364,38 +326,15 @@ class Manager:
def parse(self, filename: Path, args: dict): def parse(self, filename: Path, args: dict):
return parse_data(filename, args) return parse_data(filename, args)
def askcategory(self, transaction: Transaction):
selector = CategorySelector(Selector_T.manual)
with self.db.session() as session:
categories = session.get(Category)
while True:
category = input(f"{transaction}: ")
if category in [c.name for c in categories]:
return TransactionCategory(category, selector)
@staticmethod @staticmethod
def dump(fn, format, sequence): def dump(fn, sequence):
if format == "pickle":
with open(fn, "wb") as f: with open(fn, "wb") as f:
pickle.dump([e.format for e in sequence], f) pickle.dump([e.format for e in sequence], f)
elif format == "csv":
with open(fn, "w", newline="") as f:
csv.writer(f).writerows([e.format.values() for e in sequence])
else:
print("format not well specified")
@staticmethod @staticmethod
def load(fn, format): def load(fn):
if format == "pickle":
with open(fn, "rb") as f: with open(fn, "rb") as f:
return pickle.load(f) return pickle.load(f)
elif format == "csv":
raise Exception("CSV import not supported")
else:
print("format not well specified")
return []
@staticmethod @staticmethod
def certify(imports: list) -> bool: def certify(imports: list) -> bool:

View File

@@ -51,10 +51,7 @@ class DbClient:
def get(self, type: Type[T], column=None, values=None) -> Sequence[T]: def get(self, type: Type[T], column=None, values=None) -> Sequence[T]:
if column is not None: if column is not None:
if values: if values:
if isinstance(values, Sequence):
stmt = select(type).where(column.in_(values)) stmt = select(type).where(column.in_(values))
else:
stmt = select(type).where(column == values)
else: else:
stmt = select(type).where(column) stmt = select(type).where(column)
else: else:

View File

@@ -335,8 +335,7 @@ class Rule(Base, Export):
__tablename__ = "rules" __tablename__ = "rules"
id: Mapped[idpk] = mapped_column(init=False) id: Mapped[idpk] = mapped_column(init=False)
start: Mapped[Optional[dt.date]] date: Mapped[Optional[dt.date]]
end: Mapped[Optional[dt.date]]
description: Mapped[Optional[str]] description: Mapped[Optional[str]]
regex: Mapped[Optional[str]] regex: Mapped[Optional[str]]
bank: Mapped[Optional[str]] bank: Mapped[Optional[str]]
@@ -350,34 +349,32 @@ class Rule(Base, Export):
"polymorphic_on": "type", "polymorphic_on": "type",
} }
def matches(self, t: BankTransaction) -> bool: def matches(self, transaction: BankTransaction) -> bool:
valid = None if (
if self.regex: (self.date and self.date < transaction.date)
valid = re.compile(self.regex, re.IGNORECASE) or (
self.description
ops = ( and transaction.description
Rule.exists(self.start, lambda r: r < t.date), and self.description not in transaction.description
Rule.exists(self.end, lambda r: r > t.date),
Rule.exists(self.description, lambda r: r == t.description),
Rule.exists(
valid,
lambda r: r.search(t.description) if t.description else False,
),
Rule.exists(self.bank, lambda r: r == t.bank),
Rule.exists(self.min, lambda r: r < t.amount),
Rule.exists(self.max, lambda r: r > t.amount),
) )
or (
if all(ops): self.regex
return True and transaction.description
and not re.compile(self.regex, re.IGNORECASE).search(
transaction.description
)
)
or (self.bank and self.bank != transaction.bank)
or (self.min and self.min > transaction.amount)
or (self.max and self.max < transaction.amount)
):
return False return False
return True
@property @property
def format(self) -> dict[str, Any]: def format(self) -> dict[str, Any]:
return dict( return dict(
start=self.start, date=self.date,
end=self.end,
description=self.description, description=self.description,
regex=self.regex, regex=self.regex,
bank=self.bank, bank=self.bank,
@@ -386,10 +383,6 @@ class Rule(Base, Export):
type=self.type, type=self.type,
) )
@staticmethod
def exists(r, op) -> bool:
return op(r) if r is not None else True
class CategoryRule(Rule): class CategoryRule(Rule):
__table_args__ = {"schema": "category"} __table_args__ = {"schema": "category"}