Turned on type checking

and as a result, had to fix a LOT of minor potential future issues.
It also reorders imports and removes unused ones.

When exporting transactions, they are now sorted by date.
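
A minimal sketch (assumed names, not the exact pfbudget API) of the kind of change strict type checking encourages, mirroring the generic get() in the diff below: the return type follows the requested model class, so both callers and the checker know what comes back.

from typing import Sequence, Type, TypeVar

T = TypeVar("T")

class DbClient:
    """Illustrative stand-in for the real database client."""

    def __init__(self) -> None:
        self._rows: list[object] = []  # placeholder storage for the sketch

    def get(self, type: Type[T]) -> Sequence[T]:
        # The checker can now infer that get(Category) returns Sequence[Category].
        return [row for row in self._rows if isinstance(row, type)]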
Luís Murta 2023-01-22 20:42:14 +00:00
parent d4b5f1f11a
commit fd6793b4f4
Signed by: satprog
GPG Key ID: 169EF1BBD7049F94
10 changed files with 155 additions and 192 deletions

View File

@ -9,4 +9,4 @@ from pfbudget.cli.runnable import argparser
from pfbudget.input.parsers import parse_data
from pfbudget.utils.utils import parse_args_period
import pfbudget.db.model as types
import pfbudget.db.model as t

View File

@ -14,7 +14,7 @@ if __name__ == "__main__":
assert "verbose" in args, "No verbose level specified"
verbosity = args.pop("verbose")
params = None
params = []
match (op):
case pfbudget.Operation.Parse:
keys = {"path", "bank", "creditcard"}
@ -45,7 +45,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.Bank(
pfbudget.t.Bank(
args["bank"][0],
args["bic"][0],
args["type"][0],
@ -73,7 +73,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.Nordigen(
pfbudget.t.Nordigen(
args["bank"][0],
args["bank_id"][0] if args["bank_id"] else None,
args["requisition_id"][0] if args["requisition_id"] else None,
@ -110,27 +110,27 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.Category(cat, args["group"]) for cat in args["category"]
pfbudget.t.Category(cat, args["group"]) for cat in args["category"]
]
case pfbudget.Operation.CategoryUpdate:
keys = {"category", "group"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [pfbudget.types.Category(cat) for cat in args["category"]]
params = [pfbudget.t.Category(cat) for cat in args["category"]]
params.append(args["group"])
case pfbudget.Operation.CategoryRemove:
assert "category" in args, "argparser ill defined"
params = [pfbudget.types.Category(cat) for cat in args["category"]]
params = [pfbudget.t.Category(cat) for cat in args["category"]]
case pfbudget.Operation.CategorySchedule:
keys = {"category", "period", "frequency"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.CategorySchedule(
cat, True, args["period"][0], args["frequency"][0]
pfbudget.t.CategorySchedule(
cat, args["period"][0], args["frequency"][0], None
)
for cat in args["category"]
]
@ -140,7 +140,7 @@ if __name__ == "__main__":
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.CategoryRule(
pfbudget.t.CategoryRule(
args["date"][0] if args["date"] else None,
args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None,
@ -184,14 +184,14 @@ if __name__ == "__main__":
keys = {"tag"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [pfbudget.types.Tag(tag) for tag in args["tag"]]
params = [pfbudget.t.Tag(tag) for tag in args["tag"]]
case pfbudget.Operation.TagRuleAdd:
keys = {"tag", "date", "description", "bank", "min", "max"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.TagRule(
pfbudget.t.TagRule(
args["date"][0] if args["date"] else None,
args["description"][0] if args["description"] else None,
args["regex"][0] if args["regex"] else None,
@ -218,18 +218,18 @@ if __name__ == "__main__":
case pfbudget.Operation.GroupAdd:
assert "group" in args, "argparser ill defined"
params = [pfbudget.types.CategoryGroup(group) for group in args["group"]]
params = [pfbudget.t.CategoryGroup(group) for group in args["group"]]
case pfbudget.Operation.GroupRemove:
assert "group" in args, "argparser ill defined"
params = [pfbudget.types.CategoryGroup(group) for group in args["group"]]
params = [pfbudget.t.CategoryGroup(group) for group in args["group"]]
case pfbudget.Operation.Forge | pfbudget.Operation.Dismantle:
keys = {"original", "links"}
assert args.keys() >= keys, f"missing {args.keys() - keys}"
params = [
pfbudget.types.Link(args["original"][0], link) for link in args["links"]
pfbudget.t.Link(args["original"][0], link) for link in args["links"]
]
case pfbudget.Operation.Export | pfbudget.Operation.Import | pfbudget.Operation.ExportCategoryRules | pfbudget.Operation.ImportCategoryRules | pfbudget.Operation.ExportTagRules | pfbudget.Operation.ImportTagRules:

View File

@ -1,12 +1,13 @@
from dotenv import load_dotenv
import argparse
import datetime as dt
import decimal
from dotenv import load_dotenv
import os
import re
from pfbudget.common.types import Operation
from pfbudget.db.model import AccountType, Period
from pfbudget.db.sqlite import DatabaseClient
import pfbudget.reporting.graph
import pfbudget.reporting.report
@ -38,42 +39,38 @@ def argparser() -> argparse.ArgumentParser:
help="select current database",
default=DEFAULT_DB,
)
universal.add_argument("-v", "--verbose", action="count", default=0)
period = argparse.ArgumentParser(add_help=False).add_mutually_exclusive_group()
period.add_argument(
period = argparse.ArgumentParser(add_help=False)
period_group = period.add_mutually_exclusive_group()
period_group.add_argument(
"--interval", type=str, nargs=2, help="graph interval", metavar=("START", "END")
)
period.add_argument("--start", type=str, nargs=1, help="graph start date")
period.add_argument("--end", type=str, nargs=1, help="graph end date")
period.add_argument("--year", type=str, nargs=1, help="graph year")
period_group.add_argument("--start", type=str, nargs=1, help="graph start date")
period_group.add_argument("--end", type=str, nargs=1, help="graph end date")
period_group.add_argument("--year", type=str, nargs=1, help="graph year")
parser = argparse.ArgumentParser(
description="does cool finance stuff",
parents=[universal],
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"--version",
action="version",
version=re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
open("pfbudget/__init__.py").read(),
).group(1),
)
if version := re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]', open("pfbudget/__init__.py").read()
):
parser.add_argument(
"--version",
action="version",
version=version.group(1),
)
subparsers = parser.add_subparsers(required=True)
"""
Init
"""
p_init = subparsers.add_parser(
"init",
description="Initializes the SQLite3 database",
parents=[universal],
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
p_init.set_defaults(command=Operation.Init)
# TODO Init
# init = subparsers.add_parser("init")
# init.set_defaults(op=Operation.Init)
# Exports transactions to .csv file
export = subparsers.add_parser("export")
@ -206,11 +203,6 @@ def report(args):
pfbudget.reporting.report.detailed(DatabaseClient(args.database), start, end)
# def nordigen_banks(manager: Manager, args):
# input = NordigenInput(manager)
# input.list(vars(args)["country"][0])
def bank(parser: argparse.ArgumentParser):
commands = parser.add_subparsers(required=True)

View File

@ -1,17 +1,8 @@
from pfbudget.db.model import (
Category,
CategorySelector,
Selector,
Tag,
Transaction,
TransactionCategory,
TransactionTag,
)
from codetiming import Timer
from datetime import timedelta
from typing import Sequence
Transactions = list[Transaction]
import pfbudget.db.model as t
class Categorizer:
@ -22,9 +13,9 @@ class Categorizer:
def rules(
self,
transactions: Transactions,
categories: list[Category],
tags: list[Tag],
transactions: Sequence[t.BankTransaction],
categories: Sequence[t.Category],
tags: Sequence[t.Tag],
):
"""Overarching categorization tool
@ -32,9 +23,9 @@ class Categorizer:
to the rules defined for each category
Args:
transactions (list[Transaction]): uncategorized transactions
categories (list[Category]): available categories
tags (list[Tag]): currently available tags
transactions (Sequence[BankTransaction]): uncategorized transactions
categories (Sequence[Category]): available categories
tags (Sequence[Tag]): currently available tags
"""
self._nullify(transactions)
@ -44,21 +35,21 @@ class Categorizer:
def manual(
self,
transactions: Transactions,
categories: list[Category],
tags: list[Tag],
transactions: Sequence[t.Transaction],
categories: Sequence[t.Category],
tags: Sequence[t.Tag],
):
"""Manual categorization input
Args:
transactions (list[Transaction]): uncategorized transactions
categories (list[Category]): available categories
tags (list[Tag]): currently available tags
transactions (Sequence[Transaction]): uncategorized transactions
categories (Sequence[Category]): available categories
tags (Sequence[Tag]): currently available tags
"""
self._manual(transactions)
@Timer(name="nullify")
def _nullify(self, transactions: Transactions):
def _nullify(self, transactions: Sequence[t.BankTransaction]):
count = 0
matching = []
for transaction in transactions:
@ -76,11 +67,13 @@ class Categorizer:
and cancel.amount == -transaction.amount
)
):
transaction.category = TransactionCategory(
name="null", selector=CategorySelector(Selector.nullifier)
transaction.category = t.TransactionCategory(
name="null",
selector=t.CategorySelector(t.Selector.nullifier),
)
cancel.category = TransactionCategory(
name="null", selector=CategorySelector(Selector.nullifier)
cancel.category = t.TransactionCategory(
name="null",
selector=t.CategorySelector(t.Selector.nullifier),
)
matching.extend([transaction, cancel])
count += 2
@ -90,7 +83,9 @@ class Categorizer:
@Timer(name="categoryrules")
def _rule_based_categories(
self, transactions: Transactions, categories: list[Category]
self,
transactions: Sequence[t.BankTransaction],
categories: Sequence[t.Category],
):
d = {}
for category in [c for c in categories if c.rules]:
@ -114,10 +109,10 @@ class Categorizer:
== "y"
):
transaction.category.name = category.name
transaction.category.selector.selector = Selector.rules
transaction.category.selector.selector = t.Selector.rules
else:
transaction.category = TransactionCategory(
category.name, CategorySelector(Selector.rules)
transaction.category = t.TransactionCategory(
category.name, t.CategorySelector(t.Selector.rules)
)
if rule in d:
@ -129,9 +124,11 @@ class Categorizer:
print(f"{v}: {k}")
@Timer(name="tagrules")
def _rule_based_tags(self, transactions: Transactions, tags: list[Tag]):
def _rule_based_tags(
self, transactions: Sequence[t.BankTransaction], tags: Sequence[t.Tag]
):
d = {}
for tag in [t for t in tags if t.rules]:
for tag in [t for t in tags if len(t.rules) > 0]:
for rule in tag.rules:
# for transaction in [t for t in transactions if not t.category]:
for transaction in [
@ -143,9 +140,9 @@ class Categorizer:
continue
if not transaction.tags:
transaction.tags = {TransactionTag(tag.name)}
transaction.tags = {t.TransactionTag(tag.name)}
else:
transaction.tags.add(TransactionTag(tag.name))
transaction.tags.add(t.TransactionTag(tag.name))
if rule in d:
d[rule] += 1
@ -155,7 +152,7 @@ class Categorizer:
for k, v in d.items():
print(f"{v}: {k}")
def _manual(self, transactions: Transactions):
def _manual(self, transactions: Sequence[t.Transaction]):
uncategorized = [t for t in transactions if not t.category]
print(f"{len(uncategorized)} transactions left to categorize")
@ -167,8 +164,8 @@ class Categorizer:
if not category:
print("{category} doesn't exist")
continue
transaction.category = TransactionCategory(
category, CategorySelector(Selector.manual)
transaction.category = t.TransactionCategory(
category, t.CategorySelector(t.Selector.manual)
)
break

View File

@ -12,6 +12,7 @@ from pfbudget.db.model import (
CategoryGroup,
CategoryRule,
CategorySelector,
Link,
MoneyTransaction,
Nordigen,
Rule,
@ -41,18 +42,21 @@ class Manager:
# Adapter for the parse_data method. Can be refactored.
args = {"bank": params[1], "creditcard": params[2], "category": None}
transactions = []
for path in params[0]:
if (dir := Path(path)).is_dir():
for file in dir.iterdir():
for path in [Path(p) for p in params[0]]:
if path.is_dir():
for file in path.iterdir():
transactions.extend(self.parse(file, args))
elif Path(path).is_file():
elif path.is_file():
transactions.extend(self.parse(path, args))
else:
raise FileNotFoundError(path)
print(transactions)
if len(transactions) > 0 and input("Commit? (y/n)") == "y":
self.add_transactions(sorted(transactions))
if (
len(transactions) > 0
and input(f"{transactions[:5]}\nCommit? (y/n)") == "y"
):
with self.db.session() as session:
session.add(sorted(transactions))
case Operation.Download:
client = NordigenInput()
@ -68,14 +72,15 @@ class Manager:
# dry-run
if not params[2]:
self.add_transactions(transactions)
with self.db.session() as session:
session.add(sorted(transactions))
else:
print(transactions)
case Operation.Categorize:
with self.db.session() as session:
uncategorized = session.get(
Transaction, ~Transaction.category.has()
BankTransaction, ~BankTransaction.category.has()
)
categories = session.get(Category)
tags = session.get(Tag)
@ -155,7 +160,7 @@ class Manager:
case Operation.GroupAdd:
with self.db.session() as session:
session.add(CategoryGroup(params))
session.add(params)
case Operation.GroupRemove:
assert all(isinstance(param, CategoryGroup) for param in params)
@ -167,6 +172,8 @@ class Manager:
session.add(params)
case Operation.Dismantle:
assert all(isinstance(param, Link) for param in params)
with self.db.session() as session:
original = params[0].original
links = [link.link for link in params]
@ -174,7 +181,7 @@ class Manager:
case Operation.Export:
with self.db.session() as session:
self.dump(params[0], session.get(Transaction))
self.dump(params[0], sorted(session.get(Transaction)))
case Operation.Import:
transactions = []
@ -246,34 +253,9 @@ class Manager:
with self.db.session() as session:
session.add(rules)
# def init(self):
# client = DatabaseClient(self.__db)
# client.init()
# def register(self):
# bank = Bank(self.args["bank"][0], "", self.args["requisition"][0], self.args["invert"])
# client = DatabaseClient(self.__db)
# client.register_bank(convert(bank))
# def unregister(self):
# client = DatabaseClient(self.__db)
# client.unregister_bank(self.args["bank"][0])
def parse(self, filename: str, args: dict):
def parse(self, filename: Path, args: dict):
return parse_data(filename, args)
# def transactions() -> list[Transaction]:
# pass
def add_transactions(self, transactions):
with self.db.session() as session:
session.add(transactions)
# def get_bank_by(self, key: str, value: str) -> Bank:
# client = DatabaseClient(self.__db)
# bank = client.get_bank(key, value)
# return convert(bank)
def dump(self, fn, sequence):
with open(fn, "wb") as f:
pickle.dump([e.format for e in sequence], f)

View File

@ -1,25 +1,16 @@
from dataclasses import asdict
from datetime import date
from sqlalchemy import create_engine, delete, select, update
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import Session
from typing import Sequence, Type, TypeVar
from pfbudget.db.model import (
Category,
CategoryGroup,
CategoryRule,
CategorySchedule,
Link,
Tag,
TagRule,
Transaction,
)
# import logging
# logging.basicConfig()
# logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)
class DbClient:
"""
@ -53,7 +44,9 @@ class DbClient:
def expunge_all(self):
self.__session.expunge_all()
def get(self, type, column=None, values=None):
T = TypeVar("T")
def get(self, type: Type[T], column=None, values=None) -> Sequence[T]:
if column is not None:
if values:
stmt = select(type).where(column.in_(values))
@ -67,7 +60,7 @@ class DbClient:
def add(self, rows: list):
self.__session.add_all(rows)
def remove_by_name(self, type: Category | Tag | Transaction, rows: list):
def remove_by_name(self, type, rows: list):
stmt = delete(type).where(type.name.in_([row.name for row in rows]))
self.__session.execute(stmt)
@ -91,7 +84,7 @@ class DbClient:
)
self.__session.execute(stmt)
def remove_by_id(self, type: CategoryRule | TagRule, ids: list[int]):
def remove_by_id(self, type, ids: list[int]):
stmt = delete(type).where(type.id.in_(ids))
self.__session.execute(stmt)
@ -99,19 +92,11 @@ class DbClient:
print(type, values)
self.__session.execute(update(type), values)
def remove_links(self, original, links: list):
def remove_links(self, original: int, links: list[int]):
stmt = delete(Link).where(
Link.original == original, Link.link.in_(link for link in links)
)
self.__session.execute(stmt)
def transactions(self, min: date, max: date, banks: list[str]):
stmt = select(Transaction).where(
Transaction.date >= min,
Transaction.date <= max,
Transaction.bank.in_(banks),
)
return self.__session.scalars(stmt).all()
def session(self) -> ClientSession:
return self.ClientSession(self.engine)

View File

@ -1,4 +1,9 @@
from __future__ import annotations
import datetime as dt
import decimal
import enum
import re
from typing import Annotated, Any, Optional
from sqlalchemy import (
BigInteger,
@ -17,12 +22,6 @@ from sqlalchemy.orm import (
relationship,
)
from decimal import Decimal
from typing import Annotated, Optional
import datetime as dt
import enum
import re
class Base(MappedAsDataclass, DeclarativeBase):
__table_args__ = {"schema": "transactions"}
@ -65,13 +64,13 @@ class Bank(Base):
BIC: Mapped[str] = mapped_column(String(8), primary_key=True)
type: Mapped[accounttype] = mapped_column(primary_key=True)
nordigen: Mapped[Optional[Nordigen]] = relationship(lazy="joined")
nordigen: Mapped[Optional[Nordigen]] = relationship(lazy="joined", init=False)
bankfk = Annotated[str, mapped_column(Text, ForeignKey(Bank.name))]
idpk = Annotated[int, mapped_column(BigInteger, primary_key=True, autoincrement=True)]
money = Annotated[Decimal, mapped_column(Numeric(16, 2))]
money = Annotated[decimal.Decimal, mapped_column(Numeric(16, 2))]
class Transaction(Base, Export):
@ -86,7 +85,7 @@ class Transaction(Base, Export):
category: Mapped[Optional[TransactionCategory]] = relationship(init=False)
note: Mapped[Optional[Note]] = relationship(init=False)
tags: Mapped[Optional[set[TransactionTag]]] = relationship(init=False)
tags: Mapped[set[TransactionTag]] = relationship(init=False)
__mapper_args__ = {"polymorphic_on": "type", "polymorphic_identity": "transaction"}
@ -152,11 +151,11 @@ class Category(Base):
ForeignKey(CategoryGroup.name), default=None
)
rules: Mapped[Optional[set[CategoryRule]]] = relationship(
rules: Mapped[set[CategoryRule]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default_factory=set
)
schedule: Mapped[CategorySchedule] = relationship(
back_populates="category", default=None
schedule: Mapped[Optional[CategorySchedule]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default=None
)
def __repr__(self) -> str:
@ -205,7 +204,7 @@ class Tag(Base):
name: Mapped[str] = mapped_column(primary_key=True)
rules: Mapped[Optional[set[TagRule]]] = relationship(
rules: Mapped[set[TagRule]] = relationship(
cascade="all, delete-orphan", passive_deletes=True, default_factory=set
)
@ -273,8 +272,6 @@ class CategorySchedule(Base):
period_multiplier: Mapped[Optional[int]]
amount: Mapped[Optional[int]]
category: Mapped[Category] = relationship(back_populates="schedule")
class Link(Base):
__tablename__ = "links"

View File

@ -1,18 +1,18 @@
from datetime import date
from time import sleep
from requests import HTTPError, ReadTimeout
from dotenv import load_dotenv
from nordigen import NordigenClient
from uuid import uuid4
import datetime as dt
import dotenv
import json
import nordigen
import os
import requests
import time
import uuid
from pfbudget.db.model import BankTransaction
from pfbudget.utils import convert
import pfbudget.db.model as t
import pfbudget.utils as utils
from .input import Input
load_dotenv()
dotenv.load_dotenv()
class NordigenInput(Input):
@ -20,16 +20,22 @@ class NordigenInput(Input):
def __init__(self):
super().__init__()
self._client = NordigenClient(
secret_key=os.environ.get("SECRET_KEY"),
secret_id=os.environ.get("SECRET_ID"),
if not (key := os.environ.get("SECRET_KEY")) or not (
id := os.environ.get("SECRET_ID")
):
raise
self._client = nordigen.NordigenClient(
secret_key=key,
secret_id=id,
)
self._client.token = self.__token()
self._start = date.min
self._end = date.max
self._start = dt.date.min
self._end = dt.date.max
def parse(self) -> list[BankTransaction]:
def parse(self) -> list[t.BankTransaction]:
transactions = []
assert len(self._banks) > 0
@ -49,14 +55,14 @@ class NordigenInput(Input):
try:
downloaded = account.get_transactions()
break
except ReadTimeout:
except requests.ReadTimeout:
retries += 1
print(f"Request #{retries} timed-out, retrying in 1s")
sleep(1)
except HTTPError as e:
time.sleep(1)
except requests.HTTPError as e:
retries += 1
print(f"Request #{retries} failed with {e}, retrying in 1s")
sleep(1)
time.sleep(1)
if not downloaded:
print(f"Couldn't download transactions for {account}")
@ -66,7 +72,7 @@ class NordigenInput(Input):
json.dump(downloaded, f)
converted = [
convert(t, bank) for t in downloaded["transactions"]["booked"]
utils.convert(t, bank) for t in downloaded["transactions"]["booked"]
]
transactions.extend(
@ -82,11 +88,12 @@ class NordigenInput(Input):
def requisition(self, institution: str, country: str = "PT"):
id = self._client.institution.get_institution_id_by_name(country, institution)
return self._client.initialize_session(
requisition = self._client.initialize_session(
redirect_uri=self.redirect_url,
institution_id=id,
reference_id=str(uuid4()),
reference_id=str(uuid.uuid4()),
)
return requisition.link, requisition.requisition_id
def country_banks(self, country: str):
return self._client.institution.get_institutions(country)
@ -125,4 +132,4 @@ class NordigenInput(Input):
else:
token = self._client.generate_token()
print(f"New access token: {token}")
return token
return token["access"]

View File

@ -1,6 +1,7 @@
from collections import namedtuple
from decimal import Decimal
from importlib import import_module
from pathlib import Path
import datetime as dt
import yaml
@ -44,7 +45,7 @@ Options = namedtuple(
)
def parse_data(filename: str, args: dict) -> list[Transaction]:
def parse_data(filename: Path, args: dict) -> list[Transaction]:
cfg: dict = yaml.safe_load(open("parsers.yaml"))
assert (
"Banks" in cfg
@ -85,7 +86,7 @@ def parse_data(filename: str, args: dict) -> list[Transaction]:
class Parser:
def __init__(self, filename: str, bank: str, options: dict):
def __init__(self, filename: Path, bank: str, options: dict):
self.filename = filename
self.bank = bank

View File

@ -1,23 +1,25 @@
from datetime import date
from functools import singledispatch
import datetime as dt
import functools
from typing import Any
from pfbudget.common.types import TransactionError
from pfbudget.db.model import Bank, BankTransaction
import pfbudget.db.model as t
from .utils import parse_decimal
@singledispatch
def convert(t):
print("No converter as been found")
@functools.singledispatch
def convert(t) -> Any:
print("No converter has been found")
pass
@convert.register
def _(json: dict, bank: Bank) -> BankTransaction:
i = -1 if bank.nordigen.invert else 1
def _(json: dict, bank: t.Bank) -> t.BankTransaction | None:
i = -1 if bank.nordigen and bank.nordigen.invert else 1
try:
transaction = BankTransaction(
date=date.fromisoformat(json["bookingDate"]),
transaction = t.BankTransaction(
date=dt.date.fromisoformat(json["bookingDate"]),
description=json["remittanceInformationUnstructured"],
bank=bank.name,
amount=i * parse_decimal(json["transactionAmount"]["amount"]),