Moves initializer function to its own file
This commit is contained in:
parent
d165f566de
commit
fd1059ffb3
99
initializer.py
Normal file
99
initializer.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from datetime import date
|
||||
from pathlib import Path
|
||||
import logging
|
||||
import matplotlib.pyplot as plt
|
||||
import pickle
|
||||
import sys
|
||||
from categories import Categories
|
||||
from transaction import Transaction as Tr, TransactionError, Transactions
|
||||
from parsers import Parser
|
||||
|
||||
|
||||
def get_transactions(data_dir):
    """Load every data file in *data_dir* into a name -> transactions map.

    Parameters:
        data_dir: directory containing the consolidated transaction files.

    Returns:
        dict mapping each file's name to the transactions read from it.

    Exits the process with code -2 on the first file that fails to parse.
    """
    dfs = dict()
    for df in Path(data_dir).iterdir():
        try:
            trs = Tr.read_transactions(df)
        except TransactionError as e:
            # FIX: report parse failures on stderr, not stdout, so they are
            # visible even when normal output is redirected.
            print(f"{e} -> datafile {df}", file=sys.stderr)
            sys.exit(-2)
        dfs[df.name] = trs

    return dfs
|
||||
|
||||
|
||||
def initialize(raw_dir, data_dir, restart=False):
    """Parse raw bank exports and sync them into per-year data files.

    Loads every data file already in *data_dir*, then walks *raw_dir*: any
    raw file that is new or modified since the last run (tracked in the
    ``.raw.pickle`` cache as ``name -> [mtime, parsed transactions]``) is
    re-parsed and merged into the matching ``<year>_<bank>.csv`` data file,
    which is rewritten on disk.

    Parameters:
        raw_dir:  directory holding the raw bank CSV exports.
        data_dir: directory holding the consolidated per-year data files.
        restart:  when True, ignore the pickle cache, reparse everything
                  and clear every transaction's category.

    Returns:
        dict mapping data-file name -> transactions (same shape as
        ``get_transactions``).
    """
    dfs = get_transactions(data_dir)
    if restart:
        rfs = dict()
        logging.debug("rewriting both .raw and .transactions pickles")
    else:
        try:
            # FIX: context manager so the pickle file handle is closed.
            with open(".raw.pickle", "rb") as fh:
                rfs = pickle.load(fh)
            # NOTE(review): `assert` is stripped under `python -O`; kept to
            # preserve the exception type callers may expect.
            assert (
                type(rfs) is dict
            ), ".raw.pickle isn't a dictionary, so it could have been corrupted"
            logging.debug(".raw.pickle opened")
        except FileNotFoundError:
            rfs = dict()
            logging.debug("no .raw.pickle found")

    updated_trs, update = dict(), False
    prompt = " has been modified since last update. Do you want to update the data files? (Yes/Update/No)"
    for rf in Path(raw_dir).iterdir():
        if rf.name in rfs and rfs[rf.name][0] == rf.stat().st_mtime:
            logging.debug(f"{rf.name} hasn't been modified since last access")
        elif (
            rf.name not in rfs
            or (answer := input(f"{rf.name}" + prompt).lower()) == "yes"
        ):
            # New or user-approved raw file: reparse it.
            trs = Parser.parse_csv(rf)
            updated_trs[rf.name] = trs
            try:
                rfs[rf.name][0] = rf.stat().st_mtime
            except KeyError:
                # First time this raw file is seen: [mtime, parsed trs].
                rfs[rf.name] = [rf.stat().st_mtime, []]
            update = True
            logging.info(f"{rf.name} parsed")
        elif answer == "update":
            # "Update": refresh the cached mtime without reparsing.
            rfs[rf.name][0] = rf.stat().st_mtime
            update = True
        else:  # prompt = no
            update = True

    if update:
        # FIX: renamed the loop variable (was `updated_trs`, shadowing the
        # dict it iterates over).
        for rf_name, file_trs in updated_trs.items():
            filename_set = set(
                (t.date.year, f"{t.date.year}_{t.bank}.csv") for t in file_trs
            )
            for year, filename in filename_set:
                trs = [t for t in file_trs if t.date.year == year]
                if filename in dfs.keys():
                    # Diff the fresh parse against the cached one.
                    new_trs = [tr for tr in trs if tr not in rfs[rf_name][1]]
                    # FIX: only consider previously-known transactions of
                    # *this* year for removal -- transactions of other years
                    # are never in dfs[filename], so list.remove() would
                    # raise ValueError for them.
                    rem_trs = [
                        tr
                        for tr in rfs[rf_name][1]
                        if tr.date.year == year and tr not in trs
                    ]

                    if new_trs:
                        dfs[filename].extend(new_trs)
                        dfs[filename].sort()

                    for rem in rem_trs:
                        dfs[filename].remove(rem)

                else:
                    dfs[filename] = trs

                Tr.write_transactions(Path(data_dir) / filename, dfs[filename])
            rfs[rf_name][1] = file_trs
            # FIX: the original f-string had no placeholder; log which raw
            # file's data files were written.
            logging.debug(f"{rf_name} written")

        # FIX: context manager so the pickle file is flushed and closed.
        with open(".raw.pickle", "wb") as fh:
            pickle.dump(rfs, fh)
        logging.debug(".raw.pickle written to disk")

    if restart:
        # On a full restart, also pick up data files that had no raw-file
        # counterpart this run, and wipe every category so the user can
        # recategorize from scratch.
        for df in Path(data_dir).iterdir():
            if df.name not in dfs:
                dfs[df.name] = Tr.read_transactions(df)
            for t in dfs[df.name]:
                t.category = ""

    return dfs
|
||||
97
main.py
97
main.py
@@ -5,101 +5,12 @@ import matplotlib.pyplot as plt
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
from initializer import initialize
|
||||
from categories import Categories
|
||||
from transaction import Transaction as Tr, TransactionError, Transactions
|
||||
from parsers import Parser
|
||||
|
||||
|
||||
def get_transactions(data_dir):
    """Read every data file under *data_dir*.

    Returns a dict keyed by file name whose values are the transactions
    parsed from that file. Terminates the program with exit code -2 as soon
    as one file cannot be read.
    """
    result = {}
    for df in Path(data_dir).iterdir():
        try:
            result[df.name] = Tr.read_transactions(df)
        except TransactionError as e:
            print(f"{e} -> datafile {df}")
            sys.exit(-2)
    return result
|
||||
|
||||
|
||||
def initialize(raw_dir, data_dir, restart=False):
    """Parse raw bank exports in *raw_dir* and sync them into *data_dir*.

    Uses a ``.raw.pickle`` cache (``name -> [mtime, parsed transactions]``)
    to skip raw files that have not changed; prompts the user about files
    that have. Returns a dict mapping data-file name -> transactions.
    When *restart* is True, the cache is discarded and every transaction's
    category is cleared.
    """
    dfs = get_transactions(data_dir)
    if restart:
        # Ignore any cached state: everything gets reparsed from scratch.
        rfs = dict()
        logging.debug("rewriting both .raw and .transactions pickles")
    else:
        try:
            # NOTE(review): file handle is never closed; consider `with`.
            rfs = pickle.load(open(".raw.pickle", "rb"))
            # NOTE(review): `assert` is stripped under `python -O`.
            assert (
                type(rfs) is dict
            ), ".raw.pickle isn't a dictionary, so it could have been corrupted"
            logging.debug(".raw.pickle opened")
        except FileNotFoundError:
            rfs = dict()
            logging.debug("no .raw.pickle found")

    # updated_trs collects freshly parsed files; update marks cache dirty.
    updated_trs, update = dict(), False
    prompt = " has been modified since last update. Do you want to update the data files? (Yes/Update/No)"
    for rf in Path(raw_dir).iterdir():
        if rf.name in rfs and rfs[rf.name][0] == rf.stat().st_mtime:
            logging.debug(f"{rf.name} hasn't been modified since last access")
        elif (
            rf.name not in rfs
            or (answer := input(f"{rf.name}" + prompt).lower()) == "yes"
        ):
            # New or user-approved raw file: reparse it.
            trs = Parser.parse_csv(rf)
            updated_trs[rf.name] = trs
            try:
                rfs[rf.name][0] = rf.stat().st_mtime
            except KeyError:
                # First time this raw file is seen: [mtime, parsed trs].
                rfs[rf.name] = [rf.stat().st_mtime, []]
            update = True
            logging.info(f"{rf.name} parsed")
        elif answer == "update":
            # "Update": remember the new mtime but keep the old parse.
            rfs[rf.name][0] = rf.stat().st_mtime
            update = True
        else:  # prompt = no
            update = True

    if update:
        # NOTE(review): the loop variable shadows the `updated_trs` dict it
        # iterates over; works because .items() is evaluated once, but is
        # fragile.
        for rf_name, updated_trs in updated_trs.items():
            filename_set = set(
                (t.date.year, f"{t.date.year}_{t.bank}.csv") for t in updated_trs
            )
            for year, filename in filename_set:
                trs = [t for t in updated_trs if t.date.year == year]
                if filename in dfs.keys():
                    # Diff the fresh parse against the cached one.
                    new_trs = [tr for tr in trs if tr not in rfs[rf_name][1]]
                    rem_trs = [tr for tr in rfs[rf_name][1] if tr not in trs]

                    if new_trs:
                        dfs[filename].extend(new_trs)
                        dfs[filename].sort()

                    # NOTE(review): rem_trs is not filtered by year; a cached
                    # transaction from another year is absent from this year's
                    # file and would make .remove() raise ValueError — confirm.
                    for rem in rem_trs:
                        dfs[filename].remove(rem)

                else:
                    dfs[filename] = trs

                Tr.write_transactions(Path(data_dir) / filename, dfs[filename])
            rfs[rf_name][1] = updated_trs
            # NOTE(review): f-string has no placeholder; probably meant to
            # name the file that was written.
            logging.debug(f"(unknown) written")

        # NOTE(review): handle is never closed; consider `with`.
        pickle.dump(rfs, open(".raw.pickle", "wb"))
        logging.debug(".raw.pickle written to disk")

    if restart:
        # Reread any data file not already loaded and clear all categories
        # so the user can recategorize from scratch.
        for df in Path(data_dir).iterdir():
            if df.name not in dfs:
                dfs[df.name] = Tr.read_transactions(df)
            for t in dfs[df.name]:
                t.category = ""

    return dfs
|
||||
|
||||
|
||||
def manual_categorization(trs):
|
||||
trs.sort_by_bank()
|
||||
for i, transaction in enumerate(trs):
|
||||
@@ -141,7 +52,7 @@ if __name__ == "__main__":
|
||||
Tr.write_transactions("transactions.csv", transactions)
|
||||
|
||||
monthly_transactions = transactions.get_transactions_by_month(
|
||||
start=date(2020, 1, 1), end=date(2020, 11, 30)
|
||||
start=date(2019, 1, 1), end=date(2020, 11, 30)
|
||||
)
|
||||
monthly_transactions_by_cat = []
|
||||
for month_transactions in monthly_transactions.values():
|
||||
@@ -161,7 +72,7 @@ if __name__ == "__main__":
|
||||
*Categories.get_discretionary_expenses(),
|
||||
]
|
||||
|
||||
if False:
|
||||
if True:
|
||||
t = list(monthly_transactions.keys())
|
||||
income = [
|
||||
float(
|
||||
@@ -200,7 +111,7 @@ if __name__ == "__main__":
|
||||
|
||||
plt.plot(t, income, label="Income")
|
||||
plt.stackplot(t, expenses, labels=expense_categories)
|
||||
plt.legend(loc="upper left")
|
||||
plt.legend(bbox_to_anchor=(1, 1), loc="upper left")
|
||||
plt.show()
|
||||
|
||||
income = [
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user