ledger: Only display accounts requested with --account.
Now that we're accepting classifications, it's possible to specify account options that select some but not all accounts at the same level of the hierarchy. This commit tracks requested account names separately from sheet names to do that correctly.
parent: aaa26e9e61
commit: 52e7f3a221
3 changed files with 120 additions and 42 deletions
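Illustrative sketch (not part of the diff): an --account argument may now name a classification, which can select only some of the accounts under a single hierarchy root. The snippet below mimics that selection with plain strings, using account names and classifications taken from the test books in this commit; the real matching lives in data.Account.iter_accounts_by_hierarchy and iter_accounts_by_classification, whose exact semantics are assumed here.

    # Hypothetical stand-in for the classification metadata on open directives.
    OPENINGS = {
        'Assets:Checking': 'Cash',
        'Assets:PayPal': 'Cash',
        'Assets:Prepaid': 'Prepaid expenses',
        'Assets:Receivable:Accounts': 'Accounts receivable',
    }

    def select(spec):
        """Accounts matched by one --account argument: hierarchy first, then classification."""
        by_name = {name for name in OPENINGS
                   if name == spec or name.startswith(spec + ':')}
        if by_name:
            return by_name
        return {name for name, cls in OPENINGS.items() if cls == spec}

    print(select('Assets:Receivable'))  # {'Assets:Receivable:Accounts'}
    print(select('Cash'))               # {'Assets:Checking', 'Assets:PayPal'} (set order varies)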
@@ -115,19 +115,54 @@ class LedgerODS(core.BaseODS[data.Posting, data.Account]):
     def __init__(self,
                  start_date: datetime.date,
                  stop_date: datetime.date,
-                 sheet_names: Optional[Sequence[str]]=None,
+                 accounts: Optional[Sequence[str]]=None,
                  rt_wrapper: Optional[rtutil.RT]=None,
                  sheet_size: Optional[int]=None,
     ) -> None:
-        if sheet_names is None:
-            sheet_names = list(self.ACCOUNT_COLUMNS)
         if sheet_size is None:
             sheet_size = self.SHEET_SIZE
         super().__init__(rt_wrapper)
         self.date_range = ranges.DateRange(start_date, stop_date)
-        self.required_sheet_names = sheet_names
         self.sheet_size = sheet_size
+
+        if accounts is None:
+            self.accounts = set(data.Account.iter_accounts())
+            self.required_sheet_names = list(self.ACCOUNT_COLUMNS)
+        else:
+            self.accounts = set()
+            self.required_sheet_names = []
+            for acct_spec in accounts:
+                subaccounts = frozenset(data.Account.iter_accounts_by_hierarchy(acct_spec))
+                if subaccounts:
+                    self.accounts.update(subaccounts)
+                    self._require_sheet(acct_spec)
+                else:
+                    account_roots_map = collections.defaultdict(list)
+                    for account in data.Account.iter_accounts_by_classification(acct_spec):
+                        self.accounts.add(account)
+                        account_roots_map[account.root_part()].append(account)
+                    if not account_roots_map:
+                        raise ValueError("unknown account name or classification", acct_spec)
+                    for root_part, accounts in account_roots_map.items():
+                        start_count = min(account.count_parts() for account in accounts)
+                        for count in range(start_count, 1, -1):
+                            target = accounts[0].root_part(count)
+                            if all(acct.root_part(count) == target for acct in accounts):
+                                self._require_sheet(target)
+                                break
+                        else:
+                            self._require_sheet(root_part)
+
+    def _require_sheet(self, new_sheet: str) -> None:
+        for index, sheet in enumerate(self.required_sheet_names):
+            if new_sheet == sheet:
+                break
+            elif new_sheet.startswith(sheet):
+                self.required_sheet_names.insert(index, new_sheet)
+                break
+        else:
+            self.required_sheet_names.append(new_sheet)
 
     def init_styles(self) -> None:
         super().init_styles()
         self.amount_column = self.column_style(1.2)
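The trickiest part of the new __init__ is the classification branch: for each group of matched accounts sharing a root, it walks down from the deepest account name looking for the longest prefix they all share, requires a sheet for that prefix, and falls back to the bare root part otherwise. A standalone sketch of that reduction, approximating count_parts()/root_part() with str.split (an assumption about their semantics; the 'Assets:Bank:*' names are hypothetical):

    def common_sheet(account_names):
        """Deepest name prefix shared by every account, else the root part."""
        parts = [name.split(':') for name in account_names]
        start_count = min(len(p) for p in parts)
        for count in range(start_count, 1, -1):
            target = ':'.join(parts[0][:count])
            if all(':'.join(p[:count]) == target for p in parts):
                return target
        return parts[0][0]

    print(common_sheet(['Assets:Checking', 'Assets:PayPal']))        # 'Assets'
    print(common_sheet(['Assets:Bank:Checking', 'Assets:Bank:CD']))  # 'Assets:Bank'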
@@ -349,16 +384,17 @@ class LedgerODS(core.BaseODS[data.Posting, data.Account]):
         self.account_groups = dict(related_cls.group_by_account(
             post for post in rows if post.meta.date < self.date_range.stop
         ))
+        for empty_acct in self.accounts.difference(self.account_groups):
+            self.account_groups[empty_acct] = related_cls()
         self.write_balance_sheet()
         tally_by_account_iter = (
-            (account, len(related))
-            for account, related in self.account_groups.items()
+            (account, len(self.account_groups[account]))
+            for account in self.accounts
         )
         tally_by_account = {
             # 3 for the rows generated by start_section+end_section
             account: count + 3
             for account, count in tally_by_account_iter
-            if count or account.keeps_balance()
         }
         sheet_names = self.plan_sheets(
             tally_by_account, self.required_sheet_names, self.sheet_size,
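To keep explicitly requested accounts visible even when they have no postings in the date range, the hunk above seeds account_groups with an empty collection for each missing account before tallying. A toy version of that bookkeeping, with plain lists standing in for RelatedPostings:

    requested = {'Liabilities:Payable', 'Liabilities:UnearnedIncome'}
    account_groups = {'Liabilities:Payable': ['post-1', 'post-2']}  # only accounts with postings
    for empty_acct in requested.difference(account_groups):
        account_groups[empty_acct] = []
    # 3 extra rows per account for the section header/footer, as in the real tally.
    tally = {acct: len(account_groups[acct]) + 3 for acct in sorted(requested)}
    print(tally)  # {'Liabilities:Payable': 5, 'Liabilities:UnearnedIncome': 3}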
@@ -373,7 +409,7 @@ class LedgerODS(core.BaseODS[data.Posting, data.Account]):
             postings = self.account_groups[account]
             if postings:
                 super().write(postings)
-            elif account.keeps_balance() and account.is_open_on_date(self.date_range.start):
+            elif account.is_open_on_date(self.date_range.start):
                 self.start_section(account)
                 self.end_section(account)
         for index in range(using_sheet_index + 1, len(sheet_names)):
@@ -449,9 +485,9 @@ metadata to match. A single ticket number is a shortcut for
             'Income',
             'Expenses',
             'Assets:Receivable',
-            'Liabilities:Payable',
             'Assets:Prepaid',
             'Liabilities:UnearnedIncome',
+            'Liabilities:Payable',
         ]
     else:
         args.accounts = list(LedgerODS.ACCOUNT_COLUMNS)
@@ -498,33 +534,26 @@ def main(arglist: Optional[Sequence[str]]=None,
         returncode |= ReturnFlag.LOAD_ERRORS
 
     data.Account.load_from_books(entries, options)
-    accounts: Set[data.Account] = set()
-    sheet_names: Dict[str, None] = collections.OrderedDict()
-    for acct_arg in args.accounts:
-        for account in data.Account.iter_accounts(acct_arg):
-            accounts.add(account)
-            if not account.is_under(*sheet_names):
-                new_sheet = account.is_under(*LedgerODS.ACCOUNT_COLUMNS)
-                assert new_sheet is not None
-                sheet_names[new_sheet] = None
-
-    postings = (post for post in data.Posting.from_entries(entries)
-                if post.account in accounts)
+    postings = data.Posting.from_entries(entries)
     for search_term in args.search_terms:
         postings = search_term.filter_postings(postings)
 
     rt_wrapper = config.rt_wrapper()
    if rt_wrapper is None:
         logger.warning("could not initialize RT client; spreadsheet links will be broken")
-    report = LedgerODS(
-        args.start_date,
-        args.stop_date,
-        list(sheet_names),
-        rt_wrapper,
-        args.sheet_size,
-    )
+    try:
+        report = LedgerODS(
+            args.start_date,
+            args.stop_date,
+            args.accounts,
+            rt_wrapper,
+            args.sheet_size,
+        )
+    except ValueError as error:
+        logger.error("%s: %r", *error.args)
+        return 2
     report.write(postings)
-    if not report.account_groups:
+    if not any(report.account_groups.values()):
         logger.warning("no matching postings found to report")
         returncode |= ReturnFlag.NOTHING_TO_REPORT
 
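With the constructor now raising ValueError for an unrecognized --account argument, main() logs the two error args and exits with status 2. A small demonstration of that formatting (using Python's stock logging config, purely for illustration), which is also the message shape the new test_main_invalid_account below matches against:

    import logging

    logging.basicConfig()
    logger = logging.getLogger("demo")
    error = ValueError("unknown account name or classification", "Funny money")
    logger.error("%s: %r", *error.args)
    # Prints: ERROR:demo:unknown account name or classification: 'Funny money'
    # The formatted message ends with ": 'Funny money'", i.e. f": {arg!r}".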
@@ -1,6 +1,10 @@
 2018-01-01 open Equity:OpeningBalance
 2018-01-01 open Assets:Checking
+    classification: "Cash"
 2018-01-01 open Assets:PayPal
+    classification: "Cash"
 2018-01-01 open Assets:Prepaid
+    classification: "Prepaid expenses"
 2018-01-01 open Assets:Receivable:Accounts
+    classification: "Accounts receivable"
 2018-01-01 open Expenses:Other
@@ -11,6 +15,8 @@
     classification: "Accounts payable"
 2018-01-01 open Liabilities:Payable:Accounts
+    classification: "Accounts payable"
 2018-01-01 open Liabilities:UnearnedIncome
+    classification: "Unearned income"
 
 2018-02-28 * "Opening balance"
     Equity:OpeningBalance    -10,000 USD
@@ -46,10 +46,15 @@ DEFAULT_REPORT_SHEETS = [
     'Equity',
     'Assets:Receivable',
     'Liabilities:Payable',
     'Assets:PayPal',
     'Assets',
     'Liabilities',
 ]
-PROJECT_REPORT_SHEETS = DEFAULT_REPORT_SHEETS[:6]
+PROJECT_REPORT_SHEETS = DEFAULT_REPORT_SHEETS[:5] + [
+    'Assets:Prepaid',
+    'Liabilities:UnearnedIncome',
+    'Liabilities:Payable',
+]
+del PROJECT_REPORT_SHEETS[3]
 OVERSIZE_RE = re.compile(
     r'^([A-Za-z0-9:]+) has ([0-9,]+) rows, over size ([0-9,]+)$'
@@ -107,7 +112,7 @@ class ExpectedPostings(core.RelatedPostings):
         else:
             return
         closing_bal = norm_func(expect_posts.balance_at_cost())
-        if account.is_under('Assets', 'Equity', 'Liabilities'):
+        if account.is_under('Assets', 'Liabilities'):
             opening_row = testutil.ODSCell.from_row(next(rows))
             assert opening_row[0].value == start_date
             assert opening_row[4].text == open_bal.format(None, empty='0', sep='\0')
@@ -125,7 +130,8 @@ class ExpectedPostings(core.RelatedPostings):
             assert next(cells).value == norm_func(expected.at_cost().number)
         closing_row = testutil.ODSCell.from_row(next(rows))
         assert closing_row[0].value == end_date
-        assert closing_row[4].text == closing_bal.format(None, empty='$0.00', sep='\0')
+        empty = '$0.00' if expect_posts else '0'
+        assert closing_row[4].text == closing_bal.format(None, empty=empty, sep='\0')
 
 
 def get_sheet_names(ods):
@@ -238,22 +244,26 @@ def test_plan_sheets_full_split_required(caplog):
     (STOP_DATE, STOP_DATE.replace(month=12)),
 ])
 def test_date_range_report(ledger_entries, start_date, stop_date):
-    postings = list(data.Posting.from_entries(ledger_entries))
-    report = ledger.LedgerODS(start_date, stop_date)
-    report.write(iter(postings))
+    postings = list(data.Posting.from_entries(iter(ledger_entries)))
+    with clean_account_meta():
+        data.Account.load_openings_and_closings(iter(ledger_entries))
+        report = ledger.LedgerODS(start_date, stop_date)
+        report.write(iter(postings))
     for _, expected in ExpectedPostings.group_by_account(postings):
         expected.check_report(report.document, start_date, stop_date)
 
-@pytest.mark.parametrize('sheet_names', [
+@pytest.mark.parametrize('accounts', [
     ('Income', 'Expenses'),
     ('Assets:Receivable', 'Liabilities:Payable'),
 ])
-def test_account_names_report(ledger_entries, sheet_names):
-    postings = list(data.Posting.from_entries(ledger_entries))
-    report = ledger.LedgerODS(START_DATE, STOP_DATE, sheet_names=sheet_names)
-    report.write(iter(postings))
+def test_account_names_report(ledger_entries, accounts):
+    postings = list(data.Posting.from_entries(iter(ledger_entries)))
+    with clean_account_meta():
+        data.Account.load_openings_and_closings(iter(ledger_entries))
+        report = ledger.LedgerODS(START_DATE, STOP_DATE, accounts=accounts)
+        report.write(iter(postings))
     for key, expected in ExpectedPostings.group_by_account(postings):
-        should_find = key.startswith(sheet_names)
+        should_find = key.startswith(accounts)
         try:
             expected.check_report(report.document, START_DATE, STOP_DATE)
         except NotFound:
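The updated tests wrap report construction in clean_account_meta() and reload the books' open/close directives per test, so account metadata loaded for one parametrized case does not leak into the next. clean_account_meta() is a helper defined elsewhere in this test module; a plausible shape for such a fixture, shown only to explain the with-block and entirely hypothetical, is:

    import contextlib

    ACCOUNT_META = {}  # hypothetical stand-in for the registry that
                       # data.Account.load_openings_and_closings() fills in

    @contextlib.contextmanager
    def clean_account_meta():
        saved = dict(ACCOUNT_META)
        try:
            yield
        finally:
            ACCOUNT_META.clear()
            ACCOUNT_META.update(saved)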
@@ -280,6 +290,7 @@ def test_main(ledger_entries):
         '-b', START_DATE.isoformat(),
         '-e', STOP_DATE.isoformat(),
     ])
     output.seek(0)
+    assert not errors.getvalue()
     assert retcode == 0
     ods = odf.opendocument.load(output)
@@ -304,7 +315,10 @@ def test_main_account_limit(ledger_entries, acct_arg):
     assert get_sheet_names(ods) == ['Balance', 'Liabilities']
     postings = data.Posting.from_entries(ledger_entries)
     for account, expected in ExpectedPostings.group_by_account(postings):
-        should_find = account.startswith('Liabilities')
+        if account == 'Liabilities:UnearnedIncome':
+            should_find = acct_arg == 'Liabilities'
+        else:
+            should_find = account.startswith('Liabilities')
         try:
             expected.check_report(ods, START_DATE, STOP_DATE)
         except NotFound:
@@ -312,6 +326,26 @@ def test_main_account_limit(ledger_entries, acct_arg):
         else:
             assert should_find
 
+def test_main_account_classification_splits_hierarchy(ledger_entries):
+    retcode, output, errors = run_main([
+        '-a', 'Cash',
+        '-b', START_DATE.isoformat(),
+        '-e', STOP_DATE.isoformat(),
+    ])
+    assert not errors.getvalue()
+    assert retcode == 0
+    ods = odf.opendocument.load(output)
+    assert get_sheet_names(ods) == ['Balance', 'Assets']
+    postings = data.Posting.from_entries(ledger_entries)
+    for account, expected in ExpectedPostings.group_by_account(postings):
+        should_find = (account == 'Assets:Checking' or account == 'Assets:PayPal')
+        try:
+            expected.check_report(ods, START_DATE, STOP_DATE)
+        except NotFound:
+            assert not should_find, f"{account} not found in report"
+        else:
+            assert should_find, f"{account} in report but should be excluded"
+
 @pytest.mark.parametrize('project,start_date,stop_date', [
     ('eighteen', START_DATE, MID_DATE.replace(day=30)),
     ('nineteen', MID_DATE, STOP_DATE),
@@ -334,6 +368,15 @@ def test_main_project_report(ledger_entries, project, start_date, stop_date):
     for _, expected in ExpectedPostings.group_by_account(related):
         expected.check_report(ods, start_date, stop_date)
 
+@pytest.mark.parametrize('arg', [
+    'Assets:NoneSuchBank',
+    'Funny money',
+])
+def test_main_invalid_account(caplog, arg):
+    retcode, output, errors = run_main(['-a', arg])
+    assert retcode == 2
+    assert any(log.message.endswith(f': {arg!r}') for log in caplog.records)
+
 def test_main_no_postings(caplog):
     retcode, output, errors = run_main(['NonexistentProject'])
     assert retcode == 24