#!/usr/bin/env python3
"""accrual-report - Status reports for accruals

accrual-report checks accruals (postings under Assets:Receivable and
Liabilities:Payable) for errors and metadata consistency, and reports any
problems on stderr. Then it writes a report about the status of those
accruals.

If you run it with no arguments, it will generate an aging report in ODS format.

Otherwise, the typical way to run it is to pass an RT ticket number or
invoice link as an argument, to report about accruals that match those
criteria::

    # Report all accruals associated with RT#1230:
    accrual-report 1230
    # Report all accruals with the invoice link rt:45/670.
    accrual-report 45/670
    # Report all accruals with the invoice link Invoice980.pdf.
    accrual-report Invoice980.pdf

By default, to stay fast, accrual-report only looks at unaudited books. You
can search further back in history by passing the ``--since`` argument. The
argument can be a fiscal year, or a negative number of how many years back
to search::

    # Search for accruals since 2016
    accrual-report --since 2016 [search terms …]
    # Search for accruals from the beginning of three fiscal years ago
    accrual-report --since -3 [search terms …]

If you want to further limit what accruals are reported, you can match on
other metadata by passing additional arguments in ``name=value`` format.
You can pass any number of search terms. For example::

    # Report accruals associated with RT#1230 and Jane Doe
    accrual-report 1230 entity=Doe-Jane

accrual-report will automatically decide what kind of report to generate
from the search terms you provide and the results they return. If you
searched on an RT ticket or invoice that returned a single outstanding
payable, it writes an outgoing approval report. If you searched on an RT
ticket or invoice that returned other results, it writes a balance report.
Otherwise, it writes an aging report. You can specify what report type you
want with the ``--report-type`` option::

    # Write an outgoing approval report for all outstanding payables for
    # Jane Doe, even if there's more than one
    accrual-report --report-type outgoing entity=Doe-Jane
    # Write an aging report for a single RT invoice (this can be helpful when
    # one invoice covers multiple parties)
    accrual-report --report-type aging 12/345
"""
# Copyright © 2020 Brett Smith
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import argparse
import collections
import datetime
import enum
import logging
import re
import sys

from pathlib import Path

from typing import (
    cast,
    Any,
    BinaryIO,
    Callable,
    Deque,
    Dict,
    Hashable,
    Iterable,
    Iterator,
    List,
    Mapping,
    NamedTuple,
    Optional,
    Sequence,
    Set,
    TextIO,
    Tuple,
    TypeVar,
    Union,
)
from ..beancount_types import (
    Entries,
    Error,
    Errors,
    MetaKey,
    MetaValue,
    Transaction,
)

import odf.style  # type:ignore[import]
import odf.table  # type:ignore[import]
import rt

from beancount.parser import printer as bc_printer

from . import core
from .. import books
from .. import cliutil
from .. import config as configmod
from .. import data
from .. import filters
from .. import rtutil

PROGNAME = 'accrual-report'

PostGroups = Mapping[Optional[Hashable], 'AccrualPostings']
T = TypeVar('T')

logger = logging.getLogger('conservancy_beancount.reports.accrual')

class Account(NamedTuple):
    name: str
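    # Day counts for the aging report buckets, ordered oldest to newest;
    # AgingODS.write_row adds each outstanding balance to the first bucket
    # whose threshold the accrual's age meets.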
    aging_thresholds: Sequence[int]


class AccrualAccount(enum.Enum):
    # Note the aging report uses the same order accounts are defined here.
    # See AgingODS.start_spreadsheet().
    RECEIVABLE = Account('Assets:Receivable', [365, 120, 90, 60])
    PAYABLE = Account('Liabilities:Payable', [365, 90, 60, 30])

    @classmethod
    def account_names(cls) -> Iterator[str]:
        return (acct.value.name for acct in cls)

    @classmethod
    def by_account(cls, name: data.Account) -> 'AccrualAccount':
        for account in cls:
            if name.is_under(account.value.name):
                return account
        raise ValueError(f"unrecognized account {name!r}")

    @classmethod
    def classify(cls, related: core.RelatedPostings) -> 'AccrualAccount':
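        # Return the member whose account covers every posting in `related`,
        # or raise ValueError if the postings don't all fall under a single
        # accrual account.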
        for account in cls:
            account_name = account.value.name
            if all(post.account.is_under(account_name) for post in related):
                return account
        raise ValueError("unrecognized account set in related postings")

    @property
    def normalize_amount(self) -> Callable[[T], T]:
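        # core.normalize_amount_func returns a callable that adjusts signs
        # for this account type, so a fully paid accrual of either kind
        # normalizes to a balance at or below zero (see is_paid below).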
        return core.normalize_amount_func(self.value.name)


class AccrualPostings(core.RelatedPostings):
    __slots__ = ()

    @classmethod
    def make_consistent(cls,
                        postings: Iterable[data.Posting],
                        ) -> Iterator[Tuple[Hashable, 'AccrualPostings']]:
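        # Group accrual postings by (date, entity, invoice, account) and
        # payment postings by (invoice, account), then apply payments to each
        # accrual first-in-first-out until its balance reaches zero, splitting
        # any payment that would overshoot. Yields each accrual group plus any
        # leftover payment-only groups.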
        accruals: Dict[Tuple[str, ...], List[data.Posting]] = collections.defaultdict(list)
        payments: Dict[Tuple[str, ...], Deque[data.Posting]] = collections.defaultdict(Deque)
        key: Tuple[str, ...]
        for post in postings:
            norm_func = core.normalize_amount_func(post.account)
            invoice = str(post.meta.get('invoice', 'BlankInvoice'))
            if norm_func(post.units.number) >= 0:
                entity = str(post.meta.get('entity', 'BlankEntity'))
                key = (post.meta.date.isoformat(), entity, invoice, post.account)
                accruals[key].append(post)
            else:
                key = (invoice, post.account)
                payments[key].append(post)

        for key, acc_posts in accruals.items():
            pay_posts = payments[key[2:]]
            if not pay_posts:
                continue
            norm_func = core.normalize_amount_func(key[-1])
            balance = norm_func(core.MutableBalance(post.at_cost() for post in acc_posts))
            while pay_posts and not balance.le_zero():
                pay_post = pay_posts.popleft()
                acc_posts.append(pay_post)
                balance += norm_func(pay_post.at_cost())
            if balance.le_zero() and not balance.is_zero():
                # pay_post causes the accrual to be overpaid. Split it into two
                # synthesized postings: one that causes the accrual to be
                # exactly zero, and one with the remainder back in payments.
                post_cost = pay_post.at_cost()
                # Calling norm_func() reverses the call in the while loop to add
                # the amount to the balance.
                overpayment = norm_func(balance[post_cost.currency])
                amt_to_zero = post_cost._replace(number=post_cost.number - overpayment.number)
                acc_posts[-1] = pay_post._replace(units=amt_to_zero, cost=None, price=None)
                pay_posts.appendleft(pay_post._replace(units=overpayment, cost=None, price=None))
            acc_posts.sort(key=lambda post: post.meta.date)

        for key, acc_posts in accruals.items():
            yield key, cls(acc_posts, _can_own=True)
        for key, pay_posts in payments.items():
            if pay_posts:
                yield key, cls(pay_posts, _can_own=True)

    def is_paid(self) -> Optional[bool]:
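        # True when this accrual's normalized balance is paid off, False when
        # it is still outstanding, None when the postings don't all belong to
        # one accrual account.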
        try:
            accrual_type = AccrualAccount.classify(self)
        except ValueError:
            return None
        else:
            return accrual_type.normalize_amount(self.balance()).le_zero()


class BaseReport:
    def __init__(self, out_file: TextIO) -> None:
        self.out_file = out_file
        self.logger = logger.getChild(type(self).__name__)

    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
        raise NotImplementedError("BaseReport._report")

    def run(self, groups: PostGroups) -> None:
        for index, invoice in enumerate(groups):
            for line in self._report(groups[invoice], index):
                print(line, file=self.out_file)


class AgingODS(core.BaseODS[AccrualPostings, data.Account]):
    DOC_COLUMNS = [
        'rt-id',
        'invoice',
        'approval',
        'contract',
        'purchase-order',
    ]
    COLUMNS = [
        'Date',
        data.Metadata.human_name('entity'),
        'Invoice Amount',
        'Booked Amount',
        data.Metadata.human_name('project'),
        *(data.Metadata.human_name(key) for key in DOC_COLUMNS),
    ]
    COL_COUNT = len(COLUMNS)

    def __init__(self,
                 rt_wrapper: rtutil.RT,
                 date: datetime.date,
                 logger: logging.Logger,
                 ) -> None:
        super().__init__(rt_wrapper)
        self.date = date
        self.logger = logger

    def section_key(self, row: AccrualPostings) -> data.Account:
        return row[0].account

    def start_spreadsheet(self) -> None:
        for accrual_type in AccrualAccount:
            self.use_sheet(accrual_type.name.title())
            for index in range(self.COL_COUNT):
                if index == 0:
                    style: Union[str, odf.style.Style] = ''
                elif index < 6:
                    style = self.column_style(1.2)
                else:
                    style = self.column_style(1.5)
                self.sheet.addElement(odf.table.TableColumn(stylename=style))
            self.add_row(*(
                self.string_cell(name, stylename=self.style_bold)
                for name in self.COLUMNS
            ))
            self.lock_first_row()

    def start_section(self, key: data.Account) -> None:
        self.norm_func = core.normalize_amount_func(key)
        self.age_thresholds = list(AccrualAccount.by_account(key).value.aging_thresholds)
        self.age_balances = [core.MutableBalance() for _ in self.age_thresholds]
        accrual_date = self.date - datetime.timedelta(days=self.age_thresholds[-1])
        acct_parts = key.slice_parts()
        self.use_sheet(acct_parts[1])
        self.add_row()
        self.add_row(self.string_cell(
            f"{' '.join(acct_parts[2:])} {acct_parts[1]} Aging Report"
            f" Accrued by {accrual_date.isoformat()} Unpaid by {self.date.isoformat()}",
            stylename=self.merge_styles(self.style_bold, self.style_centertext),
            numbercolumnsspanned=self.COL_COUNT,
        ))
        self.add_row()

    def end_section(self, key: data.Account) -> None:
        total_balance = core.MutableBalance()
        text_style = self.merge_styles(self.style_bold, self.style_endtext)
        text_span = 4
        last_age_text: Optional[str] = None
        self.add_row()
        for threshold, balance in zip(self.age_thresholds, self.age_balances):
            years, days = divmod(threshold, 365)
            years_text = f"{years} {'Year' if years == 1 else 'Years'}"
            days_text = f"{days} Days"
            if years and days:
                age_text = f"{years_text} {days_text}"
            elif years:
                age_text = years_text
            else:
                age_text = days_text
            if last_age_text is None:
                age_range = f"Over {age_text}"
            else:
                age_range = f"{age_text}–{last_age_text}"
            self.add_row(
                self.string_cell(
                    f"Total Aged {age_range}: ",
                    stylename=text_style,
                    numbercolumnsspanned=text_span,
                ),
                *(odf.table.TableCell() for _ in range(1, text_span)),
                self.balance_cell(balance),
            )
            last_age_text = age_text
            total_balance += balance
        self.add_row(
            self.string_cell(
                "Total Unpaid: ",
                stylename=text_style,
                numbercolumnsspanned=text_span,
            ),
            *(odf.table.TableCell() for _ in range(1, text_span)),
            self.balance_cell(total_balance),
        )

    def write_row(self, row: AccrualPostings) -> None:
        row_date = row[0].meta.date
        row_balance = self.norm_func(row.balance_at_cost())
        age = (self.date - row_date).days
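        # Bucket non-negative balances by age; rows younger than the newest
        # threshold are skipped entirely. Negative (overpaid) balances are
        # not bucketed but still get a row.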
        if row_balance.ge_zero():
            for index, threshold in enumerate(self.age_thresholds):
                if age >= threshold:
                    self.age_balances[index] += row_balance
                    break
            else:
                return
        raw_balance = self.norm_func(row.balance())
        if raw_balance == row_balance:
            amount_cell = odf.table.TableCell()
        else:
            amount_cell = self.balance_cell(raw_balance)
        entities = row.meta_values('entity')
        entities.discard(None)
        projects = row.meta_values('project')
        projects.discard(None)
        self.add_row(
            self.date_cell(row_date),
            self.multiline_cell(sorted(entities)),
            amount_cell,
            self.balance_cell(row_balance),
            self.multiline_cell(sorted(projects)),
            *(self.meta_links_cell(row.all_meta_links(key))
              for key in self.DOC_COLUMNS),
        )


class AgingReport(BaseReport):
    def __init__(self,
                 rt_wrapper: rtutil.RT,
                 out_file: BinaryIO,
                 date: Optional[datetime.date]=None,
                 ) -> None:
        if date is None:
            date = datetime.date.today()
        self.out_bin = out_file
        self.logger = logger.getChild(type(self).__name__)
        self.ods = AgingODS(rt_wrapper, date, self.logger)

    def run(self, groups: PostGroups) -> None:
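        # Skip groups that net to zero at cost, then sort rows by account,
        # accrual date, and balance magnitude before writing the spreadsheet.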
        rows = [group for group in groups.values()
                if not group.balance_at_cost().is_zero()]
        rows.sort(key=lambda group: (
            group[0].account,
            group[0].meta.date,
            abs(sum(amt.number for amt in group.balance_at_cost().values())),
        ))
        self.ods.write(rows)
        self.ods.save_file(self.out_bin)


class BalanceReport(BaseReport):
    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
        meta = posts[0].meta
        date_s = meta.date.strftime('%Y-%m-%d')
        entity_s = meta.get('entity', '<no entity>')
        invoice_s = meta.get('invoice', '<no invoice>')
        balance_s = posts.balance_at_cost().format(zero="Zero balance")
        if index:
            yield ""
        yield f"{entity_s} {invoice_s}:"
        yield f" {balance_s} outstanding since {date_s}"


class OutgoingReport(BaseReport):
    PAYMENT_METHODS = {
        'ach': 'ACH',
        'check': 'Check',
        'creditcard': 'Credit Card',
        'debitcard': 'Debit Card',
        'fxwire': 'International Wire',
        'paypal': 'PayPal',
        'uswire': 'Domestic Wire',
        'vendorportal': 'Vendor Portal',
    }
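    # payment-method metadata is expected to look like "usd ach": three
    # lowercase letters (the currency) followed by one of the keys above.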
    PAYMENT_METHOD_RE = re.compile(rf'^([a-z]{{3}})\s+({"|".join(PAYMENT_METHODS)})$')

    def __init__(self, rt_wrapper: rtutil.RT, out_file: TextIO) -> None:
        super().__init__(out_file)
        self.rt_wrapper = rt_wrapper
        self.rt_client = rt_wrapper.rt

    def _primary_rt_id(self, posts: AccrualPostings) -> rtutil.TicketAttachmentIds:
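        # The outgoing report is tied to one RT ticket: require exactly one
        # rt-id link across the postings and return it parsed into ids.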
        rt_ids = posts.first_meta_links('rt-id')
        rt_id = next(rt_ids, None)
        rt_id2 = next(rt_ids, None)
        if rt_id is None:
            raise ValueError("no rt-id links found")
        elif rt_id2 is not None:
            raise ValueError("multiple rt-id links found")
        parsed = rtutil.RT.parse(rt_id)
        if parsed is None:
            raise ValueError("rt-id is not a valid RT reference")
        else:
            return parsed

    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
        try:
            ticket_id, _ = self._primary_rt_id(posts)
            ticket = self.rt_client.get_ticket(ticket_id)
            # Note we only use this when ticket is None.
            errmsg = f"ticket {ticket_id} not found"
        except (ValueError, rt.RtError) as error:
            ticket = None
            errmsg = error.args[0]
        if ticket is None:
            meta = posts[0].meta
            self.logger.error(
                "can't generate outgoings report for %s %s %s because no RT ticket available: %s",
                meta.date.isoformat(),
                meta.get('entity', '<no entity>'),
                meta.get('invoice', '<no invoice>'),
                errmsg,
            )
            return

        try:
            rt_requestor = self.rt_client.get_user(ticket['Requestors'][0])
        except (IndexError, rt.RtError):
            rt_requestor = None
        if rt_requestor is None:
            requestor = ''
            requestor_name = ''
        else:
            requestor_name = (
                rt_requestor.get('RealName')
                or ticket.get('CF.{payment-to}')
                or ''
            )
            requestor = f'{requestor_name} <{rt_requestor["EmailAddress"]}>'.strip()

        balance = -posts.balance_at_cost()
        balance_s = balance.format(None)
        raw_balance = -posts.balance()
        payment_amount = raw_balance.format('¤¤ #,##0.00')
        if raw_balance != balance:
            payment_amount += f' ({balance_s})'
            balance_s = f'{raw_balance} ({balance_s})'

        payment_to = ticket.get('CF.{payment-to}') or requestor_name
        contract_links = list(posts.all_meta_links('contract'))
        if contract_links:
            contract_s = ' , '.join(self.rt_wrapper.iter_urls(
                contract_links, missing_fmt='<BROKEN RT LINK: {}>',
            ))
        else:
            contract_s = "NO CONTRACT GOVERNS THIS TRANSACTION"
        projects = [v for v in posts.meta_values('project')
                    if isinstance(v, str)]

        yield "PAYMENT FOR APPROVAL:"
        yield f"REQUESTOR: {requestor}"
        yield f"PAYMENT TO: {payment_to}"
        yield f"TOTAL TO PAY: {balance_s}"
        yield f"AGREEMENT: {contract_s}"
        yield f"PROJECT: {', '.join(projects)}"
        yield "\nBEANCOUNT ENTRIES:\n"

        last_txn: Optional[Transaction] = None
        for post in posts:
            txn = post.meta.txn
            if txn is not last_txn:
                last_txn = txn
                txn = self.rt_wrapper.txn_with_urls(txn, '{}')
                # Suppress payment-method metadata from the report.
                txn.meta.pop('payment-method', None)
                for txn_post in txn.postings:
                    if txn_post.meta:
                        txn_post.meta.pop('payment-method', None)
                yield bc_printer.format_entry(txn)

        cf_targets = {
            'payment-amount': payment_amount,
            'payment-to': payment_to,
        }
        payment_methods = filters.iter_unique(
            post.meta['payment-method'].lower()
            for post in posts
            if isinstance(post.meta.get('payment-method'), str)
        )
        payment_method: Optional[str] = next(payment_methods, None)
        if payment_method is None:
            payment_method_count = "no"
        elif next(payment_methods, None) is None:
            pass
        else:
            payment_method_count = "multiple"
            payment_method = None
        if payment_method is None:
            self.logger.warning(
                "cannot set payment-method for rt:%s: %s metadata values found",
                ticket_id, payment_method_count,
            )
        else:
            match = self.PAYMENT_METHOD_RE.fullmatch(payment_method)
            if match is None:
                self.logger.warning(
                    "cannot set payment-method for rt:%s: invalid value %r",
                    ticket_id, payment_method,
                )
            else:
                cf_targets['payment-method'] = '{} {}'.format(
                    match.group(1).upper(),
                    self.PAYMENT_METHODS[match.group(2)],
                )

        cf_updates = {
            f'CF_{key}': value
            for key, value in cf_targets.items()
            if ticket.get(f'CF.{{{key}}}') != value
        }
        if cf_updates:
            try:
                ok = self.rt_client.edit_ticket(ticket_id, **cf_updates)
            except rt.RtError:
                self.logger.debug("RT exception on edit_ticket", exc_info=True)
                ok = False
            if not ok:
                self.logger.warning("failed to set custom fields for rt:%s", ticket_id)


class ReportType(enum.Enum):
    AGING = AgingReport
    BALANCE = BalanceReport
    OUTGOING = OutgoingReport
    AGE = AGING
    BAL = BALANCE
    OUT = OUTGOING
    OUTGOINGS = OUTGOING

    @classmethod
    def by_name(cls, name: str) -> 'ReportType':
        try:
            return cls[name.upper()]
        except KeyError:
            raise ValueError(f"unknown report type {name!r}") from None


class ReturnFlag(enum.IntFlag):
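    # Flags are OR'd into the return code; main() exits with 0 on success,
    # or 16 plus the accumulated flags otherwise.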
    LOAD_ERRORS = 1
    # 2 was used in the past, it can probably be reclaimed.
    REPORT_ERRORS = 4
    NOTHING_TO_REPORT = 8


def filter_search(postings: Iterable[data.Posting],
                  search_terms: Iterable[cliutil.SearchTerm],
                  ) -> Iterable[data.Posting]:
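    # Limit postings to the accrual accounts, then apply each search term's
    # filter in turn.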
    accounts = tuple(AccrualAccount.account_names())
    postings = (post for post in postings if post.account.is_under(*accounts))
    for query in search_terms:
        postings = query.filter_postings(postings)
    return postings

def parse_arguments(arglist: Optional[Sequence[str]]=None) -> argparse.Namespace:
    parser = argparse.ArgumentParser(prog=PROGNAME)
    cliutil.add_version_argument(parser)
    parser.add_argument(
        '--report-type', '-t',
        metavar='NAME',
        type=ReportType.by_name,
        help="""The type of report to generate, one of `aging`, `balance`, or
`outgoing`. If not specified, the default is `aging` when no search terms are
given, `outgoing` for search terms that return a single outstanding payable,
and `balance` any other time.
""")
    parser.add_argument(
        '--since',
        metavar='YEAR',
        type=int,
        default=0,
        help="""How far back to search the books for related transactions.
You can either specify a fiscal year, or a negative offset from the current
fiscal year, to start loading entries from. The default is to load the current,
unaudited books.
""")
    parser.add_argument(
        '--output-file', '-O',
        metavar='PATH',
        type=Path,
        help="""Write the report to this file, or stdout when PATH is `-`.
The default is stdout for the balance and outgoing reports, and a generated
filename for other reports.
""")
    cliutil.add_loglevel_argument(parser)
    parser.add_argument(
        'search_terms',
        metavar='FILTER',
        type=cliutil.SearchTerm.arg_parser('invoice', 'rt-id'),
        nargs=argparse.ZERO_OR_MORE,
        help="""Report on accruals that match these criteria. The format is
NAME=TERM. TERM is a link or word that must exist in a posting's NAME
metadata to match. A single ticket number is a shortcut for
`rt-id=rt:NUMBER`. Any other link, including an RT attachment link in
`TIK/ATT` format, is a shortcut for `invoice=LINK`.
""")
    args = parser.parse_args(arglist)
    if args.report_type is None and not any(
            term.meta_key == 'invoice' or term.meta_key == 'rt-id'
            for term in args.search_terms
    ):
        args.report_type = ReportType.AGING
    return args

def main(arglist: Optional[Sequence[str]]=None,
         stdout: TextIO=sys.stdout,
         stderr: TextIO=sys.stderr,
         config: Optional[configmod.Config]=None,
         ) -> int:
    args = parse_arguments(arglist)
    cliutil.set_loglevel(logger, args.loglevel)
    if config is None:
        config = configmod.Config()
        config.load_file()

    returncode = 0
    books_loader = config.books_loader()
    if books_loader is None:
        entries, load_errors, _ = books.Loader.load_none(config.config_file_path())
    else:
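        # The aging report ignores --since when deciding how much
        # history to load.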
        load_since = None if args.report_type == ReportType.AGING else args.since
        entries, load_errors, _ = books_loader.load_all(load_since)
    filters.remove_opening_balance_txn(entries)
    for error in load_errors:
        bc_printer.print_error(error, file=stderr)
        returncode |= ReturnFlag.LOAD_ERRORS

    postings = list(filter_search(
        data.Posting.from_entries(entries), args.search_terms,
    ))
    if not postings:
        logger.warning("no matching entries found to report")
        returncode |= ReturnFlag.NOTHING_TO_REPORT
    # groups is a mapping of metadata value strings to AccrualPostings.
    # The keys are basically arbitrary, the report classes don't rely on them,
    # but they do help symbolize what's being grouped.
    # For the outgoing approval report, groups maps rt-id link strings to
    # associated accruals.
    # For all other reports, groups comes from AccrualPostings.make_consistent().
    groups: PostGroups
    if args.report_type is None or args.report_type is ReportType.OUTGOING:
        groups = dict(AccrualPostings.group_by_first_meta_link(postings, 'rt-id'))
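        # With no explicit report type, upgrade to an outgoing approval
        # report when the search matched a single payable group with a
        # usable rt-id and an outstanding balance.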
        if args.report_type is None and len(groups) == 1:
            key = next(iter(groups))
            group = groups[key]
            account = group[0].account
            if (AccrualAccount.by_account(account) is AccrualAccount.PAYABLE
                    and all(post.account == account for post in group)
                    and not group.balance().ge_zero()
                    and key):  # Make sure we have a usable rt-id
                args.report_type = ReportType.OUTGOING
    if args.report_type is not ReportType.OUTGOING:
        groups = dict(AccrualPostings.make_consistent(postings))
    if args.report_type is not ReportType.AGING:
        groups = {
            key: posts for key, posts in groups.items() if not posts.is_paid()
        } or groups
    del postings

    report: Optional[BaseReport] = None
    output_path: Optional[Path] = None
    if args.report_type is ReportType.AGING:
        rt_wrapper = config.rt_wrapper()
        if rt_wrapper is None:
            logger.error("unable to generate aging report: RT client is required")
        else:
            now = datetime.datetime.now()
            if args.output_file is None:
                out_dir_path = config.repository_path() or Path()
                args.output_file = out_dir_path / now.strftime('AgingReport_%Y-%m-%d_%H:%M.ods')
            logger.info("Writing report to %s", args.output_file)
            out_bin = cliutil.bytes_output(args.output_file, stdout)
            report = AgingReport(rt_wrapper, out_bin)
    elif args.report_type is ReportType.OUTGOING:
        rt_wrapper = config.rt_wrapper()
        if rt_wrapper is None:
            logger.error("unable to generate outgoing report: RT client is required")
        else:
            out_file = cliutil.text_output(args.output_file, stdout)
            report = OutgoingReport(rt_wrapper, out_file)
    else:
        out_file = cliutil.text_output(args.output_file, stdout)
        report = BalanceReport(out_file)

    if report is None:
        returncode |= ReturnFlag.REPORT_ERRORS
    else:
        report.run(groups)
    return 0 if returncode == 0 else 16 + returncode

entry_point = cliutil.make_entry_point(__name__, PROGNAME)

if __name__ == '__main__':
    exit(entry_point())