reconcile: Simplify multirecord sorting.
parent 405dd553cb
commit 7c5e98c588

1 changed file with 13 additions and 13 deletions
@@ -181,9 +181,9 @@ def format_multirecord(r1s, r2s, note):
     assert len(r1s) == 1
     assert len(r2s) > 1
     match_output = []
-    match_output.append([r1s[0]['date'].isoformat() + ' ' + r1s[0]['payee'], f'{format_record(r1s[0])}  →  {format_record(r2s[0])}  ✓ Matched{note}'])
-    for i, r2 in enumerate(r2s[1:]):
-        match_output.append([r1s[0]['date'].isoformat() + str(i) + r1s[0]['payee'], f'{r1s[0]["date"].isoformat()}:             ↳                                    →  {format_record(r2)}  ✓ Matched{note}'])
+    match_output.append([r1s[0]['date'], f'{format_record(r1s[0])}  →  {format_record(r2s[0])}  ✓ Matched{note}'])
+    for r2 in r2s[1:]:
+        match_output.append([r1s[0]['date'], f'{r1s[0]["date"].isoformat()}:             ↳                                    →  {format_record(r2)}  ✓ Matched{note}'])
     return match_output
 
 
 def sort_records(records: List) -> List:
@@ -290,19 +290,19 @@ def match_statement_and_books(statement_trans: list, books_trans: list):
 
 
 def format_matches(matches, csv_statement: str, show_reconciled_matches):
     match_output = []
-    for r1, r2, note in matches:
+    for r1s, r2s, note in matches:
         note = ', '.join(note)
         note = ': ' + note if note else note
-        if r1 and r2:
-            if show_reconciled_matches and all(x['bank_statement'] for x in r2):
-                if len(r2) == 1:
-                    match_output.append([r1[0]['date'].isoformat() + r1[0]['payee'], f'{format_record(r1[0])}  →  {format_record(r2[0])}  ✓ Matched{note}'])
+        if r1s and r2s:
+            if show_reconciled_matches and all(x['bank_statement'] for x in r2s):
+                if len(r2s) == 1:
+                    match_output.append([r1s[0]['date'], f'{format_record(r1s[0])}  →  {format_record(r2s[0])}  ✓ Matched{note}'])
                 else:
-                    match_output.extend(format_multirecord(r1, r2, note))
-        elif r1:
-            match_output.append([r1[0]['date'].isoformat() + r1[0]['payee'], Fore.RED + Style.BRIGHT + f'{format_record(r1[0])}  →  {" ":^59}  ✗ NOT IN BOOKS ({os.path.basename(csv_statement)}:{r1[0]["line"]})' + Style.RESET_ALL])
+                    match_output.extend(format_multirecord(r1s, r2s, note))
+        elif r1s:
+            match_output.append([r1s[0]['date'], Fore.RED + Style.BRIGHT + f'{format_record(r1s[0])}  →  {" ":^59}  ✗ NOT IN BOOKS ({os.path.basename(csv_statement)}:{r1s[0]["line"]})' + Style.RESET_ALL])
         else:
-            match_output.append([r2[0]['date'].isoformat() + r2[0]['payee'], Fore.RED + Style.BRIGHT + f'{" ":^59}  →  {format_record(r2[0])}  ✗ NOT ON STATEMENT ({os.path.basename(r2[0]["filename"])}:{r2[0]["line"]})' + Style.RESET_ALL])
+            match_output.append([r2s[0]['date'], Fore.RED + Style.BRIGHT + f'{" ":^59}  →  {format_record(r2s[0])}  ✗ NOT ON STATEMENT ({os.path.basename(r2s[0]["filename"])}:{r2s[0]["line"]})' + Style.RESET_ALL])
     return match_output
 
 
@@ -507,7 +507,7 @@ def main(args):
     print('-' * 155)
     print(f'{"Statement transaction":<52}            {"Books transaction":<58}   Notes')
     print('-' * 155)
-    for _, output in sorted(match_output):
+    for _, output in sorted(match_output, key=lambda x: x[0]):
         print(output)
     print('-' * 155)
     print(f'Statement period {begin_date} to {end_date}')
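
A minimal standalone sketch of what the simplified sort relies on, using hypothetical rows that are not from the repository: each match_output entry now pairs the raw date object (presumably a datetime.date) with the formatted line, so the final sort uses key=lambda x: x[0]. Without the key, sorted() would break ties between equal dates by comparing the formatted strings; with the key, only the dates are compared and Python's stable sort keeps same-day rows in the order format_matches() appended them.

import datetime

# Hypothetical [date, formatted_line] pairs shaped like match_output entries.
d = datetime.date(2023, 5, 1)
rows = [[d, 'zebra payee ...'], [d, 'apple payee ...']]

# Plain sorted(): equal dates fall through to comparing the formatted strings,
# so the rows swap places.
print([line for _, line in sorted(rows)])
# -> ['apple payee ...', 'zebra payee ...']

# key=lambda x: x[0]: only the dates are compared; the sort is stable,
# so same-day rows keep their original (appended) order.
print([line for _, line in sorted(rows, key=lambda x: x[0])])
# -> ['zebra payee ...', 'apple payee ...']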