Fixed multi-processing work & created combined monthly report

back_report_creation
3 years ago
parent 9e2d960f7e
commit 86afb277e5
Signed by untrusted user who does not match committer: gprog
GPG Key ID: 5BE9BB58D37713F8
1 changed file: src/back_reporting.py (162)

@@ -2,7 +2,7 @@ from pathlib import Path
 import re
 from re import Pattern
 import pandas as pd
-from pandas import DataFrame
+from pandas import DataFrame, ExcelWriter, read_excel
 from datetime import datetime as dt, timedelta
 import logging
 import il_reports as ilr
@@ -14,16 +14,39 @@ import os
 TOP_PATH: Path = Path(r"\\leafnow.com\shared\Accounting\CASH APPS\2023")
 
-def create_logger(log_file: Path, logger_name: str = __name__, ):
+class LevelFilter(object):
+    def __init__(self, level):
+        self.__level = level
+
+    def filter(self, logRecord):
+        return logRecord.levelno == self.__level
+
+
+def create_logger(logger_name: str = __name__, ):
     logger = logging.getLogger(logger_name)
-    f_handler = logging.FileHandler(log_file, 'w')
-    f_handler.setLevel(logging.DEBUG)
+    log_folder = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\logs")
+    fail_handler = logging.FileHandler(Path(log_folder,"Fail_br.log"), 'w')
+    fail_handler.setLevel(logging.WARNING)
+    info_handler = logging.FileHandler(Path(log_folder,"Info_br.log"), 'w')
+    info_handler.setLevel(logging.INFO)
+    info_handler.addFilter(LevelFilter(logging.INFO))
+    debug_handler = logging.FileHandler(Path(log_folder,"Debug_br.log"), 'w')
+    debug_handler.setLevel(logging.DEBUG)
+    debug_handler.addFilter(LevelFilter(logging.DEBUG))
     s_handler = logging.StreamHandler()
     s_handler.setLevel(logging.INFO)
-    logger.addHandler(f_handler)
+    logger.addHandler(fail_handler)
+    logger.addHandler(info_handler)
+    logger.addHandler(debug_handler)
     logger.addHandler(s_handler)
     return logger
+
+logger = create_logger()
 
 @dataclass
 class ExtractInstruction:
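Note on the new logging setup (an observation on the diff, not part of the commit): LevelFilter makes Info_br.log and Debug_br.log receive only records of exactly that level, while Fail_br.log takes WARNING and above. One thing worth checking is the logger's own level: as far as this hunk shows, create_logger never calls setLevel, and a logger left at NOTSET falls back to the root default of WARNING, so INFO and DEBUG records may never reach their handlers unless the level is set elsewhere in the module. A minimal, self-contained sketch of the same pattern with the level set explicitly (file names here are placeholders):

import logging

class LevelFilter:
    """Let through only records whose level matches exactly."""
    def __init__(self, level):
        self._level = level

    def filter(self, record):
        return record.levelno == self._level

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)          # without this, the root default (WARNING) applies

info_handler = logging.FileHandler("info_only.log", "w")
info_handler.setLevel(logging.INFO)
info_handler.addFilter(LevelFilter(logging.INFO))    # exactly INFO, nothing else

fail_handler = logging.FileHandler("warnings_and_up.log", "w")
fail_handler.setLevel(logging.WARNING)               # WARNING and above, no filter

logger.addHandler(info_handler)
logger.addHandler(fail_handler)

logger.info("lands in info_only.log only")
logger.warning("lands in warnings_and_up.log only")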
@@ -52,7 +75,7 @@ def append_to_consolidated_report( report_path: Path, report_df: DataFrame, shee
     report_name: str = f"{str(report_month.name).replace('.','-')}_{sheet_name}_ConsolidatedReport.xlsx"
-    save_path = Path("../2023",report_name)
+    save_path = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\2023",report_name)
     logger.debug(f"{save_path=}")
 
     # Check if the current month has a consolidated report
@@ -76,7 +99,7 @@ def append_to_consolidated_report( report_path: Path, report_df: DataFrame, shee
             current_data_len = len(pd.read_excel(save_path,sheet_name=sheet_name))
             with pd.ExcelWriter(save_path, engine='openpyxl', mode='a',if_sheet_exists="overlay") as writer:
                 logger.debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
-                report_df.to_excel(writer, index=False, sheet_name=sheet_name,startrow=len(current_data_len),header=False)
+                report_df.to_excel(writer, index=False, sheet_name=sheet_name,startrow=current_data_len,header=False)
         except Exception as e:
             logger.error(f"Failed to append to consolidated report! {report_name} | {sheet_name} | {report_path} :\n{e}")
@@ -102,7 +125,7 @@ def process_report(file: Path, extract_inst: ExtractInstruction) -> bool:
 def process_folder(folder: ReportFolder):
     # Search recurively through date directories
-    report_date: dt = dt(2023, 1, 1)
+    report_date: dt = dt(2023, 5, 1)
     while report_date.date() < dt.now().date():
         logger.info(f"{folder.folder_name} | Processing date: {report_date}")
         report_folder: Path = Path(TOP_PATH,
@@ -114,8 +137,11 @@ def process_folder(folder: ReportFolder):
         if report_folder.exists():
             for xi in folder.extraction_methods:
                 try:
-                    report_file: Path = next(report_folder.glob(f"*{xi.input_regex}*"))
+                    files = report_folder.glob(f"*{xi.input_regex}*")
+                    report_file: Path = next(files)
                     logger.debug(f"Report file: {report_file}")
+                except IndexError as ie:
+                    logger.warning(f"No matching reports!: {ie}")
                 except Exception as e:
                     logger.debug(f"Could not get report_file: {report_folder.glob(f'*{xi.input_regex}*')} \n{e}")
                     continue
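One more observation on this hunk: Path.glob() returns a generator, and next() on an exhausted generator raises StopIteration rather than IndexError, so the new except IndexError branch will most likely never fire (the empty case still falls through to the generic except below it). A sketch of one way to get the intended warning, using next() with a default (folder and pattern here are placeholders):

from pathlib import Path

report_folder = Path(".")      # placeholder folder
pattern = "*_ACH_*"            # placeholder pattern

# next() with a default never raises; an empty glob simply yields None.
report_file = next(report_folder.glob(pattern), None)
if report_file is None:
    print(f"No matching reports for {pattern!r} in {report_folder}")
else:
    print(f"Report file: {report_file}")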
@@ -133,50 +159,84 @@
             logger.debug(f"Folder '{report_folder}' does not exist!")
         report_date = report_date + timedelta(days=1)
     logger.debug(f"Finished scanning {folder.folder_name}!")
 
+def combine():
+    WORK_DIR = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\2023")
+    REPORTS = [
+        "ACH",
+        "CHECKS LIVE",
+        "CREDIT CARDS",
+        "PAY BY PHONE",
+        "WIRE",
+        "RETURNS ACH",
+        "RETURNS PORTAL"
+    ]
+    for i in range(1,6):
+        month = f"2023-0{i}"
+        mcr: Path = Path(f"{month} Consolidated Report.xlsx")
+        print(f"Creating monthly consolidated report: {mcr}")
+        with ExcelWriter(Path(WORK_DIR, "Monthly", mcr), engine="xlsxwriter") as wrtr:
+            for r in REPORTS:
+                report_path: Path = Path(WORK_DIR, f"{month}_{r}_ConsolidatedReport.xlsx")
+                print(f"Report Path ({r}): {report_path}")
+                rdf: DataFrame = read_excel(report_path, sheet_name=r)
+                rdf.to_excel(wrtr, sheet_name=r, freeze_panes=(1,0), index=False)
+
 if __name__ == "__main__":
-    logger = create_logger(f"BackReporting.log")
     try:
         FOLDERS = [
             ReportFolder("ACH", [
                 ExtractInstruction("_ACH_", "ACH", ilr.ach),
             ]),
             ReportFolder("CHECKS LIVE", [
                 ExtractInstruction("_PROGPAY_BER", "CHECKS LIVE", ilr.payment_transactions)
             ]),
             ReportFolder("CREDIT CARDS", [
                 ExtractInstruction("_VMCC_BER", "CREDIT CARDS", ilr.payment_transactions)
             ]),
             ReportFolder("LOCKBOX", [
                 ExtractInstruction("_LOCKBOX_\d+_", "LOCKBOX", ilr.lockbox)
             ]),
             ReportFolder("PAY BY PHONE", [
-                ExtractInstruction("_PBP_EPAY_DPS_BER", "PAY BY PHONE", ilr.lockbox)
+                ExtractInstruction("_PBP_EPAY_DPS_BER", "PAY BY PHONE", ilr.payment_transactions)
             ]),
             ReportFolder("RETURN REPORTING", [
                 ExtractInstruction("_PBP_EPAY_RETURNS_BER", "RETURNS ACH", ilr.payment_transactions),
                 ExtractInstruction("_RETURNS_BER", "RETURNS PORTAL", ilr.payment_transactions)]
             ),
             ReportFolder("WIRES", [
                 ExtractInstruction("MTBWIRE_BER", "WIRE", ilr.payment_transactions)
             ]),
         ]
 
-        with Pool(cpu_count()) as pool:
-            for folder in tqdm(pool.imap_unordered(process_folder,FOLDERS)):
-                try:
-                    process_folder(folder)
-                    print(f"Completed: {folder.folder_name}")
-                except Exception as e:
-                    print(f"Failed to process {folder.folder_name} \n {e}")
-                    continue
-        input("Complete!")
+        process_folder(FOLDERS[0])
+        # with Pool(cpu_count()) as pool:
+        #     for folder in tqdm(pool.imap_unordered(process_folder,FOLDERS)):
+        #         try:
+        #             print(f"Completed!")
+        #         except Exception as e:
+        #             print(f"Failed to process\n {e}")
+        #             continue
+
+        # for folder in tqdm(FOLDERS):
+        #     try:
+        #         process_folder(folder)
+        #         print(f"Completed: {folder.folder_name}")
+        #     except Exception as e:
+        #         print(f"Failed to process {folder.folder_name} \n {e}")
+        #         continue
+        # input("Complete!")
+        combine()
+    except Exception as e:
+        logger.error(f"Program failed:\n{e}")
+        input(f"")
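Last note, on combine(): f"2023-0{i}" only pads single-digit months, which is fine for range(1,6) (January through May) but would produce wrong file names if the loop is ever extended past September. A hedged sketch of the same loop with zero-padded months and a guard for missing source files (paths and report list are placeholders, not the share paths in the commit):

from pathlib import Path
from pandas import DataFrame, ExcelWriter, read_excel

work_dir = Path("2023")                       # placeholder for the network share
reports = ["ACH", "CHECKS LIVE", "WIRE"]      # subset, for illustration

for i in range(1, 13):
    month = f"2023-{i:02d}"                   # "2023-01" .. "2023-12"
    out = Path(work_dir, "Monthly", f"{month} Consolidated Report.xlsx")
    out.parent.mkdir(parents=True, exist_ok=True)
    with ExcelWriter(out, engine="xlsxwriter") as writer:
        for r in reports:
            src = Path(work_dir, f"{month}_{r}_ConsolidatedReport.xlsx")
            if not src.exists():              # skip categories with no report that month
                continue
            df: DataFrame = read_excel(src, sheet_name=r)
            df.to_excel(writer, sheet_name=r, freeze_panes=(1, 0), index=False)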