Compare commits

...

7 Commits
master ... dev

Author SHA1 Message Date
= e3625793da
Added sending logs to my remote folder. 3 years ago
= cadf5f4b0b
Changed ILReport to ignore provided consolidatedbase path. Confirms this 3 years ago
= 5b03a6c7a9
Fixed toml write error and updated pyinstaller spec to include new settings. 3 years ago
= 1bb12c722a
Reworked how consolidated reports are created. Uses new relative path 3 years ago
= 5caaf3d7ac
version 4.0 staging | New header relational positioning class structure 3 years ago
= a3905d118e
Readded consolidated reports with Return reports as well. 3 years ago
= 5067678a8c
Project restructure to facilitate future work 3 years ago
  1. 9
      .gitignore
  2. 8
      IL Extract.spec
  3. 1
      compile_gui
  4. 61
      create_dir_schema.py
  5. 118
      ile_installer.py
  6. BIN
      requirements.txt
  7. 2
      settings.json
  8. 14
      settings.toml
  9. 0
      src/__init__.py
  10. 0
      src/assets/checkedCircle.svg
  11. 0
      src/assets/copy.svg
  12. 0
      src/assets/excel.svg
  13. 0
      src/assets/extract.ico
  14. 0
      src/assets/extract.svg
  15. 0
      src/assets/fileSearch.svg
  16. 0
      src/assets/folder.svg
  17. 0
      src/assets/maximize.svg
  18. 0
      src/assets/process.svg
  19. 0
      src/assets/settings.svg
  20. 12
      src/datasets/dataset_template.json
  21. 184
      src/extractors.py
  22. 59
      src/il_extract.py
  23. 130
      src/il_reports.py
  24. 3
      src/ui_ile_main_window.py
  25. 37
      todo.md

9
.gitignore vendored

@ -1,10 +1,17 @@
build/
venv/
dist/
inputFiles/
InputFiles/
__pycache__/
2023/
ilr_test/
*.lnk
*.spec
*.log
*.xlsx
*.txt
*.md
!todo.md
!requirements.txt

@ -5,10 +5,10 @@ block_cipher = None
a = Analysis(
['main.py'],
pathex=[],
['src/il_extract.py'],
pathex=['src'],
binaries=[],
datas=[('assets/extract.svg', '.'), ('assets/process.svg', '.'), ('assets/folder.svg', '.'), ('assets/copy.svg', '.'), ('settings.json', '.')],
datas=[('src/assets/*', 'assets'), ('settings.toml', '.')],
hiddenimports=[],
hookspath=[],
hooksconfig={},
@ -37,7 +37,7 @@ exe = EXE(
target_arch=None,
codesign_identity=None,
entitlements_file=None,
icon='assets\\extract.ico',
icon='src/assets/extract.ico',
)
coll = COLLECT(
exe,

@ -1 +0,0 @@
pyinstaller -w --add-data "assets/extract.svg;." --add-data "assets/process.svg;." --add-data "assets/folder.svg;." --add-data "assets/copy.svg;." --add-data "settings.json;." -i assets/extract.ico -n "IL Extract" main.py

@ -0,0 +1,61 @@
from pathlib import Path
from dataclasses import dataclass
import os
from datetime import datetime, timedelta, date
import shutil as sh
class Report:
def __init__(self, sample_file: str, sample_folder: str):
self.sample_file: Path = Path(sample_file)
self.folder_name: Path = Path(sample_folder)
def create_record(self, location: Path) -> Path|None:
# Create the folder
fp = Path(location, self.folder_name)
def main():
folders = [
Report(r"InputFiles\2022.09.02_ACH_C", r"ACH"),
Report(r"InputFiles\2022.09.02_PROGPAY_BER", r"CHECKS LIVE"),
Report(r"InputFiles\2022.09.01_VMCC_BER", r"CREDIT CARDS"),
Report(r"InputFiles\2022.09.02_DISPOSITION_PM_C", r"DISPOSITION REPORTING"),
Report(r"InputFiles\2022.09.02_LOCKBOX_094_C", r"LOCKBOX"),
Report(r"InputFiles\2022.09.02_PBP_EPAY_DPS_BER", r"PAY BY PHONE"),
Report(r"InputFiles\2022.12.30_PBP_EPAY_RETURNS_BER", r"RETURN REPORTING"),
Report(r"InputFiles\2022.09.01_PUB_WIRES_BER", r"WIRES"),
]
folder_date = date(2023,1,1)
while folder_date < date(2024,1,1):
year = folder_date.strftime("%Y")
month = folder_date.strftime("%Y.%m")
day = folder_date.strftime("%Y.%m.%d")
date_path = Path(year, month, day)
for rp in folders:
# Create folder
fold_p = Path(date_path, rp.folder_name)
print(f"Creating filepath: {fold_p}")
os.makedirs(fold_p, exist_ok=True)
file_p = Path(fold_p, rp.sample_file.name)
print(f"Cp {rp.sample_file} into {file_p}")
sh.copyfile(rp.sample_file, file_p )
folder_date += timedelta(days=1)
if __name__ == "__main__":
main()

@ -1,118 +0,0 @@
from os import system, getlogin
import os
from sys import exit
from zipfile import ZipFile
import win32com.client
from glob import glob
import re
from itertools import cycle
from shutil import get_terminal_size
from threading import Thread
from time import sleep
def error_exit(exception_info: str):
print(exception_info)
input("\nPress enter/return to exit")
exit(1)
class NoMatchingFile(Exception):
def __init__(self, search_file: str, found: list) -> None:
super().__init__(f"File: {search_file} was not found: {found}")
class Loader:
def __init__(self, desc="Loading...", end="Done!", timeout=0.1):
"""
A loader-like context manager
Args:
desc (str, optional): The loader's description. Defaults to "Loading...".
end (str, optional): Final print. Defaults to "Done!".
timeout (float, optional): Sleep time between prints. Defaults to 0.1.
"""
self.desc = desc
self.end = end
self.timeout = timeout
self._thread = Thread(target=self._animate, daemon=True)
self.steps = ["|", "/", "-", "\\",]
self.done = False
def start(self):
self._thread.start()
return self
def _animate(self):
for c in cycle(self.steps):
if self.done:
break
print(f"\r{self.desc} {c}", flush=True, end="")
sleep(self.timeout)
def __enter__(self):
self.start()
def stop(self):
self.done = True
cols = get_terminal_size((80, 20)).columns
print("\r" + " " * cols, end="", flush=True)
print(f"\r{self.end}", flush=True)
def __exit__(self, exc_type, exc_value, tb):
# handle exceptions with those variables ^
self.stop()
ZIP_LOCATION = r"\\leafnow.com\public\Accounting Shared\ILE Apps"
APP_FOLDER = r"InfoLeaseExtract"
try:
user = getlogin()
install_folder = f"C:\\Users\\{user}\\AppData\\Local"
backup_install_folder = f"C:\\Users\\{user}\\Documents\\"
print(f"Initalizing InfoLease Extract Installer\n#######################################")
# Find the newest version:
latest_version = glob(f"{ZIP_LOCATION}\\LATEST*")
if len(latest_version) == 0:
# Create Custom exception
raise NoMatchingFile(f"{ZIP_LOCATION}\\LATEST*", latest_version)
latest_version: str = latest_version[0]
version = re.search("\d+\.\d+", latest_version).group()
print(f"Installing verion {version}...")
with ZipFile(latest_version, 'r') as zipObj:
try:
with Loader("Setting up program files..."):
zipObj.extractall(install_folder)
except Exception as e:
error_exit(f"Failed to extract file ({latest_version}) to '{install_folder}' :\n{e}")
print("Creating Desktop shortcut...")
try:
desktop = f"C:\\Users\\{user}\\OneDrive - LEAF Commercial Capital\\Desktop"
shell = win32com.client.Dispatch("WScript.Shell")
shortcut = shell.CreateShortCut(os.path.join(desktop, "IL Extract v3.10.lnk"),)
shortcut.Targetpath = f"{install_folder}\\IL Extract\\IL Extract.exe"
shortcut.IconLocation = f"{install_folder}\\IL Extract\\assets\\extract.ico"
shortcut.WorkingDirectory = f"{install_folder}\\IL Extract"
shortcut.save()
except:
try:
desktop = f"C:\\Users\\{user}\\Desktop"
shell = win32com.client.Dispatch("WScript.Shell")
shortcut = shell.CreateShortCut(os.path.join(desktop, "IL Extract v3.10.lnk"),)
shortcut.Targetpath = f"{install_folder}\\IL Extract\\IL Extract.exe"
shortcut.IconLocation = f"{install_folder}\\IL Extract\\assets\\extract.ico"
shortcut.WorkingDirectory = f"{install_folder}\\IL Extract"
shortcut.save()
except Exception as e:
error_exit(f"Failed to create shortcut. The application is still installed at:\n{install_folder}\\IL Extract.\nYou can manually create a shortcut if you would like.\n{e}")
print(f"\nInstallation Completed Successfully!")
input("\nPress Enter/Return to exit.")
except Exception as e:
error_exit(f"High level exception:\n{e}")

Binary file not shown.

@ -1 +1 @@
{"debug": false, "consolidatedBasePath": "leafnow.com/shared/cashapps", "defaultLocations": {"ach": "", "disp": "", "gl": "", "lb": "", "minv": "", "niv": "", "ren": "", "pymt": "", "uap": "", "pastdue": ""}}
{"debug": true, "consolidatedBasePath": ".", "defaultLocations": {"ach": "Z:/shared/Business Solutions/Griff/Code/InfoLeaseExtract/2023/2023.05/2023.05.24/ACH", "disp": "", "gl": "", "lb": "Z:/Business Solutions/Griff/Code/InfoLeaseExtract/InputFiles", "minv": "", "niv": "", "ren": "", "pymt": "Z:/Business Solutions/Griff/Code/InfoLeaseExtract/InputFiles", "uap": "", "pastdue": ""}}

@ -0,0 +1,14 @@
debug = true
consolidatedBasePath = ""
[defaultLocations]
ach = "//leafnow.com/shared/Business Solutions/Griff/Code/InfoLeaseExtract/2023/2023.03/2023.03.01/ACH"
disp = ""
gl = ""
lb = "//leafnow.com/shared/Business Solutions/Griff/Code/InfoLeaseExtract/2023/2023.03/2023.03.01/LOCKBOX"
minv = ""
niv = ""
ren = ""
pymt = ""
uap = ""
pastdue = ""

Before

Width:  |  Height:  |  Size: 1.1 KiB

After

Width:  |  Height:  |  Size: 1.1 KiB

Before

Width:  |  Height:  |  Size: 2.6 KiB

After

Width:  |  Height:  |  Size: 2.6 KiB

Before

Width:  |  Height:  |  Size: 477 B

After

Width:  |  Height:  |  Size: 477 B

Before

Width:  |  Height:  |  Size: 6.9 KiB

After

Width:  |  Height:  |  Size: 6.9 KiB

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

Before

Width:  |  Height:  |  Size: 819 B

After

Width:  |  Height:  |  Size: 819 B

Before

Width:  |  Height:  |  Size: 512 B

After

Width:  |  Height:  |  Size: 512 B

Before

Width:  |  Height:  |  Size: 568 B

After

Width:  |  Height:  |  Size: 568 B

Before

Width:  |  Height:  |  Size: 1.2 KiB

After

Width:  |  Height:  |  Size: 1.2 KiB

Before

Width:  |  Height:  |  Size: 3.1 KiB

After

Width:  |  Height:  |  Size: 3.1 KiB

@ -0,0 +1,12 @@
{
"name": {
"report": "",
"excel": ""
},
"relative_position": {
"rows": 0,
"col": 0
},
"length": 0,
"data_type": "int"
}

@ -0,0 +1,184 @@
from typing import TypeAlias, TypeVar
from dataclasses import dataclass
from pathlib import Path
import pathlib as pl
from abc import ABC, abstractmethod, abstractproperty
from re import search, match, compile, Match, Pattern
from enum import Enum
ColumnIndex: TypeAlias = int
Money: TypeAlias = float
Numeric = TypeVar("Numeric", float, int)
class Line(Enum):
Header: str
Data: str
Erroneous: str
Top: str
Bottom: str
@dataclass
class RelativePosition:
"""
Coordinates for navigating from one point in a row to another
"""
rows: int
col: ColumnIndex
@dataclass
class DataValue:
position: RelativePosition
length : int
regex: Pattern
dtype: type
def correct_line(self, adj_lines_since_header: int) -> bool:
"""
"""
return adj_lines_since_header % self.position.rows == 0
def _line_slice(self, line: Line.Data) -> str|None:
"""
Attempts to get the data from the line.
Returns string in correct position or None if out of range.
"""
try:
start: int = self.position.col
end: int = start + self.length
line_slice: str = line[start:end]
except IndexError:
#TODO: Add logging
line_slice = None
finally:
return line_slice
@staticmethod
def _to_float(number_str: str) -> float|None:
try:
f_value:float = float(number_str.replace(',',''))
return f_value
except:
return None
def extract(self, line: Line.Data) -> type|None:
"""
"""
line_slice: str|None = self._line_slice(line)
if isinstance(line_slice, None):
return None
value_match: Match|None = search(self.regex, line_slice)
if isinstance(value_match, None):
return None
value_str: str = value_match.group()
value_str.strip()
if self.dtype == int or self.dtype == float:
return self._to_float(value_str)
#TODO datetime
return value_str
class DataSet:
def __init__(self, config: dict) -> None:
self.r_name = config["naming"]["report"]
try:
self.e_name = config["naming"]["excel"]
except KeyError:
self.e_name = self.r_name
self.data_value: DataValue = DataValue(
position = RelativePosition(
rows= config["relative_position"]["rows"],
col= config["relative_position"]["col"]
),
length = config["length"],
dtype = config["data_type"],
)
def line_position(self, line: str) -> ColumnIndex|None:
"""
Searches a line for the report header for this dataset.
Returns:
- ColumnIndex(int) | None: The column index of the matches end position
or None if no match was found
"""
header_match: Match|None = search(self.r_name, line)
return header_match.end() if isinstance(header_match, Match) else None
@dataclass
class ReportConfig:
file_extension: str
name: str
datasets: list[DataSet]
data_line_regexes: list[Pattern]
class ILReport(ABC):
def __init__(self, file_path: Path, report_config: ReportConfig) -> None:
self.in_file_path: Path = file_path
self.line_gen = self._line_generator(file_path)
self.config: ReportConfig = report_config
self.name = report_config.name
self.line_type_history: list[Line] = []
self.last_header_line: int|None = None
self.data_dict: dict = {
header.e_name: []
for header in self.config.datasets
}
@staticmethod
def _line_generator(file_path: Path):
with open(file_path, 'r') as in_file:
line: str
for line in in_file.readlines():
yield line
def _add_line_history(self, line: Line, max_history: int = 10):
self.line_type_history.append(line)
while len(self.line_type_history) > max_history:
self.line_type_history.pop(0)
def _is_header_line(self, line: str) -> bool:
"""
Checks whether a report line has data headers.
"""
regex: Pattern
for regex in self.config.data_line_regexes:
if isinstance(search(regex,line), Match):
return True
return False
@abstractmethod
def _skip_line(self, line) -> bool:
"""
Tells whether we should skip this line
"""
@abstractmethod
def _process_line(self):
"""
"""
@abstractmethod
def _process_dataline(self, dataline: Line.Data):
"""
"""
# Search the row for a data set name, or list of data set names
# extract all the data until the next row
if __name__ == "__main__":
datasets = []

@ -1,20 +1,26 @@
from ILE_MainWindow import Ui_MainWindow
from ui_ile_main_window import Ui_MainWindow
import sys
import os
import pandas as pd
import json
from PyQt5 import QtWidgets
from datetime import datetime as dt
import ILExtract as ilx
from logging import debug, DEBUG, basicConfig
import il_reports as ilx #TODO redo aliasing
from logging import debug, DEBUG, basicConfig, exception, warn
from tomllib import load
from tomli_w import dump
from shutil import copyfile
from pathlib import Path
from getpass import getuser
with open("settings.toml", mode='rb') as s:
settings = load(s)
#if settings["debug"]:
basicConfig(filename='debug.log', mode='w', encoding='utf-8',
level=DEBUG,
format="%(asctime)s %(message)s",
)
with open("settings.json") as s:
settings = json.loads(s.read())
if settings["debug"]:
basicConfig(filename='debug.log', encoding='utf-8', level=DEBUG)
debug("\n\n\n########################### VERSION = 3.10 ###########################\n\n\n")
debug("\n\n\n########################### VERSION = 3.4 ###########################\n\n\n")
debug("Running main.py...")
class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
"""
@ -33,9 +39,9 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
self.inputFile = ""
# The location that the outputfile will be saved at
self.outputFile = ""
# Load the settings.json
with open("settings.json") as s:
self.settings = json.loads(s.read())
# Load the settings.toml
with open("settings.toml", mode="rb") as s:
self.settings = load(s)
# Set the current report type to ACH as default
self.curReportType = "ach"
@ -220,9 +226,9 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
if self.settings["defaultLocations"][self.curReportType] == '':
self.settings["defaultLocations"][self.curReportType] = ('/').join(self.inputFile.split('/')[:-1])
debug(f"checked_for_saved: saved new deafult location | {self.curReportType} | {self.settings['defaultLocations'][self.curReportType]}")
with open('settings.json', 'w') as s:
with open('settings.toml', 'wb') as s:
# Save changes to the setting
json.dump(self.settings, s)
dump(self.settings, s)
def report_type_change(self):
debug(f"Changing report type | Was: {self.curReportType} -> {self.reportTypeCB.currentText()}")
@ -291,6 +297,9 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
debug(f"report_type_change | outputFile: {self.outputFile}")
self.check_ready_to_process()
if __name__ == "__main__":
try:
# Defines the app
app = QtWidgets.QApplication(sys.argv)
# Sets the style
@ -301,3 +310,23 @@ window.setWindowTitle("IL Extract")
window.show()
# Starts the app
app.exec()
except Exception as e:
exception(f"HIGH LEVEL EXECPTION: {e}")
finally:
log_path = Path(os.curdir,'debug.log')
remote_log_folder = Path(
r"\\leafnow.com\public\Business Solutions\Apps\AppSources\Logging\ILE",
)
if not remote_log_folder.exists:
os.mkdir(remote_log_folder)
rlog_file = f"{getuser()}_ile.log"
try:
copyfile(
log_path,
Path(remote_log_folder, rlog_file)
)
except Exception as e:
warn(f"Failed to save remote log: {remote_log_folder}/{rlog_file}!")

@ -7,18 +7,40 @@ from pathlib import Path
import numpy as np
from glob import glob
from logging import debug, DEBUG, basicConfig, warn, error
from hashlib import md5
import openpyxl as pxl
from tomllib import load
# V3.1 | 01/19/23
# V3.4 | 06/15/23
with open("settings.json") as s:
settings = json.loads(s.read())
with open("settings.toml", mode='rb') as s:
settings = load(s)
if settings["debug"]:
basicConfig(filename='debug.log', encoding='utf-8', level=DEBUG)
basicConfig(filename='debug.log', filemode='w',encoding='utf-8',
format="%(asctime)s %(message)s",
level=DEBUG)
# contract numbers are a common feature in many reports so it's
# useful to have the regex for them globally available
contract_number_regex = "\d{3}-\d{7}-\d{3}"
def extract_date_path(path: Path) -> Path|None:
"""
Used to get the month folder for a report
"""
date_pattern = re.compile(r'^\d{4}\.\d{2}$')
for parent in path.parents:
if date_pattern.match(parent.name):
return parent
return None
def hash_cols(row: pd.Series, cols_to_hash: list[str]) -> str:
md5_hash = md5()
md5_hash.update((''.join(str(row[col]) for col in cols_to_hash)).encode('utf-8'))
return md5_hash.hexdigest()
class ILReport:
"""
InfoLease Report class will be used to work with the files.
@ -54,10 +76,11 @@ class ILReport:
if dataframe.empty:
warn(f"ILReport: resulting dataframe was empty! Exiting with None.")
return dataframe
self._append_to_consolidated_report(dataframe, settings["consolidatedBasePath"])
self._append_to_consolidated_report(dataframe)
return dataframe
def _append_to_consolidated_report(self, dataframe_to_append: DataFrame, base_path: str):
def _append_to_consolidated_report(self, dataframe_to_append: DataFrame,
reports_base_path: Path = None):
"""
"""
# Decide the sheet name based on the save_location_name
@ -70,52 +93,95 @@ class ILReport:
sheet_name = "CREDIT CARDS"
elif re.search("(?i)lockbox", self.location) != None:
sheet_name = "LOCKBOX"
elif re.search("(?i)PBP_EPAY_RETURNS_BER", self.location) != None:
sheet_name = "RETURNS ACH"
elif re.search("(?i)epay", self.location) != None:
sheet_name = "PAY BY PHONE"
elif re.search("(?i)wires", self.location) != None:
sheet_name = "WIRES"
elif re.search("(?i)RETURNS_BER", self.location) != None:
sheet_name = "RETURNS Portal"
else:
debug(f"No consolidated report for {self.location}!")
return None
il_report_path: Path = Path(self.location)
debug(f"{il_report_path=}")
current_date: list(str) = dt.now().strftime("%Y.%m.%d").split('.')
report_name = f"{dt.now().strftime('%B')}_ConsolidatedReport.xlsx"
debug(f"Consolidated Reports {report_name} | {self.output_location} | {self.x_method} | {current_date}")
year = current_date[0]
month = current_date[1]
year_month = f"{year}.{month}"
save_path = f"{base_path}/{year}/{year_month}/{report_name}"
# Check if the current month has a consolidated report
month_summary_file: list(str) = glob(save_path)
if len(month_summary_file) == 0:
month_dir: Path|None = extract_date_path(il_report_path)
if month_dir is None and reports_base_path is None:
warn(f"Consolidated report not created! No valid base path: {il_report_path} | {reports_base_path}")
return None
report_date: str = dt.now().date() \
if re.search(r"^\d{4}\.\d{2}\.\d{2}$",il_report_path.parent.parent.name) is None\
else il_report_path.parent.parent.name.replace('.','/')
debug(f"{report_date=}")
report_month:str = il_report_path.parents[2].name.replace('.','-')
if month_dir is None:
if reports_base_path is None:
warn(f"Consolidated report not created! Could not find month folder: {il_report_path} | {reports_base_path}")
return None
else:
month_dir = reports_base_path
report_name: Path = Path(f"{report_month} ConsolidatedReport.xlsx")
debug(f"{month_dir=}")
debug(f"{report_name=}")
save_path = Path(month_dir, report_name)
debug(f"Consolidated Report {save_path=}")
consolidated_df = dataframe_to_append.copy(deep=True)
consolidated_df["ExtractDate"] = report_date
consolidated_df.fillna('--', inplace=True)
consolidated_df["Hash"] = consolidated_df.apply(
lambda r: hash_cols(r,r.keys())
, axis=1
)
consolidated_df.replace("--", None, inplace=True)
debug(consolidated_df)
if not save_path.exists():
debug(f"Consolidated Report | No monthly summary file!\n\tCreating: {save_path}")
# No file exists yet
# Create it and add the current month
try:
with pd.ExcelWriter(save_path) as writer:
debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
dataframe_to_append.to_excel(writer, index=False, sheet_name=sheet_name)
consolidated_df.to_excel(writer, index=False, sheet_name=sheet_name)
except Exception as e:
error(f"[E] Failed to create consolidated report! {sheet_name}:\n{e}")
error(f"Failed to create to consolidated report! {report_name} | {sheet_name} | {il_report_path} :\n{e}")
else:
debug(f"{save_path} already exisits.")
# Get the current worksheets
wb: pxl.Workbook = pxl.open(save_path, read_only=True)
current_sheets = wb.worksheets
wb.close()
with pd.ExcelWriter(save_path, engine='openpyxl', mode='a',if_sheet_exists="overlay") as writer:
if sheet_name in [w.title for w in current_sheets]:
debug(f"{sheet_name} sheet already exisits.")
# We need to read the dataframe in the current monthly report
# Check that we are not adding matching data
# Save the new report
current_data: DataFrame = pd.read_excel(month_summary_file[0], sheet_name=sheet_name)
new_data_len = len(dataframe_to_append)
cur_first_col = current_data.iloc[len(current_data)-new_data_len:,0].to_list()
new_first_col = dataframe_to_append.iloc[:,0].to_list()
if cur_first_col == new_first_col:
debug(f"Consolidated Report | Data is same as previous! Skipping!")
return None
# We need to find the start cols (where the new data should go)
# Check that we are not adding duplicate data
try:
with pd.ExcelWriter(save_path, engine='openpyxl', mode='a',if_sheet_exists="overlay") as writer:
# Merge the current data and drop duplicates
prev_data: DataFrame = pd.read_excel(save_path, sheet_name=sheet_name)
debug(f"Prev:\n{prev_data}")
consolidated_df = consolidated_df[~(consolidated_df["Hash"].isin(prev_data["Hash"]) )]
debug(f"New data:\n{consolidated_df}")
debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
dataframe_to_append.to_excel(writer, index=False, sheet_name=sheet_name,startrow=len(current_data),header=False)
consolidated_df.to_excel(writer, index=False, sheet_name=sheet_name, header=False,
startrow=prev_data.shape[0]+1)
except Exception as e:
error(f"[E] Failed to append to consolidated report! {sheet_name}:\n{e}")
error(f"Failed to append to consolidated report! {report_name} | {sheet_name} | {il_report_path} :\n{e}")
else:
consolidated_df.to_excel(writer, index=False, sheet_name=sheet_name)
def create_line_divider(breakage_list: list):

@ -1,3 +1,6 @@
"""
The user interface set up for the main window of the application
"""
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ILE_MainWindow.ui'

@ -0,0 +1,37 @@
# Work List
## Priority
- [ ] Bring back in consolidated reports
- [X] ACH
- [X] CHECKS_LIVE
- [X] CREDIT
- [X] LOCKBOX
- [X] PAY BY PHONE
- [X] WIRES
- [X] RETURNS ACH
- [X] RETURNS Portal *(new addition)*
- [X] Adjust pyinstaller spec for new file structure
- [ ] Function to recap year
- [ ] Fix Logging
## Feature Goals
- [ ] Year Walkthrough report
- [ ] 'In Progress' notification/spinner
- Speed up ACH/All
Generate monthly consolidated reports for each month in a year
- Must generate IL Extract report where necessary
- [ ] Users can add/create new reports
- This would be very complicated
## Code Improvement
- [ ] Rework IL Report as an ABC and report types as sub-classes
- [ ] Rework config set up
- [ ] Simplify & standardize row/data parsing
---
Loading…
Cancel
Save