Compare commits

...

5 Commits

Author SHA1 Message Date
= 86afb277e5
Fixed work multi-processing & creates combined monthly 3 years ago
= 9e2d960f7e
Passing None as report save path cancels excel doc creation (for some 3 years ago
= 40c2a8a0df
Has issues... 3 years ago
= 5caaf3d7ac
version 4.0 staging | New header relational positioning class structure 3 years ago
= a3905d118e
Readded consolidated reports with Return reports as well. 3 years ago
  1. 5
      .gitignore
  2. 8
      IL Extract.spec
  3. 44
      assets/checkedCircle.svg
  4. 60
      assets/copy.svg
  5. 6
      assets/excel.svg
  6. BIN
      assets/extract.ico
  7. 1
      assets/extract.svg
  8. 4
      assets/fileSearch.svg
  9. 2
      assets/folder.svg
  10. 24
      assets/maximize.svg
  11. 1
      assets/process.svg
  12. 62
      assets/settings.svg
  13. 1
      report_config_termplate.toml
  14. BIN
      requirements.txt
  15. 1
      settings.json
  16. 0
      src/__init__.py
  17. 242
      src/back_reporting.py
  18. 12
      src/datasets/dataset_template.json
  19. 184
      src/extractors.py
  20. 30
      src/il_extract.py
  21. 54
      src/il_reports.py
  22. 1
      src/settings.json
  23. 3
      src/ui_ile_main_window.py
  24. 17
      todo.md

5
.gitignore vendored

@ -1,7 +1,7 @@
build/
venv/
dist/
IL_InputFiles/
InputFiles/
__pycache__/
*.spec
@ -10,4 +10,5 @@ __pycache__/
*.txt
*.md
!todo.md
!todo.md
!requirements.txt

@ -5,10 +5,10 @@ block_cipher = None
a = Analysis(
['main.py'],
pathex=[],
['src/il_extract.py'],
pathex=['src'],
binaries=[],
datas=[('assets/extract.svg', '.'), ('assets/process.svg', '.'), ('assets/folder.svg', '.'), ('assets/copy.svg', '.'), ('settings.json', '.')],
datas=[('src/assets/*', 'assets'), ('settings.json', '.')],
hiddenimports=[],
hookspath=[],
hooksconfig={},
@ -37,7 +37,7 @@ exe = EXE(
target_arch=None,
codesign_identity=None,
entitlements_file=None,
icon='assets\\extract.ico',
icon='src/assets/extract.ico',
)
coll = COLLECT(
exe,

@ -1,44 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 18.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 330 330" style="enable-background:new 0 0 330 330;" xml:space="preserve">
<g>
<path d="M165,0C74.019,0,0,74.019,0,165s74.019,165,165,165s165-74.019,165-165S255.981,0,165,0z M165,300
c-74.44,0-135-60.561-135-135S90.56,30,165,30s135,60.561,135,135S239.439,300,165,300z"/>
<path d="M226.872,106.664l-84.854,84.853l-38.89-38.891c-5.857-5.857-15.355-5.858-21.213-0.001
c-5.858,5.858-5.858,15.355,0,21.213l49.496,49.498c2.813,2.813,6.628,4.394,10.606,4.394c0.001,0,0,0,0.001,0
c3.978,0,7.793-1.581,10.606-4.393l95.461-95.459c5.858-5.858,5.858-15.355,0-21.213
C242.227,100.807,232.73,100.806,226.872,106.664z"/>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 1.1 KiB

@ -1,60 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 64 64" enable-background="new 0 0 64 64" xml:space="preserve">
<g id="Text-files">
<path d="M53.9791489,9.1429005H50.010849c-0.0826988,0-0.1562004,0.0283995-0.2331009,0.0469999V5.0228
C49.7777481,2.253,47.4731483,0,44.6398468,0h-34.422596C7.3839517,0,5.0793519,2.253,5.0793519,5.0228v46.8432999
c0,2.7697983,2.3045998,5.0228004,5.1378999,5.0228004h6.0367002v2.2678986C16.253952,61.8274002,18.4702511,64,21.1954517,64
h32.783699c2.7252007,0,4.9414978-2.1725998,4.9414978-4.8432007V13.9861002
C58.9206467,11.3155003,56.7043495,9.1429005,53.9791489,9.1429005z M7.1110516,51.8661003V5.0228
c0-1.6487999,1.3938999-2.9909999,3.1062002-2.9909999h34.422596c1.7123032,0,3.1062012,1.3422,3.1062012,2.9909999v46.8432999
c0,1.6487999-1.393898,2.9911003-3.1062012,2.9911003h-34.422596C8.5049515,54.8572006,7.1110516,53.5149002,7.1110516,51.8661003z
M56.8888474,59.1567993c0,1.550602-1.3055,2.8115005-2.9096985,2.8115005h-32.783699
c-1.6042004,0-2.9097996-1.2608986-2.9097996-2.8115005v-2.2678986h26.3541946
c2.8333015,0,5.1379013-2.2530022,5.1379013-5.0228004V11.1275997c0.0769005,0.0186005,0.1504021,0.0469999,0.2331009,0.0469999
h3.9682999c1.6041985,0,2.9096985,1.2609005,2.9096985,2.8115005V59.1567993z"/>
<path d="M38.6031494,13.2063999H16.253952c-0.5615005,0-1.0159006,0.4542999-1.0159006,1.0158005
c0,0.5615997,0.4544001,1.0158997,1.0159006,1.0158997h22.3491974c0.5615005,0,1.0158997-0.4542999,1.0158997-1.0158997
C39.6190491,13.6606998,39.16465,13.2063999,38.6031494,13.2063999z"/>
<path d="M38.6031494,21.3334007H16.253952c-0.5615005,0-1.0159006,0.4542999-1.0159006,1.0157986
c0,0.5615005,0.4544001,1.0159016,1.0159006,1.0159016h22.3491974c0.5615005,0,1.0158997-0.454401,1.0158997-1.0159016
C39.6190491,21.7877007,39.16465,21.3334007,38.6031494,21.3334007z"/>
<path d="M38.6031494,29.4603004H16.253952c-0.5615005,0-1.0159006,0.4543991-1.0159006,1.0158997
s0.4544001,1.0158997,1.0159006,1.0158997h22.3491974c0.5615005,0,1.0158997-0.4543991,1.0158997-1.0158997
S39.16465,29.4603004,38.6031494,29.4603004z"/>
<path d="M28.4444485,37.5872993H16.253952c-0.5615005,0-1.0159006,0.4543991-1.0159006,1.0158997
s0.4544001,1.0158997,1.0159006,1.0158997h12.1904964c0.5615025,0,1.0158005-0.4543991,1.0158005-1.0158997
S29.0059509,37.5872993,28.4444485,37.5872993z"/>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 2.6 KiB

@ -1,6 +0,0 @@
<svg width="24px" height="24px" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<g>
<path fill="none" d="M0 0h24v24H0z"/>
<path d="M2.859 2.877l12.57-1.795a.5.5 0 0 1 .571.495v20.846a.5.5 0 0 1-.57.495L2.858 21.123a1 1 0 0 1-.859-.99V3.867a1 1 0 0 1 .859-.99zM4 4.735v14.53l10 1.429V3.306L4 4.735zM17 19h3V5h-3V3h4a1 1 0 0 1 1 1v16a1 1 0 0 1-1 1h-4v-2zm-6.8-7l2.8 4h-2.4L9 13.714 7.4 16H5l2.8-4L5 8h2.4L9 10.286 10.6 8H13l-2.8 4z"/>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 477 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.9 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 18 KiB

@ -1,4 +0,0 @@
<?xml version="1.0" standalone="no"?>
<svg width="1024px" height="1024px" viewBox="0 0 1024 1024" xmlns="http://www.w3.org/2000/svg" class="icon">
<path d="M688 312v-48c0-4.4-3.6-8-8-8H296c-4.4 0-8 3.6-8 8v48c0 4.4 3.6 8 8 8h384c4.4 0 8-3.6 8-8zm-392 88c-4.4 0-8 3.6-8 8v48c0 4.4 3.6 8 8 8h184c4.4 0 8-3.6 8-8v-48c0-4.4-3.6-8-8-8H296zm144 452H208V148h560v344c0 4.4 3.6 8 8 8h56c4.4 0 8-3.6 8-8V108c0-17.7-14.3-32-32-32H168c-17.7 0-32 14.3-32 32v784c0 17.7 14.3 32 32 32h272c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm445.7 51.5l-93.3-93.3C814.7 780.7 828 743.9 828 704c0-97.2-78.8-176-176-176s-176 78.8-176 176 78.8 176 176 176c35.8 0 69-10.7 96.8-29l94.7 94.7c1.6 1.6 3.6 2.3 5.6 2.3s4.1-.8 5.6-2.3l31-31a7.9 7.9 0 0 0 0-11.2zM652 816c-61.9 0-112-50.1-112-112s50.1-112 112-112 112 50.1 112 112-50.1 112-112 112z"/>
</svg>

Before

Width:  |  Height:  |  Size: 819 B

@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" id="Outline" viewBox="0 0 24 24" width="512" height="512"><path d="M19,3H12.472a1.019,1.019,0,0,1-.447-.1L8.869,1.316A3.014,3.014,0,0,0,7.528,1H5A5.006,5.006,0,0,0,0,6V18a5.006,5.006,0,0,0,5,5H19a5.006,5.006,0,0,0,5-5V8A5.006,5.006,0,0,0,19,3ZM5,3H7.528a1.019,1.019,0,0,1,.447.1l3.156,1.579A3.014,3.014,0,0,0,12.472,5H19a3,3,0,0,1,2.779,1.882L2,6.994V6A3,3,0,0,1,5,3ZM19,21H5a3,3,0,0,1-3-3V8.994l20-.113V18A3,3,0,0,1,19,21Z"/></svg>

Before

Width:  |  Height:  |  Size: 512 B

@ -1,24 +0,0 @@
<svg
width="24"
height="24"
viewBox="0 0 24 24"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M3 3H9V5H6.46173L11.3047 9.84298L9.8905 11.2572L5 6.3667V9H3V3Z"
fill="currentColor"
/>
<path
d="M3 21H9V19H6.3764L11.3046 14.0718L9.89038 12.6576L5 17.548V15H3V21Z"
fill="currentColor"
/>
<path
d="M15 21H21V15H19V17.5244L14.1332 12.6576L12.719 14.0718L17.6472 19H15V21Z"
fill="currentColor"
/>
<path
d="M21 3H15V5H17.5619L12.7189 9.84301L14.1331 11.2572L19 6.39032V9H21V3Z"
fill="currentColor"
/>
</svg>

Before

Width:  |  Height:  |  Size: 568 B

@ -1 +0,0 @@
<svg width="16px" height="16px" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg" fill="currentColor"><path fill-rule="evenodd" clip-rule="evenodd" d="M1.5 2h13l.5.5V9h-1V6H2v7h7v1H1.5l-.5-.5v-11l.5-.5zM2 5h12V3H2v2zm5 7v-1.094a1.633 1.633 0 0 1-.469-.265l-.945.539-.5-.86.937-.547a1.57 1.57 0 0 1 0-.547l-.937-.546.5-.86.945.54c.151-.12.308-.209.469-.266V7h1v1.094a1.48 1.48 0 0 1 .469.265l.945-.539.5.86-.937.547a1.57 1.57 0 0 1 0 .546l.937.547-.5.86-.945-.54a1.807 1.807 0 0 1-.469.266V12H7zm-.25-2.5c0 .208.073.385.219.531a.723.723 0 0 0 .531.219.723.723 0 0 0 .531-.219.723.723 0 0 0 .219-.531.723.723 0 0 0-.219-.531.723.723 0 0 0-.531-.219.723.723 0 0 0-.531.219.723.723 0 0 0-.219.531zm5.334 5.5v-1.094a1.634 1.634 0 0 1-.469-.265l-.945.539-.5-.86.938-.547a1.572 1.572 0 0 1 0-.547l-.938-.546.5-.86.945.54c.151-.12.308-.209.47-.266V10h1v1.094a1.486 1.486 0 0 1 .468.265l.945-.539.5.86-.937.547a1.562 1.562 0 0 1 0 .546l.937.547-.5.86-.945-.54a1.806 1.806 0 0 1-.469.266V15h-1zm-.25-2.5c0 .208.073.385.219.531a.723.723 0 0 0 .531.219.723.723 0 0 0 .531-.219.723.723 0 0 0 .22-.531.723.723 0 0 0-.22-.531.723.723 0 0 0-.53-.219.723.723 0 0 0-.532.219.723.723 0 0 0-.219.531z"/></svg>

Before

Width:  |  Height:  |  Size: 1.2 KiB

@ -1,62 +0,0 @@
<?xml version="1.0" encoding="iso-8859-1"?>
<!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 478.703 478.703" style="enable-background:new 0 0 478.703 478.703;" xml:space="preserve">
<g>
<g>
<path d="M454.2,189.101l-33.6-5.7c-3.5-11.3-8-22.2-13.5-32.6l19.8-27.7c8.4-11.8,7.1-27.9-3.2-38.1l-29.8-29.8
c-5.6-5.6-13-8.7-20.9-8.7c-6.2,0-12.1,1.9-17.1,5.5l-27.8,19.8c-10.8-5.7-22.1-10.4-33.8-13.9l-5.6-33.2
c-2.4-14.3-14.7-24.7-29.2-24.7h-42.1c-14.5,0-26.8,10.4-29.2,24.7l-5.8,34c-11.2,3.5-22.1,8.1-32.5,13.7l-27.5-19.8
c-5-3.6-11-5.5-17.2-5.5c-7.9,0-15.4,3.1-20.9,8.7l-29.9,29.8c-10.2,10.2-11.6,26.3-3.2,38.1l20,28.1
c-5.5,10.5-9.9,21.4-13.3,32.7l-33.2,5.6c-14.3,2.4-24.7,14.7-24.7,29.2v42.1c0,14.5,10.4,26.8,24.7,29.2l34,5.8
c3.5,11.2,8.1,22.1,13.7,32.5l-19.7,27.4c-8.4,11.8-7.1,27.9,3.2,38.1l29.8,29.8c5.6,5.6,13,8.7,20.9,8.7c6.2,0,12.1-1.9,17.1-5.5
l28.1-20c10.1,5.3,20.7,9.6,31.6,13l5.6,33.6c2.4,14.3,14.7,24.7,29.2,24.7h42.2c14.5,0,26.8-10.4,29.2-24.7l5.7-33.6
c11.3-3.5,22.2-8,32.6-13.5l27.7,19.8c5,3.6,11,5.5,17.2,5.5l0,0c7.9,0,15.3-3.1,20.9-8.7l29.8-29.8c10.2-10.2,11.6-26.3,3.2-38.1
l-19.8-27.8c5.5-10.5,10.1-21.4,13.5-32.6l33.6-5.6c14.3-2.4,24.7-14.7,24.7-29.2v-42.1
C478.9,203.801,468.5,191.501,454.2,189.101z M451.9,260.401c0,1.3-0.9,2.4-2.2,2.6l-42,7c-5.3,0.9-9.5,4.8-10.8,9.9
c-3.8,14.7-9.6,28.8-17.4,41.9c-2.7,4.6-2.5,10.3,0.6,14.7l24.7,34.8c0.7,1,0.6,2.5-0.3,3.4l-29.8,29.8c-0.7,0.7-1.4,0.8-1.9,0.8
c-0.6,0-1.1-0.2-1.5-0.5l-34.7-24.7c-4.3-3.1-10.1-3.3-14.7-0.6c-13.1,7.8-27.2,13.6-41.9,17.4c-5.2,1.3-9.1,5.6-9.9,10.8l-7.1,42
c-0.2,1.3-1.3,2.2-2.6,2.2h-42.1c-1.3,0-2.4-0.9-2.6-2.2l-7-42c-0.9-5.3-4.8-9.5-9.9-10.8c-14.3-3.7-28.1-9.4-41-16.8
c-2.1-1.2-4.5-1.8-6.8-1.8c-2.7,0-5.5,0.8-7.8,2.5l-35,24.9c-0.5,0.3-1,0.5-1.5,0.5c-0.4,0-1.2-0.1-1.9-0.8l-29.8-29.8
c-0.9-0.9-1-2.3-0.3-3.4l24.6-34.5c3.1-4.4,3.3-10.2,0.6-14.8c-7.8-13-13.8-27.1-17.6-41.8c-1.4-5.1-5.6-9-10.8-9.9l-42.3-7.2
c-1.3-0.2-2.2-1.3-2.2-2.6v-42.1c0-1.3,0.9-2.4,2.2-2.6l41.7-7c5.3-0.9,9.6-4.8,10.9-10c3.7-14.7,9.4-28.9,17.1-42
c2.7-4.6,2.4-10.3-0.7-14.6l-24.9-35c-0.7-1-0.6-2.5,0.3-3.4l29.8-29.8c0.7-0.7,1.4-0.8,1.9-0.8c0.6,0,1.1,0.2,1.5,0.5l34.5,24.6
c4.4,3.1,10.2,3.3,14.8,0.6c13-7.8,27.1-13.8,41.8-17.6c5.1-1.4,9-5.6,9.9-10.8l7.2-42.3c0.2-1.3,1.3-2.2,2.6-2.2h42.1
c1.3,0,2.4,0.9,2.6,2.2l7,41.7c0.9,5.3,4.8,9.6,10,10.9c15.1,3.8,29.5,9.7,42.9,17.6c4.6,2.7,10.3,2.5,14.7-0.6l34.5-24.8
c0.5-0.3,1-0.5,1.5-0.5c0.4,0,1.2,0.1,1.9,0.8l29.8,29.8c0.9,0.9,1,2.3,0.3,3.4l-24.7,34.7c-3.1,4.3-3.3,10.1-0.6,14.7
c7.8,13.1,13.6,27.2,17.4,41.9c1.3,5.2,5.6,9.1,10.8,9.9l42,7.1c1.3,0.2,2.2,1.3,2.2,2.6v42.1H451.9z"/>
<path d="M239.4,136.001c-57,0-103.3,46.3-103.3,103.3s46.3,103.3,103.3,103.3s103.3-46.3,103.3-103.3S296.4,136.001,239.4,136.001
z M239.4,315.601c-42.1,0-76.3-34.2-76.3-76.3s34.2-76.3,76.3-76.3s76.3,34.2,76.3,76.3S281.5,315.601,239.4,315.601z"/>
</g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
<g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 3.1 KiB

@ -0,0 +1 @@
name = "Test Name"

Binary file not shown.

@ -0,0 +1 @@
{"debug": true, "consolidatedBasePath": ".", "defaultLocations": {"ach": "Z:/Business Solutions/Griff/Code/InfoLeaseExtract/InputFiles", "disp": "", "gl": "", "lb": "Z:/Business Solutions/Griff/Code/InfoLeaseExtract/InputFiles", "minv": "", "niv": "", "ren": "", "pymt": "Z:/Business Solutions/Griff/Code/InfoLeaseExtract/InputFiles", "uap": "", "pastdue": ""}}

@ -0,0 +1,242 @@
from pathlib import Path
import re
from re import Pattern
import pandas as pd
from pandas import DataFrame, ExcelWriter, read_excel
from datetime import datetime as dt, timedelta
import logging
import il_reports as ilr
from dataclasses import dataclass
from typing import Callable
from tqdm import tqdm
from multiprocessing import Pool, cpu_count
import os
TOP_PATH: Path = Path(r"\\leafnow.com\shared\Accounting\CASH APPS\2023")
class LevelFilter(object):
    """Logging filter that passes only records of one exact level.

    Attach to a handler so that e.g. the INFO log file receives INFO
    records but not WARNING/ERROR ones.
    """

    def __init__(self, level):
        # The single level number this filter lets through.
        self.__wanted = level

    def filter(self, logRecord):
        # A record passes only when its level matches exactly.
        is_wanted_level = logRecord.levelno == self.__wanted
        return is_wanted_level
def create_logger(logger_name: str = __name__, log_folder: Path | None = None):
    """Create a logger writing WARNING/INFO/DEBUG to separate log files.

    Args:
        logger_name: Name passed to ``logging.getLogger``.
        log_folder: Directory to place the log files in.  Defaults to the
            shared InfoLeaseExtract logs folder (generalized from the
            original hard-coded UNC path).

    Returns:
        The configured ``logging.Logger``.
    """
    if log_folder is None:
        log_folder = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\logs")
    logger = logging.getLogger(logger_name)
    # Fix: without an explicit level the logger inherits the root's WARNING,
    # so the INFO and DEBUG handlers below would never receive any records.
    logger.setLevel(logging.DEBUG)
    fail_handler = logging.FileHandler(Path(log_folder, "Fail_br.log"), 'w')
    fail_handler.setLevel(logging.WARNING)
    info_handler = logging.FileHandler(Path(log_folder, "Info_br.log"), 'w')
    info_handler.setLevel(logging.INFO)
    # LevelFilter restricts each file to exactly one level.
    info_handler.addFilter(LevelFilter(logging.INFO))
    debug_handler = logging.FileHandler(Path(log_folder, "Debug_br.log"), 'w')
    debug_handler.setLevel(logging.DEBUG)
    debug_handler.addFilter(LevelFilter(logging.DEBUG))
    s_handler = logging.StreamHandler()
    s_handler.setLevel(logging.INFO)
    logger.addHandler(fail_handler)
    logger.addHandler(info_handler)
    logger.addHandler(debug_handler)
    logger.addHandler(s_handler)
    return logger
logger = create_logger()
@dataclass
class ExtractInstruction:
    """One extraction task: which files to match, how to parse them, and
    which worksheet the results belong to."""
    # Fragment used to match input file names.
    # NOTE(review): annotated ``Pattern`` but callers below pass plain
    # strings, and process_folder interpolates it into a glob — confirm
    # the intended type.
    input_regex: Pattern
    # Excel worksheet the extracted data is written to.
    sheet_name: str
    # Parser ``(report_str, save_name) -> DataFrame`` (see process_report).
    extract_method: Callable
@dataclass
class ReportFolder:
    """A report subfolder under each dated directory, plus the extraction
    tasks to run on the files found inside it."""
    # Folder name under each YYYY.MM.DD date directory (e.g. "ACH").
    # NOTE(review): annotated ``Path`` but constructed with plain strings
    # in the __main__ block — confirm the intended type.
    folder_name: Path
    # Extraction instructions to apply to files found in this folder.
    extraction_methods: list[ExtractInstruction]
def extract_date_path(path: Path) -> Path:
date_pattern = re.compile(r'^\d{4}\.\d{2}$')
for parent in path.parents:
if date_pattern.match(parent.name):
return parent
return None
def append_to_consolidated_report( report_path: Path, report_df: DataFrame, sheet_name: str):
    """
    Append extracted rows to the month's consolidated workbook.

    The target workbook name is derived from the 'YYYY.MM' ancestor of
    *report_path*; the workbook is created on first use, otherwise rows
    are appended below the existing data on *sheet_name*.

    Args:
        report_path: Path of the source report file.
        report_df: Extracted rows to write.
        sheet_name: Worksheet the rows belong to.
    """
    report_month: Path = extract_date_path(report_path)
    report_name: str = f"{str(report_month.name).replace('.','-')}_{sheet_name}_ConsolidatedReport.xlsx"
    save_path = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\2023",report_name)
    logger.debug(f"{save_path=}")
    # Check if the current month has a consolidated report
    if not save_path.exists():
        logger.debug(f"Consolidated Report | No monthly summary file!\n\tCreating: {save_path}")
        # No file exists yet
        # Create it and add the current month
        try:
            with pd.ExcelWriter(save_path) as writer:
                logger.debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
                report_df.to_excel(writer, index=False, sheet_name=sheet_name)
        except Exception as e:
            logger.error(f"Failed to create to consolidated report! {report_name} | {sheet_name} | {report_path} :\n{e}")
    else:
        # We need to read the dataframe in the current monthly report
        # Check that we are not adding matching data
        # NOTE(review): despite the comment above, no duplicate check is
        # actually performed here — new rows are always appended.
        # Save the new report
        # FIXME: This is so hacky it's embarrassing
        try:
            # Row count of the existing sheet tells us where to start
            # appending so the new rows land directly below the old ones.
            current_data_len = len(pd.read_excel(save_path,sheet_name=sheet_name))
            with pd.ExcelWriter(save_path, engine='openpyxl', mode='a',if_sheet_exists="overlay") as writer:
                logger.debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
                report_df.to_excel(writer, index=False, sheet_name=sheet_name,startrow=current_data_len,header=False)
        except Exception as e:
            logger.error(f"Failed to append to consolidated report! {report_name} | {sheet_name} | {report_path} :\n{e}")
def process_report(file: Path, extract_inst: ExtractInstruction) -> bool:
    """Parse one report file and append its data to the consolidated report.

    Args:
        file: Path of the raw report file to read.
        extract_inst: The extraction instruction to apply.

    Returns:
        True when the report was parsed and appended; False on any failure
        (failures are logged, never raised).
    """
    try:
        # errors="replace" keeps odd mainframe characters from aborting
        # the read; passing save_name=None skips per-file Excel output.
        report_str: str = file.read_text(errors="replace")
        try:
            df: DataFrame = extract_inst.extract_method(report_str, None)
            if df.empty:
                raise ValueError("Dataframe is empty!")
        except Exception as e:
            logger.warning(f"Failed to create report df: {extract_inst.sheet_name}:\n{e}")
            return False
        append_to_consolidated_report(file, df, extract_inst.sheet_name)
        return True
    except Exception as e:
        logger.exception(f"could not process {file}:\n{e}")
        return False
def process_folder(folder: ReportFolder):
    """Scan every dated directory for this folder's reports and process them.

    Walks day by day from 2023-05-01 to today, looking under
    ``TOP_PATH/YYYY.MM/YYYY.MM.DD/<folder name>`` and running each of the
    folder's extraction instructions on the first matching file.
    """
    # Search recursively through date directories
    report_date: dt = dt(2023, 5, 1)
    while report_date.date() < dt.now().date():
        logger.info(f"{folder.folder_name} | Processing date: {report_date}")
        report_folder: Path = Path(TOP_PATH,
                                   report_date.strftime("%Y.%m"),
                                   report_date.strftime("%Y.%m.%d"),
                                   folder.folder_name
                                   )
        logger.debug(f"report_folder: {report_folder}")
        if report_folder.exists():
            for xi in folder.extraction_methods:
                # NOTE(review): xi.input_regex is used as a glob fragment
                # here, not a regex — a pattern like r"_LOCKBOX_\d+_" will
                # never match a file via glob; confirm intent.
                try:
                    files = report_folder.glob(f"*{xi.input_regex}*")
                    report_file: Path = next(files)
                    logger.debug(f"Report file: {report_file}")
                except StopIteration:
                    # Fix: next() on an exhausted glob raises StopIteration
                    # (the original caught IndexError and then fell through
                    # with report_file undefined).
                    logger.warning(f"No matching reports!: {report_folder} | {xi.sheet_name}")
                    continue
                except Exception as e:
                    logger.debug(f"Could not get report_file: {report_folder.glob(f'*{xi.input_regex}*')} \n{e}")
                    continue
                try:
                    success = process_report(report_file, xi)
                    if success:
                        logger.info(f"Report Processed: {report_file} | {xi.sheet_name}")
                    else:
                        logger.warning(f"Failed to process report: {report_file} | {xi.sheet_name}")
                except Exception as e:
                    logger.exception(f"Could not process report ({report_file}) :\n{e}")
                    continue
        else:
            logger.debug(f"Folder '{report_folder}' does not exist!")
        report_date = report_date + timedelta(days=1)
    logger.debug(f"Finished scanning {folder.folder_name}!")
def combine(year: int = 2023, months=range(1, 6)):
    """Build one monthly consolidated workbook per month.

    Reads each per-report consolidated file for the month and writes them
    as separate sheets of ``<YYYY-MM> Consolidated Report.xlsx``.

    Args:
        year: Calendar year of the reports (default keeps the original
            hard-coded 2023).
        months: Iterable of month numbers to combine (default 1-5, as the
            original hard-coded).  Fix: the month is now zero-padded with
            ``{m:02d}``, so months >= 10 format correctly — the original
            ``f"2023-0{i}"`` would produce e.g. "2023-010".
    """
    WORK_DIR = Path(r"\\leafnow.com\shared\Business Solutions\Griff\Code\InfoLeaseExtract\2023")
    REPORTS = [
        "ACH",
        "CHECKS LIVE",
        "CREDIT CARDS",
        "PAY BY PHONE",
        "WIRE",
        "RETURNS ACH",
        "RETURNS PORTAL"
    ]
    for m in months:
        month = f"{year}-{m:02d}"
        mcr: Path = Path(f"{month} Consolidated Report.xlsx")
        print(f"Creating monthly consolidated report: {mcr}")
        with ExcelWriter(Path(WORK_DIR, "Monthly", mcr), engine="xlsxwriter") as wrtr:
            for r in REPORTS:
                report_path: Path = Path(WORK_DIR, f"{month}_{r}_ConsolidatedReport.xlsx")
                print(f"Report Path ({r}): {report_path}")
                rdf: DataFrame = read_excel(report_path, sheet_name=r)
                rdf.to_excel(wrtr, sheet_name=r, freeze_panes=(1,0), index=False)
if __name__ == "__main__":
    # One ReportFolder per cash-apps subfolder: the file-name fragment(s)
    # to match and the extractor that parses each match.
    FOLDERS = [
        ReportFolder("ACH", [
            ExtractInstruction("_ACH_", "ACH", ilr.ach),
        ]),
        ReportFolder("CHECKS LIVE", [
            ExtractInstruction("_PROGPAY_BER", "CHECKS LIVE", ilr.payment_transactions)
        ]),
        ReportFolder("CREDIT CARDS", [
            ExtractInstruction("_VMCC_BER", "CREDIT CARDS", ilr.payment_transactions)
        ]),
        ReportFolder("LOCKBOX", [
            # Fix: raw string — "\d" in a normal string literal is an
            # invalid escape (DeprecationWarning, SyntaxError in future
            # Python versions).
            ExtractInstruction(r"_LOCKBOX_\d+_", "LOCKBOX", ilr.lockbox)
        ]),
        ReportFolder("PAY BY PHONE", [
            ExtractInstruction("_PBP_EPAY_DPS_BER", "PAY BY PHONE", ilr.payment_transactions)
        ]),
        ReportFolder("RETURN REPORTING", [
            ExtractInstruction("_PBP_EPAY_RETURNS_BER", "RETURNS ACH", ilr.payment_transactions),
            ExtractInstruction("_RETURNS_BER", "RETURNS PORTAL", ilr.payment_transactions)]
        ),
        ReportFolder("WIRES", [
            ExtractInstruction("MTBWIRE_BER", "WIRE", ilr.payment_transactions)
        ]),
    ]
    # NOTE(review): only the first folder (ACH) is processed here; the
    # commented-out multiprocessing fan-out over all FOLDERS was removed
    # as dead code.  TODO: restore a loop/Pool over FOLDERS when ready.
    process_folder(FOLDERS[0])
    combine()

@ -0,0 +1,12 @@
{
"name": {
"report": "",
"excel": ""
},
"relative_position": {
"rows": 0,
"col": 0
},
"length": 0,
"data_type": "int"
}

@ -0,0 +1,184 @@
from typing import TypeAlias, TypeVar
from dataclasses import dataclass
from pathlib import Path
import pathlib as pl
from abc import ABC, abstractmethod, abstractproperty
from re import search, match, compile, Match, Pattern
from enum import Enum
ColumnIndex: TypeAlias = int
Money: TypeAlias = float
Numeric = TypeVar("Numeric", float, int)
class Line(Enum):
    """Classification of a single report line.

    Fix: the original declared only bare annotations (``Header: str``)
    with no values, which creates an Enum with NO members — any later
    reference such as ``Line.Data`` raised AttributeError as soon as it
    was evaluated (e.g. in a method annotation at import time).  Giving
    each member a value makes the attributes real.
    """
    Header = "header"        # line containing dataset column headers
    Data = "data"            # line carrying extractable values
    Erroneous = "erroneous"  # malformed / unexpected line
    Top = "top"              # top-of-page banner line
    Bottom = "bottom"        # bottom-of-page banner line
@dataclass
class RelativePosition:
    """
    Coordinates for navigating from one point in a row to another
    """
    # Number of lines down from the reference line.
    rows: int
    # Column index within the target line.
    col: ColumnIndex
@dataclass
class DataValue:
position: RelativePosition
length : int
regex: Pattern
dtype: type
def correct_line(self, adj_lines_since_header: int) -> bool:
"""
"""
return adj_lines_since_header % self.position.rows == 0
def _line_slice(self, line: Line.Data) -> str|None:
"""
Attempts to get the data from the line.
Returns string in correct postion or None if out of range.
"""
try:
start: int = self.position.col
end: int = start + self.length
line_slice: str = line[start:end]
except IndexError:
#TODO: Add logging
line_slice = None
finally:
return line_slice
@staticmethod
def _to_float(number_str: str) -> float|None:
try:
f_value:float = float(number_str.replace(',',''))
return f_value
except:
return None
def extract(self, line: Line.Data) -> type|None:
"""
"""
line_slice: str|None = self._line_slice(line)
if isinstance(line_slice, None):
return None
value_match: Match|None = search(self.regex, line_slice)
if isinstance(value_match, None):
return None
value_str: str = value_match.group()
value_str.strip()
if self.dtype == int or self.dtype == float:
return self._to_float(value_str)
#TODO datetime
return value_str
class DataSet:
    """One named report column, configured from a dict.

    NOTE(review): this reads ``config["naming"]`` but the checked-in
    dataset_template.json uses a top-level key ``"name"`` — confirm which
    schema is correct.
    """

    def __init__(self, config: dict) -> None:
        # Name as it appears in the report header text (used as a regex
        # by line_position).
        self.r_name = config["naming"]["report"]
        # Name used for the Excel column; falls back to the report name.
        try:
            self.e_name = config["naming"]["excel"]
        except KeyError:
            self.e_name = self.r_name
        self.data_value: DataValue = DataValue(
            position=RelativePosition(
                rows=config["relative_position"]["rows"],
                col=config["relative_position"]["col"]
            ),
            length=config["length"],
            # Fix: DataValue requires a `regex` field; the original omitted
            # it, so every construction raised TypeError.  An explicit
            # pattern may be supplied via config["regex"]; the default
            # matches any non-empty text.
            regex=compile(config.get("regex", r".+")),
            dtype=config["data_type"],
        )

    def line_position(self, line: str) -> "ColumnIndex | None":
        """
        Searches a line for the report header for this dataset.
        Returns:
            - ColumnIndex(int) | None: The column index of the matches end position
            or None if no match was found
        """
        header_match: Match | None = search(self.r_name, line)
        return header_match.end() if isinstance(header_match, Match) else None
@dataclass
class ReportConfig:
    """Static description of one report type."""
    # File extension of the raw report files.
    file_extension: str
    # Human-readable report name.
    name: str
    # The data columns to extract from each report.
    datasets: list[DataSet]
    # Patterns identifying lines that contain data headers
    # (see ILReport._is_header_line).
    data_line_regexes: list[Pattern]
class ILReport(ABC):
    """Base class for parsing a fixed-width InfoLease report file.

    Fix: annotations referencing ``Line.Data`` / ``Line`` are removed —
    the ``Line`` enum as committed has no members, so those annotations
    raised AttributeError at import time.
    """

    def __init__(self, file_path: Path, report_config: "ReportConfig") -> None:
        self.in_file_path: Path = file_path
        # Lazy line source over the report file.
        self.line_gen = self._line_generator(file_path)
        self.config = report_config
        self.name = report_config.name
        # Rolling window of recently seen line classifications.
        self.line_type_history: list = []
        self.last_header_line: int | None = None
        # One output column per dataset, keyed by its Excel name.
        self.data_dict: dict = {
            header.e_name: []
            for header in self.config.datasets
        }

    @staticmethod
    def _line_generator(file_path: Path):
        """Yield the report's lines lazily.

        Fix: iterate the file object directly instead of readlines(),
        which read the entire file into memory and defeated the point of
        a generator.
        """
        with open(file_path, 'r') as in_file:
            for line in in_file:
                yield line

    def _add_line_history(self, line, max_history: int = 10):
        """Record a line classification, keeping at most *max_history*."""
        self.line_type_history.append(line)
        while len(self.line_type_history) > max_history:
            self.line_type_history.pop(0)

    def _is_header_line(self, line: str) -> bool:
        """
        Checks whether a report line has data headers.
        """
        return any(
            isinstance(search(regex, line), Match)
            for regex in self.config.data_line_regexes
        )

    @abstractmethod
    def _skip_line(self, line) -> bool:
        """
        Tells whether we should skip this line
        """

    @abstractmethod
    def _process_line(self):
        """Process the current line from the generator."""

    @abstractmethod
    def _process_dataline(self, dataline):
        """Extract data from a data line.

        Implementations search the row for a data set name (or list of
        names) and extract all the data until the next row.
        """
if __name__ == "__main__":
    # Placeholder entry point; dataset config loading is not wired up yet.
    datasets = []

@ -11,10 +11,10 @@ from logging import debug, DEBUG, basicConfig
with open("settings.json") as s:
settings = json.loads(s.read())
if settings["debug"]:
basicConfig(filename='debug.log', encoding='utf-8', level=DEBUG)
#if settings["debug"]:
basicConfig(filename='debug.log', mode='w', encoding='utf-8', level=DEBUG)
debug("\n\n\n########################### VERSION = 3.10 ###########################\n\n\n")
debug("\n\n\n########################### VERSION = 3.2 ###########################\n\n\n")
debug("Running main.py...")
class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
"""
@ -290,14 +290,16 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
debug(f"report_type_change | inputFile: {self.inputFile}")
debug(f"report_type_change | outputFile: {self.outputFile}")
self.check_ready_to_process()
# Defines the app
app = QtWidgets.QApplication(sys.argv)
# Sets the style
app.setStyle("Fusion")
# Builds the main window
window = MainWindow()
window.setWindowTitle("IL Extract")
window.show()
# Starts the app
app.exec()
if __name__ == "__main__":
# Defines the app
app = QtWidgets.QApplication(sys.argv)
# Sets the style
app.setStyle("Fusion")
# Builds the main window
window = MainWindow()
window.setWindowTitle("IL Extract")
window.show()
# Starts the app
app.exec()

@ -8,7 +8,7 @@ import numpy as np
from glob import glob
from logging import debug, DEBUG, basicConfig, warn, error
# V3.1 | 01/19/23
# V3.2 | 04/21/23
with open("settings.json") as s:
settings = json.loads(s.read())
@ -70,10 +70,14 @@ class ILReport:
sheet_name = "CREDIT CARDS"
elif re.search("(?i)lockbox", self.location) != None:
sheet_name = "LOCKBOX"
elif re.search("(?i)PBP_EPAY_RETURNS_BER", self.location) != None:
sheet_name = "RETURNS ACH"
elif re.search("(?i)epay", self.location) != None:
sheet_name = "PAY BY PHONE"
elif re.search("(?i)wires", self.location) != None:
sheet_name = "WIRES"
elif re.search("(?i)RETURNS_BER", self.location) != None:
sheet_name = "RETURNS Portal"
else:
return None
@ -102,18 +106,25 @@ class ILReport:
# We need to read the dataframe in the current monthly report
# Check that we are not adding matching data
# Save the new report
current_data: DataFrame = pd.read_excel(month_summary_file[0], sheet_name=sheet_name)
new_data_len = len(dataframe_to_append)
cur_first_col = current_data.iloc[len(current_data)-new_data_len:,0].to_list()
new_first_col = dataframe_to_append.iloc[:,0].to_list()
if cur_first_col == new_first_col:
debug(f"Consolidated Report | Data is same as previous! Skipping!")
return None
#FIXME: This is so hacky it's embaressing
add_headers = False
try:
current_data: DataFrame = pd.read_excel(month_summary_file[0], sheet_name=sheet_name)
new_data_len = len(dataframe_to_append)
cur_first_col = current_data.iloc[len(current_data)-new_data_len:,0].to_list()
new_first_col = dataframe_to_append.iloc[:,0].to_list()
if cur_first_col == new_first_col:
debug(f"Consolidated Report | Data is same as previous! Skipping!")
return None
except ValueError as ve:
ve == ValueError(f"Worksheet named '{sheet_name} not found")
current_data = []
add_headers = True
# We need to find the start cols (where the new data should go)
try:
with pd.ExcelWriter(save_path, engine='openpyxl', mode='a',if_sheet_exists="overlay") as writer:
debug(f"Consolidated Report | {sheet_name}: Saving data as: {report_name}")
dataframe_to_append.to_excel(writer, index=False, sheet_name=sheet_name,startrow=len(current_data),header=False)
dataframe_to_append.to_excel(writer, index=False, sheet_name=sheet_name,startrow=len(current_data),header=add_headers)
except Exception as e:
error(f"[E] Failed to append to consolidated report! {sheet_name}:\n{e}")
@ -181,7 +192,7 @@ COMMON REGEX COMPONENTS
"""
def ach(report: str, save_name: str):
def ach(report: str, save_name: str|None):
debug(f"ACH Report {save_name} :\n{report}")
lines = report.splitlines()
extracted_data_dict = {
@ -235,11 +246,12 @@ def ach(report: str, save_name: str):
dataframe: DataFrame = DataFrame(extracted_data_dict)
# We're creating two sheets: data & summary so we need to open and excel writer
# This also helps with a bug caused by larger dataframes
with pd.ExcelWriter(save_name) as writer:
debug(f"ACH: Saving data as: {save_name}")
dataframe.to_excel(writer, index=False, sheet_name="data")
# The batches dictionary is converted to a dataframe and added as its own sheet
DataFrame(batches).to_excel(writer, index=False, sheet_name="Summary")
if save_name is not None:
with pd.ExcelWriter(save_name) as writer:
debug(f"ACH: Saving data as: {save_name}")
dataframe.to_excel(writer, index=False, sheet_name="data")
# The batches dictionary is converted to a dataframe and added as its own sheet
DataFrame(batches).to_excel(writer, index=False, sheet_name="Summary")
return dataframe
def disposition(report: str, save_name: str):
@ -614,7 +626,7 @@ def net_invest_trial_balance(report: str, save_name: str):
writer, index=True, sheet_name="Summary")
return dataframe
def lockbox(report: str, save_name: str):
def lockbox(report: str, save_name: str|None):
debug(f"LockBox Report {save_name}:\n{report}")
lines = report.splitlines()
extracted_data_dict = {
@ -667,7 +679,8 @@ def lockbox(report: str, save_name: str):
extracted_data_dict["CUST NAME"].append(lines[index+1].strip())
dataframe = DataFrame(extracted_data_dict)
debug(f"LockBox | Saving dataframe: {save_name}")
dataframe.to_excel(save_name, index=False)
if save_name is not None:
dataframe.to_excel(save_name, index=False)
return dataframe
@ -706,7 +719,7 @@ def minv(report: str, save_name: str):
# Good for PUB_WIRES, VMCC, PBP_EPAY, returned check
def payment_transactions(report: str, save_name: str):
def payment_transactions(report: str, save_name: str|None):
debug(f"PayTrans | {save_name}:\n{report}")
lines = report.splitlines()
data_extractor = create_line_divider([6,33,52,62,80,89,110,121])
@ -743,8 +756,9 @@ def payment_transactions(report: str, save_name: str):
extracted_data_dict['INV NO'].append(inv_no)
dataframe = DataFrame(extracted_data_dict)
debug(f"PayTrans | Complted Dataframe:\n{dataframe}")
dataframe.to_excel(save_name, index=False)
debug(f"PayTrans | Saved to {save_name}")
if save_name is not None:
dataframe.to_excel(save_name, index=False)
debug(f"PayTrans | Saved to {save_name}")
return dataframe

@ -1 +0,0 @@
{"debug": false, "consolidatedBasePath": "leafnow.com/shared/cashapps", "defaultLocations": {"ach": "", "disp": "", "gl": "", "lb": "", "minv": "", "niv": "", "ren": "", "pymt": "", "uap": "", "pastdue": ""}}

@ -1,3 +1,6 @@
"""
The user interface set up for the main window of the application
"""
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ILE_MainWindow.ui'

@ -2,19 +2,24 @@
## Priority
- [ ] Bring back in consolidated reports
- [ ] ACH
- [ ] CHECKS_LIVE
- [ ] CREDIT
- [ ] LOCKBOX
- [ ] PAY BY PHONE
- [ ] WIRES
- [X] ACH
- [X] CHECKS_LIVE
- [X] CREDIT
- [X] LOCKBOX
- [X] PAY BY PHONE
- [X] WIRES
- [ ] RETURNS ACH
- [ ] RETURNS Portal *(new addition)*
- [ ] Adjust pyinstaller spec for new file structure
- [ ] Function to recap year
- [ ] Fix Logging
## Feature Goals
- [ ] Year Walkthrough report
- [ ] 'In Progress' notification/spinner
- Speed up ACH/All
Generate monthly consolidated reports for each month in a year
- Must generate IL Extract report where necessary

Loading…
Cancel
Save