Compare commits


2 Commits

Author SHA1 Message Date
MSVstudios d1b5e180fa refactor: update docker image name and enhance error logging; add docker-compose configuration 2026-03-17 09:51:10 +01:00
MSVstudios 90fef9b186 pre-trigger disable technique 2026-03-17 08:31:18 +01:00
7 changed files with 121 additions and 6 deletions


@@ -0,0 +1,27 @@
+name: Build Docker Image
+on:
+  push:
+    branches: [ main ]
+jobs:
+  build:
+    name: Build Docker image
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Build Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          push: false
+          load: true
+          tags: |
+            ach-server-import-media:latest

.gitignore vendored (3 additions)

@@ -9,6 +9,9 @@ logs/
 *.log
 output/
 input/
+doc/
+docs/
+TODO-mime.md
 s3_cache.json


@@ -104,7 +104,7 @@ If you want to execute the importer manually inside the running container (for d
 docker compose exec app python /app/main.py
 # Or using docker exec with the container name
-docker exec -it ACH_server_media_importer python /app/main.py
+docker exec -it ACH_server_media_importer02 python /app/main.py
 ```
 ### Stop

docker-compose.harbor.yml (new file, 27 additions)

@@ -0,0 +1,27 @@
+services:
+  app:
+    image: reg.neurareel.com/ach-server-import-media:latest
+    container_name: ACH_server_media_importer02
+    volumes:
+      - logs:/app/logs
+    env_file:
+      - .env
+    environment:
+      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
+      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
+      - AWS_REGION=${AWS_REGION}
+      - AWS_ENDPOINT_URL=${AWS_ENDPOINT_URL}
+      - BUCKET_NAME=${BUCKET_NAME}
+      - DB_HOST=${DB_HOST}
+      - DB_NAME=${DB_NAME}
+      - DB_USER=${DB_USER}
+      - SMTP_SERVER=${SMTP_SERVER}
+      - SMTP_PORT=${SMTP_PORT}
+      - SMTP_USER=${SMTP_USER}
+      - SMTP_PASSWORD=${SMTP_PASSWORD}
+      - SENDER_EMAIL=${SENDER_EMAIL}
+      - EMAIL_RECIPIENTS=${EMAIL_RECIPIENTS}
+    restart: unless-stopped
+volumes:
+  logs:
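For reference, the application later calls `psycopg2.connect(**db_config)` with settings drawn from these variables. A minimal sketch of how that dictionary might be assembled, assuming a hypothetical `load_db_config` helper; `DB_PASSWORD` and `DB_PORT` are not mapped explicitly above, so they are assumed to arrive via the `env_file: .env` entry:

```python
import os

def load_db_config():
    """Hypothetical helper: build the psycopg2 connection settings from
    the environment. DB_HOST, DB_NAME and DB_USER are mapped explicitly
    in docker-compose.harbor.yml; the rest are assumed to come from .env."""
    return {
        'host': os.getenv('DB_HOST'),
        'port': os.getenv('DB_PORT', '5432'),  # assumption: default PostgreSQL port
        'dbname': os.getenv('DB_NAME'),
        'user': os.getenv('DB_USER'),
        'password': os.getenv('DB_PASSWORD'),  # assumption: supplied via .env
    }
```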


@@ -92,7 +92,7 @@ def send_error_email(subject, body, recipients):
         server.login(smtp_user, smtp_password)
         server.sendmail(sender_email, recipients, msg.as_string())
-        logging.error("Error email sent successfully")
+        logging.info("Error notification email sent successfully")
     except Exception as e:
         logging.error(f"Failed to send error email: {e}")


@@ -102,7 +102,8 @@ def main_process(aws_config, db_config, ach_config, bucket_name, ach_variables):
     valid_extensions = {'.mp3', '.mp4', '.md5', '.json', '.pdf'}  # don't like this
     # excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'FILE/'}
     # excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'TST/', 'FILE/', 'DVD/', 'UMT/'}
-    excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'TST/', 'FILE/', 'MCC/'}
+    # excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'TST/', 'FILE/', 'MCC/'}
+    excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'TST/'}
     # excluded_folders = {'DOCUMENTAZIONE_FOTOGRAFICA/', 'TEST-FOLDER-DEV/', 'TST/',}
     # included_folders = {'FILE/'}  # uncomment this to NOT use excluded folders
     # included_folders = {'TEST-FOLDER-DEV/'}  # uncomment this to NOT use excluded folders
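The commented-out sets above toggle which S3 prefixes get skipped (or, with `included_folders`, kept). A small sketch of the kind of filtering this feeds into; the function name and exact shape are illustrative, not the repository's actual helper:

```python
import os

def filter_keys(s3_keys, valid_extensions,
                excluded_folders=frozenset(), included_folders=None):
    """Keep keys with a valid extension; honour an include list when one
    is given, otherwise fall back to the exclude list, mirroring the
    comment-toggling pattern above."""
    result = []
    for key in s3_keys:
        # Skip anything without one of the accepted extensions.
        if os.path.splitext(key)[1].lower() not in valid_extensions:
            continue
        if included_folders is not None:
            # Include mode: keep only keys under an included prefix.
            if any(key.startswith(prefix) for prefix in included_folders):
                result.append(key)
        elif not any(key.startswith(prefix) for prefix in excluded_folders):
            # Exclude mode: drop keys under an excluded prefix.
            result.append(key)
    return result

# Example: filter_keys(keys, {'.mp3', '.mp4'}, excluded_folders={'TST/'})
```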


@@ -4,6 +4,7 @@ import logging  # for logging
 import json  # for json.loads
 import os  # for os.path
 import psycopg2  # for PostgreSQL
+from psycopg2 import sql
 # Import custom modules
 from file_utils import retrieve_file_contents, check_related_files, extract_and_validate_file_info  # for file operations
@@ -47,9 +48,21 @@ def parse_s3_files( s3, s3_files, ach_variables, excluded_folders=[], s3_listing
         raise ValueError("Database configuration is not loaded")
         # return  # Exit the function if db_config is None
     conn = None
+    cur = None
     conn = psycopg2.connect(**db_config)
     cur = conn.cursor()
+    # Disable the trigger during bulk import to avoid repeated expensive
+    # materialized view maintenance. We'll re-enable it in the finally block below.
+    try:
+        cur.execute("ALTER TABLE file DISABLE TRIGGER trg_refresh_file_view;")
+        conn.commit()
+        logging.info("Disabled trigger trg_refresh_file_view for bulk import.")
+    except Exception as e:
+        logging.warning("Could not disable trigger trg_refresh_file_view: %s", e)
     # Filter files with the desired prefix
     # excluded_prefix = ['TEST-FOLDER-DEV/', 'DOCUMENTAZIONE_FOTOGRAFICA/', 'BTC/', 'VHS/', 'UMT/', 'OV2/', 'OA4/']
     excluded_prefix = excluded_folders
@@ -305,8 +318,6 @@ def parse_s3_files( s3, s3_files, ach_variables, excluded_folders=[], s3_listing
             except Exception:
                 # Ignore release errors; rollback already cleaned up state if needed
                 pass
-        cur.close()
-        conn.close()
     except ValueError as e:
         # Handle specific validation errors
         logging.error(f"Validation error: {e}")
@@ -317,6 +328,52 @@ def parse_s3_files( s3, s3_files, ach_variables, excluded_folders=[], s3_listing
         import traceback
         notify_error("FATAL ERROR in Phase 3 process", e)
         raise e  # Raise the exception to the calling function
+    finally:
+        # Ensure trigger is re-enabled even if the loop fails or the transaction is aborted.
+        try:
+            if conn and getattr(conn, 'closed', 1) == 0:
+                # Clear any aborted transaction state so we can run the enable statement.
+                conn.rollback()
+        except Exception:
+            pass
+        try:
+            if conn and getattr(conn, 'closed', 1) == 0:
+                if cur is None or getattr(cur, 'closed', True):
+                    cur = conn.cursor()
+                cur.execute("ALTER TABLE file ENABLE TRIGGER trg_refresh_file_view;")
+                conn.commit()
+                logging.info("Re-enabled trigger trg_refresh_file_view.")
+        except Exception as e:
+            logging.critical("Failed to re-enable trigger trg_refresh_file_view: %s", e)
+        try:
+            if conn and getattr(conn, 'closed', 1) == 0:
+                # Refresh a materialized view. Default to mv_last_file if not configured.
+                view_name = os.getenv('REFRESH_MATERIALIZED_VIEW_NAME')
+                if not view_name or not view_name.strip():
+                    view_name = 'mv_last_file'
+                if cur is None or getattr(cur, 'closed', False):
+                    cur = conn.cursor()
+                cur.execute(sql.SQL("REFRESH MATERIALIZED VIEW {view};").format(
+                    view=sql.Identifier(view_name)
+                ))
+                conn.commit()
+                logging.info("Refreshed materialized view: %s", view_name)
+        except Exception as e:
+            logging.critical("Failed to refresh materialized view: %s", e)
+        try:
+            if cur:
+                cur.close()
+        except Exception:
+            pass
+        try:
+            if conn:
+                conn.close()
+        except Exception:
+            pass
     # return the file saved
     return uploaded_files_count, warning_files_count, error_files_count
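Condensed, the technique this compare introduces is: disable the per-row trigger, run the bulk import, then re-enable the trigger and refresh the materialized view exactly once in a `finally` block. A self-contained sketch of that pattern, using the table, trigger, and view names from the diff; the insert statement and connection settings are illustrative, and detailed error handling is elided:

```python
import psycopg2
from psycopg2 import sql

def bulk_import(db_config, rows, view_name='mv_last_file'):
    """Sketch of the disable-trigger bulk-import pattern shown above.
    Requires sufficient privileges to ALTER the table's triggers."""
    conn = psycopg2.connect(**db_config)
    cur = conn.cursor()
    try:
        # One expensive view refresh saved per row: disable before the loop.
        cur.execute("ALTER TABLE file DISABLE TRIGGER trg_refresh_file_view;")
        conn.commit()
        for row in rows:
            # Illustrative insert; the real importer writes full file records.
            cur.execute("INSERT INTO file (name) VALUES (%s);", (row,))
        conn.commit()
    finally:
        conn.rollback()  # clear any aborted transaction state first
        cur.execute("ALTER TABLE file ENABLE TRIGGER trg_refresh_file_view;")
        conn.commit()
        # Refresh once, instead of once per inserted row.
        cur.execute(sql.SQL("REFRESH MATERIALIZED VIEW {};")
                    .format(sql.Identifier(view_name)))
        conn.commit()
        cur.close()
        conn.close()
```

The `sql.Identifier` quoting matches the diff's own approach and keeps a configured view name from being interpolated as raw SQL.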