- VERSION 5.10.27

- feat(arquitectura): Consolidación de estabilidad y diagnóstico.
- refactor: Arquitectura de base de datos local y políticas de logs.
- arch(sqlite): Aislamiento total de las conexiones SQLite en SQL_Auth y SQL_Logs. Esto protege las operaciones de autenticación críticas de la alta carga de I/O generada por el subsistema de logs.
- feat(logs): Implementación de modo de almacenamiento flexible para logs (disco o en memoria), mejorando la capacidad de testing.
- refactor(logs): Se estandariza el límite de retención de registros a 10,000 para todas las tablas de logs, y se renombra la subrutina de limpieza a borraArribaDe10000Logs.
This commit is contained in:
jaguerrau
2025-10-29 05:25:49 -06:00
parent 4c7639f867
commit 9c9e2975e9
12 changed files with 1390 additions and 1374 deletions

View File

@@ -30,26 +30,25 @@ Library7=jshell
Library8=json
Library9=jsql
Module1=Cambios
Module10=LoginHandler
Module11=LogoutHandler
Module12=Manager
Module13=ParameterValidationUtils
Module14=ping
Module15=RDCConnector
Module16=SSE
Module17=SSEHandler
Module18=TestHandler
Module10=LogoutHandler
Module11=Manager
Module12=ParameterValidationUtils
Module13=ping
Module14=RDCConnector
Module15=SSE
Module16=SSEHandler
Module17=TestHandler
Module2=ChangePassHandler
Module3=ConnectionPoolManager
Module4=DBHandlerB4X
Module5=DBHandlerJSON
Module6=DoLoginHandler
Module7=faviconHandler
Module8=GlobalParameters
Module9=HikariConnectionPool
Module3=DBHandlerB4X
Module4=DBHandlerJSON
Module5=DoLoginHandler
Module6=faviconHandler
Module7=GlobalParameters
Module8=HikariConnectionPool
Module9=LoginHandler
NumberOfFiles=9
NumberOfLibraries=9
NumberOfModules=18
NumberOfModules=17
Version=10.3
@EndOfDesignText@
'Non-UI application (console / server application)
@@ -95,51 +94,42 @@ Version=10.3
Sub Process_Globals
' The main B4J HTTP server object
Public srvr As Server
' The current version of this modified jRDC server
Public const VERSION As Float = 2.23
' Custom types for serializing/deserializing data
Type DBCommand (Name As String, Parameters() As Object)
Type DBResult (Tag As Object, Columns As Map, Rows As List)
' Holds a list of configured database identifiers (e.g., "DB1", "DB2")
Public listaDeCP As List
' A temporary list to store found configuration file names during startup
Private cpFiles As List
' Global maps to manage database connectors and loaded SQL commands
Public Connectors, commandsMap As Map
' SQL object for interacting with the local users and logs database (SQLite)
Public SQL1 As SQL
' SQL object for interacting with the local users database (Authentication)
Public SQL_Auth As SQL ' --- NEW INSTANCE FOR AUTHENTICATION
' SQL object for interacting with the local logs database (Performance/Errors)
Public SQL_Logs As SQL ' --- NEW INSTANCE FOR LOGS
' Defines the storage mode for the Log database (SQL_Logs). Default is DISK.
' Options: "DISK" (persistent) or "MEMORY" (in-memory, lost on exit).
Private const LOG_DB_MODE As String = "DISK"
' Object for securely hashing and verifying passwords
Private bc As BCrypt
' A Java ReentrantLock object to protect Main.Connectors during Hot-Swapping (thread-safety)
Public MainConnectorsLock As JavaObject
' A Java ReentrantLock object to protect the log caches (QueryLogCache and ErrorLogCache)
Public LogCacheLock As JavaObject
' Timer for executing periodic tasks, such as log cleanup
Public timerLogs As Timer
' Map to store the SQLite logging status (True/False) for each DBKey (DB1, DB2, etc.)
Public SQLiteLoggingStatusByDB As Map
' Global flag indicating if AT LEAST one database has SQLite logging enabled
Public IsAnySQLiteLoggingEnabled As Boolean
' Type to encapsulate the result of parameter validation
Type ParameterValidationResult ( _
Success As Boolean, _
ErrorMessage As String, _
ParamsToExecute As List _ ' The final list of parameters to use in the SQL execution
)
' In-memory cache for performance logs (query_logs)
Public QueryLogCache As List
' In-memory cache for error and warning logs
@@ -156,13 +146,11 @@ Sub Process_Globals
Private const VACUUM_CYCLES As Int = 48
' Granular control for TEXT file logging (CSV)
Public TextLoggingStatusByDB As Map
' Main object for managing all connection pools (RDCConnector instances)
Public ConnectionPoolManager1 As ConnectionPoolManager
End Sub
' --- Main application entry point ---
Sub AppStart (Args() As String)
' Initialize Server-Sent Events handler
SSE.Initialize
' Set logger flag based on build mode (DEBUG or RELEASE)
@@ -173,13 +161,13 @@ Sub AppStart (Args() As String)
#else
logger = False
#End If
Log("LOG_CACHE_THRESHOLD: " & LOG_CACHE_THRESHOLD)
Log("Log_Cache_Threshold: " & LOG_CACHE_THRESHOLD)
' Copy web admin panel files if they don't exist
CopiarRecursoSiNoExiste("manager.html", "www")
CopiarRecursoSiNoExiste("login.html", "www")
' Copy root files (configs, start/stop scripts) if they don't exist
CopiarRecursoSiNoExiste("config.properties", "")
' CopiarRecursoSiNoExiste("config.DB2.properties", "")
@@ -189,16 +177,16 @@ Sub AppStart (Args() As String)
CopiarRecursoSiNoExiste("stop.bat", "")
CopiarRecursoSiNoExiste("reiniciaProcesoBow.bat", "")
CopiarRecursoSiNoExiste("reiniciaProcesoPM2.bat", "")
' Initialize the BCrypt password hashing library
bc.Initialize("BC")
' Initialize in-memory log caches
QueryLogCache.Initialize
ErrorLogCache.Initialize
' === 1. Initialize the local user database (SQLite) and log tables ===
InitializeSQLiteDatabase
' === 2. Initialize global maps defined in GlobalParameters.bas ===
GlobalParameters.mpLogs.Initialize
GlobalParameters.mpTotalRequests.Initialize
@@ -228,11 +216,8 @@ Sub AppStart (Args() As String)
' Initialize the lock for log caches
LogCacheLock.InitializeNewInstance("java.util.concurrent.locks.ReentrantLock", Null)
' Initialize the Manager, which in turn initializes all pool wrappers.
ConnectionPoolManager1.Initialize
' === 4. INITIALIZATION OF THE MAIN DATABASE CONNECTOR (DB1) ===
Try
' Initialize the main 'DB1' connector
Dim con1 As RDCConnector
@@ -244,19 +229,19 @@ Sub AppStart (Args() As String)
' Add 'DB1' to the list of active database keys
listaDeCP.Add("DB1")
Log($"Main.AppStart: Connector 'DB1' initialized successfully on port: ${srvr.Port}"$)
' Read the 'enableSQLiteLogs' setting from config.properties (default to 0)
Dim enableLogsSetting As Int = con1.config.GetDefault("enableSQLiteLogs", 0).As(Int)
Dim isEnabled As Boolean = (enableLogsSetting = 1)
' Store the status in the granular map
SQLiteLoggingStatusByDB.Put("DB1", isEnabled)
' Read the 'enableTextLogging' setting
Dim enableTextLogsSetting As Int = con1.config.GetDefault("enableTextLogging", 0).As(Int)
Dim isTextEnabled As Boolean = (enableTextLogsSetting = 1)
' Store the text log status
TextLoggingStatusByDB.Put("DB1", isTextEnabled)
Catch
' This is a critical failure; the server cannot start without DB1
Dim ErrorMsg As String = $"Main.AppStart: CRITICAL ERROR initializing connector 'DB1': ${LastException.Message}"$
@@ -266,53 +251,53 @@ Sub AppStart (Args() As String)
' Stop the application
ExitApplication
End Try
' === 5. DETECTION AND INITIALIZATION OF ADDITIONAL DATABASES (DB2, DB3, DB4) ===
' Scan the application's root directory for configuration files
cpFiles = File.ListFiles("./")
If cpFiles.Size > 0 Then
For Each fileName As String In cpFiles
Dim keyPrefix As String = "config."
Dim keySuffix As String = ".properties"
' 1. Filter and exclude DB1 (which is already loaded)
' Find files matching "config.xxx.properties" but not "config.properties"
If fileName.StartsWith(keyPrefix) And fileName.EndsWith(keySuffix) And fileName <> "config.properties" Then
Try
' 2. Extract the key ("xxx" from config.xxx.properties)
Dim keyLength As Int = fileName.Length - keySuffix.Length
Dim dbKey As String = fileName.SubString2(keyPrefix.Length, keyLength)
' ROBUSTNESS: Ensure the key is UPPERCASE for consistency.
' Handlers normalize the key to uppercase, so we must match that.
dbKey = dbKey.ToUpperCase.Trim
Log($"Main.AppStart: Configuration file detected: '${fileName}'. Initializing connector '${dbKey}'."$)
Dim newCon As RDCConnector
' 3. Initialize the RDC Connector (which reads its own config.dbKey.properties file)
newCon.Initialize(dbKey)
' 4. Update global structures (Thread-Safe Maps)
Connectors.Put(dbKey, newCon)
listaDeCP.Add(dbKey)
' 5. Granular Logging Logic
' Capture the logging status for this new DB
Dim enableLogsSetting As Int = newCon.config.GetDefault("enableSQLiteLogs", 0).As(Int)
Dim isEnabled As Boolean = (enableLogsSetting = 1)
SQLiteLoggingStatusByDB.Put(dbKey, isEnabled)
' Capture text logging status for this new DB
Dim enableTextLogsSetting As Int = newCon.config.GetDefault("enableTextLogging", 0).As(Int)
Dim isTextEnabled As Boolean = (enableTextLogsSetting = 1)
TextLoggingStatusByDB.Put(dbKey, isTextEnabled)
Log("TEXT LOGGING STATUS BY DB: " & TextLoggingStatusByDB)
' Note: Global re-evaluation of IsAnySQLiteLoggingEnabled is done at the end of AppStart.
Catch
' 6. Error Handling: If a file is invalid (e.g., bad credentials, malformed URL),
' the server should log the error but continue trying with the next file.
@@ -323,7 +308,7 @@ Sub AppStart (Args() As String)
End If
Next
End If
' Final log of all databases the server is managing.
Dim sbListaDeCP_Log As StringBuilder
sbListaDeCP_Log.Initialize
@@ -336,10 +321,10 @@ Sub AppStart (Args() As String)
Log($"Main.AppStart: Configured and ready databases: [${sbListaDeCP_Log.ToString}]"$)
' <<<< Initialization block for the log cleanup Timer >>>>
' UNCONDITIONAL Initialization of the Timer (Ensures the object exists and prevents IllegalStateException)
timerLogs.Initialize("TimerLogs", 1800000) ' 30 minutes = 1800 * 1000 = 1800000 ms
' CONDITIONAL CONTROL BASED ON GRANULAR STATUS
IsAnySQLiteLoggingEnabled = False
For Each dbStatus As Boolean In SQLiteLoggingStatusByDB.Values
@@ -348,7 +333,7 @@ Sub AppStart (Args() As String)
Exit ' If one is active, it's enough to turn on the Timer
End If
Next
If IsAnySQLiteLoggingEnabled Then
timerLogs.Enabled = True
If logger Then Log("Main.AppStart: Log cleanup timer ACTIVATED (at least one DB requires logs).")
@@ -385,140 +370,75 @@ End Sub
' --- Subroutine to initialize the local user database (SQLite) ---
Sub InitializeSQLiteDatabase
' NOTE(review): this span appears to be a rendered diff with the +/- markers stripped.
' Both the pre-refactor single-connection code (SQL1.*) and its post-refactor
' replacement (SQL_Auth for users, SQL_Logs for logs) are present, so the control
' flow below is unbalanced (e.g. the If on the next code line has no matching
' End If, and dbFileName is declared twice). Reconcile against the repository
' before treating this text as compilable source.
Dim dbFileName As String = "users.db"
' Check if the database file already exists
If File.Exists(File.DirApp, dbFileName) = False Then
' --- Create a new database ---
Log("Creating new user database: " & dbFileName)
SQL1.InitializeSQLite(File.DirApp, dbFileName, True)
' Create 'users' table
Dim createUserTable As String = "CREATE TABLE users (username TEXT PRIMARY KEY, password_hash TEXT NOT NULL)"
SQL1.ExecNonQuery(createUserTable)
' NOTE(review): from here the post-refactor side begins — dbFileName is redeclared.
Private dbFileName As String = "users.db"
Private dbDirName As String = GlobalParameters.WorkingDirectory
' --- Configuration for SQL_Logs based on LOG_DB_MODE ---
Private logDirName As String = dbDirName
Private logFileName As String = dbFileName
Private isInMemoryMode As Boolean = (LOG_DB_MODE = "MEMORY")
If isInMemoryMode Then
' For in-memory databases, use the special filename ":memory:" and empty directory.
logDirName = ""
logFileName = ":memory:"
If logger Then Log("NOTICE: SQL_Logs initialized as IN-MEMORY database (data is non-persistent).")
Else
If logger Then Log($"NOTICE: SQL_Logs initialized as DISK database: ${dbFileName}"$)
End If
' Create 'query_logs' table
If logger Then Log("Creating 'query_logs' table with performance columns.")
Dim createQueryLogsTable As String = "CREATE TABLE query_logs (id INTEGER PRIMARY KEY AUTOINCREMENT, query_name TEXT, duration_ms INTEGER, timestamp INTEGER, db_key TEXT, client_ip TEXT, busy_connections INTEGER, handler_active_requests INTEGER)"
SQL1.ExecNonQuery(createQueryLogsTable)
' Initialize SQL_Auth (always points to the disk file for user persistence).
SQL_Auth.InitializeSQLite(dbDirName, dbFileName, True)
' Initialize SQL_Logs (points to disk file or :memory:)
SQL_Logs.InitializeSQLite(logDirName, logFileName, True)
' Set PRAGMA for better performance (Write-Ahead Logging)
SQL1.ExecNonQuery("PRAGMA journal_mode=WAL;")
SQL1.ExecNonQuery("PRAGMA synchronous=NORMAL;")
' Check if schema creation/migration is necessary.
' This is true if the disk file is brand new OR if we are running in memory mode.
Private isNewDbFile As Boolean = File.Exists(dbDirName, dbFileName) = False
' Insert default user
Dim defaultUser As String = "admin"
Dim defaultPass As String = "admin"
Dim hashedPass As String = bc.hashpw(defaultPass, bc.gensalt)
SQL1.ExecNonQuery2("INSERT INTO users (username, password_hash) VALUES (?, ?)", Array As Object(defaultUser, hashedPass))
Log($"Default user created -> user: ${defaultUser}, pass: ${defaultPass}"$)
' Create 'errores' (errors) table
Log("Creating 'errores' table for event logging.")
Dim createErrorsTable As String = "CREATE TABLE errores (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, type TEXT, source TEXT, message TEXT, db_key TEXT, command_name TEXT, client_ip TEXT)"
SQL1.ExecNonQuery(createErrorsTable)
If logger Then Log("Creating performance indexes on log tables.")
' Index on timestamp for fast cleanup (DELETE/ORDER BY) in query_logs
SQL1.ExecNonQuery("CREATE INDEX idx_query_timestamp ON query_logs(timestamp)")
' Index on duration_ms for the 'slowqueries' query (ORDER BY)
SQL1.ExecNonQuery("CREATE INDEX idx_query_duration ON query_logs(duration_ms)")
' Index on timestamp for fast cleanup of the errors table
SQL1.ExecNonQuery("CREATE INDEX idx_error_timestamp ON errores(timestamp)")
' NOTE(review): the schema below (post-refactor) duplicates the CREATE TABLE /
' CREATE INDEX statements issued through SQL1 above (pre-refactor) — diff artifact.
If isNewDbFile Or isInMemoryMode Then
If logger Then Log("Schema creation required (New DB file or In-Memory mode).")
' 1. TABLE CREATION (Done via SQL_Logs instance, as it handles the schema)
SQL_Logs.ExecNonQuery("CREATE TABLE users (username TEXT PRIMARY KEY, password_hash TEXT, last_login_timestamp INTEGER, is_admin INTEGER DEFAULT 0)")
SQL_Logs.ExecNonQuery("CREATE TABLE query_logs (query_name TEXT, duration_ms INTEGER, timestamp INTEGER, db_key TEXT, client_ip TEXT, busy_connections INTEGER, handler_active_requests INTEGER, timestamp_text_local TEXT)")
SQL_Logs.ExecNonQuery("CREATE TABLE errores (timestamp INTEGER, type TEXT, source TEXT, message TEXT, db_key TEXT, command_name TEXT, client_ip TEXT)")
' 2. INDEX CREATION (Done via SQL_Logs instance)
SQL_Logs.ExecNonQuery("CREATE INDEX idx_query_timestamp ON query_logs(timestamp)")
SQL_Logs.ExecNonQuery("CREATE INDEX idx_query_duration ON query_logs(duration_ms)")
SQL_Logs.ExecNonQuery("CREATE INDEX idx_query_dbkey ON query_logs(db_key)") ' --- NEW INDEX: CRITICAL FOR MULTI-DB REPORTS
SQL_Logs.ExecNonQuery("CREATE INDEX idx_error_timestamp ON errores(timestamp)")
' 3. PRAGMAS (Applied to both to ensure consistency in WAL mode)
SQL_Logs.ExecNonQuery("PRAGMA journal_mode=WAL")
SQL_Logs.ExecNonQuery("PRAGMA synchronous=NORMAL")
SQL_Auth.ExecNonQuery("PRAGMA journal_mode=WAL")
SQL_Auth.ExecNonQuery("PRAGMA synchronous=NORMAL")
Else
' --- Load existing database ---
SQL1.InitializeSQLite(File.DirApp, dbFileName, True)
Log("User database loaded.")
' Ensure WAL mode is set on existing DBs
SQL1.ExecNonQuery("PRAGMA journal_mode=WAL;")
SQL1.ExecNonQuery("PRAGMA synchronous=NORMAL;")
' Load existing database (DISK Mode)
If logger Then Log("Existing users.db found. Applying PRAGMAS and checking migrations.")
' >>> START: Migration logic (ALTER TABLE) if the DB already existed <<<
If logger Then Log("Verifying and migrating 'query_logs' table if necessary.")
' Check if 'query_logs' table exists
If SQL1.ExecQuerySingleResult("SELECT name FROM sqlite_master WHERE type='table' AND name='query_logs'") = Null Then
If logger Then Log("'query_logs' table not found, creating it with performance columns.")
Dim createQueryLogsTable As String = "CREATE TABLE query_logs (id INTEGER PRIMARY KEY AUTOINCREMENT, query_name TEXT, duration_ms INTEGER, timestamp INTEGER, db_key TEXT, client_ip TEXT, busy_connections INTEGER, handler_active_requests INTEGER)"
SQL1.ExecNonQuery(createQueryLogsTable)
Else
' If the query_logs table already exists, check and add missing columns
Dim columnExists As Boolean
Dim rs As ResultSet
' --- VERIFY AND ADD busy_connections ---
columnExists = False
rs = SQL1.ExecQuery("PRAGMA table_info(query_logs)")
Do While rs.NextRow
If rs.GetString("name").EqualsIgnoreCase("busy_connections") Then
columnExists = True
Exit ' Column already exists, exit loop
End If
Loop
rs.Close
If columnExists = False Then
If logger Then Log("Adding column 'busy_connections' to query_logs.")
SQL1.ExecNonQuery("ALTER TABLE query_logs ADD COLUMN busy_connections INTEGER DEFAULT 0")
End If
' --- VERIFY AND ADD handler_active_requests ---
columnExists = False
rs = SQL1.ExecQuery("PRAGMA table_info(query_logs)")
Do While rs.NextRow
If rs.GetString("name").EqualsIgnoreCase("handler_active_requests") Then
columnExists = True
Exit ' Column already exists, exit loop
End If
Loop
rs.Close
If columnExists = False Then
If logger Then Log("Adding column 'handler_active_requests' to query_logs.")
SQL1.ExecNonQuery("ALTER TABLE query_logs ADD COLUMN handler_active_requests INTEGER DEFAULT 0")
End If
' --- VERIFY AND ADD timestamp_text_local ---
columnExists = False
rs = SQL1.ExecQuery("PRAGMA table_info(query_logs)")
Do While rs.NextRow
If rs.GetString("name").EqualsIgnoreCase("timestamp_text_local") Then
columnExists = True
Exit ' Column already exists, exit loop
End If
Loop
rs.Close
If columnExists = False Then
If logger Then Log("Adding column 'timestamp_text_local' to query_logs.")
' Use 'TEXT' to store the formatted date/time string.
SQL1.ExecNonQuery("ALTER TABLE query_logs ADD COLUMN timestamp_text_local TEXT")
End If
' >>> START: Migration logic for 'errores' if DB already existed <<<
If logger Then Log("Verifying and migrating 'errores' table if necessary.")
If SQL1.ExecQuerySingleResult("SELECT name FROM sqlite_master WHERE type='table' AND name='errores'") = Null Then
If logger Then Log("'errores' table not found, creating it.")
Dim createErrorsTable As String = "CREATE TABLE errores (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, type TEXT, source TEXT, message TEXT, db_key TEXT, command_name TEXT, client_ip TEXT)"
SQL1.ExecNonQuery(createErrorsTable)
Else
If logger Then Log("'errores' table already exists.")
End If
' >>> END: Migration logic for 'errores' <<<
' Ensure PRAGMAS are set on both connections
SQL_Logs.ExecNonQuery("PRAGMA journal_mode=WAL")
SQL_Logs.ExecNonQuery("PRAGMA synchronous=NORMAL")
SQL_Auth.ExecNonQuery("PRAGMA journal_mode=WAL")
SQL_Auth.ExecNonQuery("PRAGMA synchronous=NORMAL")
' >>> Migration Logic (INDEX VERIFICATION) <<<
' Migration check must run on SQL_Logs
' --- VERIFY AND ADD idx_query_dbkey INDEX ---
If SQL_Logs.ExecQuerySingleResult($"SELECT name FROM sqlite_master WHERE type='index' AND name='idx_query_dbkey'"$) = Null Then
If logger Then Log("Adding index 'idx_query_dbkey' to query_logs.")
SQL_Logs.ExecNonQuery("CREATE INDEX idx_query_dbkey ON query_logs(db_key)")
End If
' >>> END: Migration logic (ALTER TABLE) <<<
' (Migration logic for other assumed columns/tables should use SQL_Logs)
End If
End Sub
@@ -606,61 +526,55 @@ Public Sub WriteQueryLogsBatch
logsToWrite.Initialize ' 1. Initialize the local list (CRITICAL)
' === STEP 1: Atomic Cache Swap (Protected by ReentrantLock) ===
LogCacheLock.RunMethod("lock", Null)
If QueryLogCache.Size = 0 Then
' Cache is empty, release lock and return
LogCacheLock.RunMethod("unlock", Null)
Return
End If
' *** CRITICAL FIX: Copy content (AddAll) instead of reference. ***
logsToWrite.AddAll(QueryLogCache)
Dim batchSize As Int = logsToWrite.Size
' Clear the global cache. logsToWrite now holds the copy of the items.
QueryLogCache.Initialize
LogCacheLock.RunMethod("unlock", Null)
' Check if text logging is enabled for any of these logs
If logsToWrite.Size > 0 Then
' Call the text archiving sub on a separate worker thread.
' This is NON-BLOCKING for the current thread, which will proceed to the SQLite transaction.
CallSubDelayed2(Me, "ArchiveQueryLogsToDailyFile", logsToWrite)
End If
' === STEP 2: Transactional Write to SQLite ===
' === STEP 2: Transactional Write to SQLite ===
Try
' 1. Begin the transaction: Everything that follows is a single disk operation.
SQL1.BeginTransaction
' 1. Begin the transaction on the dedicated LOGS instance.
SQL_Logs.BeginTransaction
For Each logEntry As Map In logsToWrite
' Insert the log entry
SQL1.ExecNonQuery2("INSERT INTO query_logs (query_name, duration_ms, timestamp, db_key, client_ip, busy_connections, handler_active_requests, timestamp_text_local) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", _
SQL_Logs.ExecNonQuery2("INSERT INTO query_logs (query_name, duration_ms, timestamp, db_key, client_ip, busy_connections, handler_active_requests, timestamp_text_local) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", _
Array As Object(logEntry.Get("query_name"), logEntry.Get("duration_ms"), logEntry.Get("timestamp"), logEntry.Get("db_key"), _
logEntry.Get("client_ip"), logEntry.Get("busy_connections"), logEntry.Get("handler_active_requests"), _
logEntry.Get("timestamp_text_local")))
logEntry.Get("client_ip"), logEntry.Get("busy_connections"), logEntry.Get("handler_active_requests"), _
logEntry.Get("timestamp_text_local")))
Next
' 2. Finalize the transaction: Efficient write to disk.
SQL1.TransactionSuccessful
SQL_Logs.TransactionSuccessful
If logger Then Log($"[LOG BATCH] Batch of ${batchSize} performance logs written successfully."$)
Catch
' If it fails, undo all logs in this batch and log the failure.
SQL1.Rollback
SQL_Logs.Rollback
Dim ErrorMsg As String = "CRITICAL ERROR: Failed to write performance log batch to SQLite: " & LastException.Message
Log(ErrorMsg)
' Use LogServerError so the failure is recorded in the 'errores' table (if logging is enabled)
LogServerError("ERROR", "Main.WriteQueryLogsBatch", ErrorMsg, Null, "log_batch_write_performance", Null)
End Try
End Sub
' --- Event subroutine for the 'timerLogs' Timer. ---
@@ -674,7 +588,7 @@ Sub TimerLogs_Tick
WriteErrorLogsBatch
' 3. Clean up and VACUUM (this sub also checks IsAnySQLiteLoggingEnabled)
borraArribaDe30000Logs
borraArribaDe10000Logs
Catch
Dim ErrorMsg As String = "ERROR in TimerLogs_Tick while trying to clear logs: " & LastException.Message
@@ -685,114 +599,115 @@ End Sub
' Writes the cached error logs to the SQLite DB in a single transaction
Public Sub WriteErrorLogsBatch
' NOTE(review): this span looks like a rendered diff with the +/- markers stripped.
' Both pre-refactor statements (SQL1.*) and their post-refactor replacements
' (SQL_Logs.*) are present, so as written each row would be inserted twice and
' the lock released twice (a second ReentrantLock unlock on a lock held once
' fails). Confirm against the repository which side is current before editing.
' List to store error logs copied from the cache
Dim logsToWrite As List
logsToWrite.Initialize
' === STEP 1: Atomic Cache Swap (Protected by ReentrantLock) ===
' Lock the LogCacheLock to guarantee atomicity of the copy and cleanup.
' Lock LogCacheLock to guarantee atomicity of copy and cleanup.
LogCacheLock.RunMethod("lock", Null)
If ErrorLogCache.Size = 0 Then
' The cache is empty; release the lock immediately and exit.
' Cache is empty, release the lock immediately and exit.
LogCacheLock.RunMethod("unlock", Null)
Return
End If
' *** Copy the global cache content atomically. ***
' *** Atomically copy global cache content. ***
logsToWrite.AddAll(ErrorLogCache)
' Use the size of the copied list for processing.
Dim batchSize As Int = logsToWrite.Size
' Log(logsToWrite)
' Clear the global cache. logsToWrite is now an independent, populated list.
' Clean the global cache. logsToWrite is now an independent and populated list.
ErrorLogCache.Initialize
LogCacheLock.RunMethod("unlock", Null) ' Release the lock.
LogCacheLock.RunMethod("unlock", Null) ' Release the lock. (NOTE(review): duplicate of the previous line — diff artifact)
If logger Then Log($"[LOG BATCH] Starting transactional write of ${batchSize} ERROR logs to SQLite. Logs copied: ${batchSize}"$)
' === The logic fix happens here: the call to ArchiveErrorLogsToDailyFile
' and the transactional process happen NOW, after ensuring that logsToWrite
' has content and that the lock was released. ===
' 1. (Optional, if CSV text logging is enabled)
' === STEP 1: Archive to daily CSV file (if enabled) ===
If batchSize > 0 Then
' Delegate to a new subroutine to handle CSV disk I/O (CallSubDelayed2)
' Delegate to a new subroutine to handle CSV disk I/O (CallSubDelayed2)
CallSubDelayed2(Me, "ArchiveErrorLogsToDailyFile", logsToWrite)
End If
' === STEP 2: Transactional write to SQLite (uses logsToWrite) ===
' === STEP 2: Transactional Write to SQLite (Uses logsToWrite) ===
If batchSize = 0 Then
' This case should not occur given the logic above, but it is a safety check.
Log("WARNING: Failed to copy list. logsToWrite is empty. Aborting write.")
Return
End If
Try
' 1. Begin the transaction.
SQL1.BeginTransaction
' 1. Start transaction on the dedicated LOGS instance.
SQL_Logs.BeginTransaction
For Each logEntry As Map In logsToWrite
' Insert the log entry (NOTE(review): the SQL1 insert below is the pre-refactor line)
SQL1.ExecNonQuery2("INSERT INTO errores (timestamp, type, source, message, db_key, command_name, client_ip) VALUES (?, ?, ?, ?, ?, ?, ?)", _
Array As Object(logEntry.Get("timestamp"), logEntry.Get("type"), logEntry.Get("source"), logEntry.Get("message"), _
logEntry.Get("db_key"), logEntry.Get("command_name"), logEntry.Get("client_ip")))
' Insert the log entry
SQL_Logs.ExecNonQuery2("INSERT INTO errores (timestamp, type, source, message, db_key, command_name, client_ip) VALUES (?, ?, ?, ?, ?, ?, ?)", _
Array As Object(logEntry.Get("timestamp"), logEntry.Get("type"), logEntry.Get("source"), logEntry.Get("message"), _
logEntry.Get("db_key"), logEntry.Get("command_name"), logEntry.Get("client_ip")))
Next
' 2. Commit the transaction.
SQL1.TransactionSuccessful
' 2. Commit the transaction.
SQL_Logs.TransactionSuccessful
If logger Then Log($"[LOG BATCH] Batch of ${logsToWrite.Size} ERROR logs written successfully."$)
Catch
' 3. Rollback if it fails.
SQL1.Rollback
' 3. Rollback if failed.
SQL_Logs.Rollback
Dim ErrorMsg As String = "CRITICAL ERROR: Failed to write ERROR log batch to SQLite: " & LastException.Message
Log(ErrorMsg)
End Try
End Sub
' Deletes the oldest records from 'query_logs' table and runs VACUUM.
Sub borraArribaDe30000Logs 'ignore
' NOTE(review): this span contains TWO Sub headers (borraArribaDe30000Logs and
' borraArribaDe10000Logs) sharing a single End Sub — it reads as a stripped diff
' where the old 30,000-record routine (SQL1, hard-coded offsets) and the renamed
' 10,000-record routine (SQL_Logs, constants) are interleaved. Reconcile against
' the repository before treating this as compilable source.
If IsAnySQLiteLoggingEnabled Then ' Only run if at least one DB requires logs.
' 1. Cleanup of Performance Logs (query_logs)
If logger Then Log("Trimming 'query_logs' table, limit of 30,000 records.")
Dim fechaCorte As Long ' (cutoff date/timestamp)
' First, try to find the timestamp of the 30,001st record.
Try ' OFFSET 30000 skips the 30,000 most recent.
fechaCorte = SQL1.ExecQuerySingleResult($"SELECT timestamp FROM query_logs ORDER BY timestamp DESC LIMIT 1 OFFSET 30000"$)
Catch ' If the table has fewer than 30,000 records, the result is NULL or throws an exception.
fechaCorte = 0 ' Force to 0 so it doesn't delete anything.
End Try
' If a cutoff time was found (i.e., there are more than 30,000 records)...
If fechaCorte > 0 Then ' Execute the simple DELETE, which is very fast using the idx_query_timestamp index.
SQL1.ExecNonQuery2("DELETE FROM query_logs WHERE timestamp < ?", Array As Object(fechaCorte))
End If
Sub borraArribaDe10000Logs 'ignore
Private Const LOG_LIMIT_PERFORMANCE As Int = 10000 ' New limit for performance logs
Private Const LOG_LIMIT_ERRORS As Int = 10000 ' Limit for error logs (reduced from the previous 15,000)
If IsAnySQLiteLoggingEnabled Then ' Only run if at least one DB requires logs.
' 1. Cleanup of Performance Logs (query_logs)
If logger Then Log($"Trimming 'query_logs' table, limit of ${LOG_LIMIT_PERFORMANCE} records."$)
Dim fechaCorte As Long ' (cutoff date/timestamp)
' Find the timestamp of the (LOG_LIMIT_PERFORMANCE + 1)st record using SQL_Logs
Try ' OFFSET skips the most recent records.
fechaCorte = SQL_Logs.ExecQuerySingleResult($"SELECT timestamp FROM query_logs ORDER BY timestamp DESC LIMIT 1 OFFSET ${LOG_LIMIT_PERFORMANCE}"$)
Catch ' If the table has fewer records than the limit.
fechaCorte = 0
End Try
If fechaCorte > 0 Then ' Execute DELETE on SQL_Logs
SQL_Logs.ExecNonQuery2("DELETE FROM query_logs WHERE timestamp < ?", Array As Object(fechaCorte))
End If
' 2. Cleanup of Error Logs (errores)
Dim fechaCorteError As Long
Try ' OFFSET 15000 skips the 15,000 most recent.
fechaCorteError = SQL1.ExecQuerySingleResult($"SELECT timestamp FROM errores ORDER BY timestamp DESC LIMIT 1 OFFSET 15000"$)
Catch ' If the table has fewer than 15,000 records, result is NULL.
Try ' OFFSET LOG_LIMIT_ERRORS skips the most recent.
fechaCorteError = SQL_Logs.ExecQuerySingleResult($"SELECT timestamp FROM errores ORDER BY timestamp DESC LIMIT 1 OFFSET ${LOG_LIMIT_ERRORS}"$)
Catch ' If the table has fewer than LOG_LIMIT_ERRORS records.
fechaCorteError = 0
End Try
' If a cutoff time was found...
If fechaCorteError > 0 Then
SQL1.ExecNonQuery2("DELETE FROM errores WHERE timestamp < ?", Array As Object(fechaCorteError))
SQL_Logs.ExecNonQuery2("DELETE FROM errores WHERE timestamp < ?", Array As Object(fechaCorteError))
End If
' 3. Control and Conditional Execution of VACUUM
TimerTickCount = TimerTickCount + 1
If TimerTickCount >= VACUUM_CYCLES Then
If logger Then Log("EXECUTING VACUUM (24-hour cycle completed).")
SQL1.ExecNonQuery("vacuum;") ' Execute VACUUM.
SQL_Logs.ExecNonQuery("vacuum;") ' Execute VACUUM on SQL_Logs.
TimerTickCount = 0 ' Reset the counter.
Else
' Show how many cycles are left, only if logger is active.
If logger Then Log($"VACUUM skipped. ${VACUUM_CYCLES - TimerTickCount} cycles remaining until daily execution."$)
End If
Else
' If IsAnySQLiteLoggingEnabled is False, the Timer should not be active.
If logger Then Log("NOTICE: Log cleanup task skipped. Global SQLite logging is disabled.")
End If
End Sub