Compare commits


2 commits

Author      SHA1        Message                                        Date
ClaraCrazy  a2d3d54adf  [Feat]: Add full multi-instance support to db  2026-01-06 21:15:24 +01:00
ClaraCrazy  dc79d52245  [Chore]: Patch getLargestUid function          2026-01-06 19:46:45 +01:00
                        Now preventing uid-drift. Also fixing the
                        monstrosity that was statistics-store formatting.
3 changed files with 112 additions and 68 deletions

View file

@@ -31,6 +31,22 @@ class MailProcessingService extends EventEmitter {
     setInterval(() => {
       this._deleteOldMails()
     }, this.config.imap.refreshIntervalSeconds * 1000)
+
+    // Periodically ground largestUid to the real IMAP state
+    setInterval(async () => {
+      try {
+        if (this.statisticsStore && this.imapService) {
+          const realLargestUid = await this.imapService.getLargestUid();
+          if (realLargestUid && realLargestUid !== this.statisticsStore.largestUid) {
+            this.statisticsStore.largestUid = realLargestUid;
+            this.statisticsStore._saveToDatabase && this.statisticsStore._saveToDatabase();
+            debug(`Grounded statisticsStore.largestUid to IMAP: ${realLargestUid}`);
+          }
+        }
+      } catch (err) {
+        debug('Error grounding largestUid to IMAP:', err.message);
+      }
+    }, 60 * 1000); // every minute
   }

   _initCache() {
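For context on the uid-drift the second commit message mentions: the store's cached largestUid can outlive a mailbox renumbering, so this interval re-derives it from the server. A minimal sketch of that idea, assuming a hypothetical helper (getLargestUid itself is patched by the second commit but its body is not visible in the hunks shown here):

    // Hypothetical sketch, not code from this PR: derive the largest UID
    // from a fresh server-side listing instead of trusting a cached counter.
    function largestUidFrom(uids) {
      return uids.length ? Math.max(...uids) : 0;
    }

    // If the mailbox was rebuilt and messages renumbered, a cached value of
    // e.g. 9000 is stale; the server-derived value wins.
    console.log(largestUidFrom([101, 97, 350])); // 350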

View file

@@ -1,5 +1,6 @@
 const debug = require('debug')('48hr-email:stats-store');
 const config = require('../application/config');
+const crypto = require('crypto');

 /**
  * Statistics Store - Tracks email metrics and historical data
@@ -20,12 +21,57 @@ class StatisticsStore {
     this.enhancedStats = null;
     this.lastEnhancedStatsTime = 0;
     this.enhancedStatsCacheDuration = 5 * 60 * 1000; // Cache for 5 minutes

+    // Compute IMAP hash (user/server/port)
+    this.imapHash = this._computeImapHash();
+
     if (this.db) {
+      this._autoMigrateInstanceId();
+      this._autoMigrateImapHash();
       this._loadFromDatabase();
     }

     debug('Statistics store initialized');
   }

+  _autoMigrateInstanceId() {
+    // Add and backfill instance_id for the statistics table
+    try {
+      const pragma = this.db.prepare("PRAGMA table_info(statistics)").all();
+      const hasInstanceId = pragma.some(col => col.name === 'instance_id');
+      if (!hasInstanceId) {
+        this.db.prepare('ALTER TABLE statistics ADD COLUMN instance_id TEXT').run();
+      }
+      // Backfill all rows
+      this.db.prepare('UPDATE statistics SET instance_id = ? WHERE instance_id IS NULL OR instance_id = ""').run(this.imapHash);
+      debug('Auto-migrated: instance_id column added and backfilled');
+    } catch (e) {
+      debug('Auto-migration for instance_id failed:', e.message);
+    }
+  }
+
+  _autoMigrateImapHash() {
+    // Check if imap_hash column exists; add and backfill if missing
+    try {
+      const pragma = this.db.prepare("PRAGMA table_info(statistics)").all();
+      const hasImapHash = pragma.some(col => col.name === 'imap_hash');
+      if (!hasImapHash) {
+        this.db.prepare('ALTER TABLE statistics ADD COLUMN imap_hash TEXT NULL').run();
+        this.db.prepare('UPDATE statistics SET imap_hash = ?').run(this.imapHash);
+        debug('Auto-migrated: imap_hash column added and backfilled');
+      }
+    } catch (e) {
+      debug('Auto-migration for imap_hash failed:', e.message);
+    }
+  }
+
+  _computeImapHash() {
+    const user = config.imap.user || '';
+    const server = config.imap.server || '';
+    const port = config.imap.port || '';
+    return crypto.createHash('sha256').update(`${user}:${server}:${port}`).digest('hex');
+  }
+
   _getPurgeCutoffMs() {
     const time = config.email.purgeTime.time;
     const unit = config.email.purgeTime.unit;
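To see what _computeImapHash produces, here is a standalone run of the same recipe (the sha256 over `${user}:${server}:${port}` is taken straight from the diff; the sample values are invented):

    const crypto = require('crypto');

    // Invented sample values; any instance with the same user/server/port
    // maps to the same 64-char hex string, which is what makes the hash a
    // stable per-instance identifier across restarts.
    const user = 'inbox@example.com';
    const server = 'imap.example.com';
    const port = 993;

    const imapHash = crypto
      .createHash('sha256')
      .update(`${user}:${server}:${port}`)
      .digest('hex');

    console.log(imapHash.length); // 64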
@@ -48,8 +94,9 @@ class StatisticsStore {
   _loadFromDatabase() {
     try {
-      const stmt = this.db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE id = 1');
-      const row = stmt.get();
+      // Try to load the row for the current imap_hash
+      const stmt = this.db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE imap_hash = ?');
+      const row = stmt.get(this.imapHash);

       if (row) {
         this.largestUid = row.largest_uid || 0;
         if (row.hourly_data) {
@@ -64,6 +111,13 @@ class StatisticsStore {
           }
         }
         debug(`Loaded from database: largestUid=${this.largestUid}, hourlyData=${this.hourlyData.length} entries`);
+      } else {
+        // No row for this hash; insert a new one
+        const insert = this.db.prepare('INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated) VALUES (?, ?, ?, ?)');
+        insert.run(this.imapHash, 0, JSON.stringify([]), Date.now());
+        this.largestUid = 0;
+        this.hourlyData = [];
+        debug('Created new statistics row for imap_hash');
       }
     } catch (error) {
       debug('Failed to load statistics from database:', error.message);
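The new else-branch works because better-sqlite3's Statement#get() returns undefined when no row matches, so a miss needs no extra query. A minimal sketch, assuming an in-memory database:

    const Database = require('better-sqlite3');
    const db = new Database(':memory:');
    db.exec('CREATE TABLE statistics (imap_hash TEXT, largest_uid INTEGER)');

    // .get() yields undefined on a miss, so `if (row) ... else ...` is enough
    // to choose between loading existing stats and inserting a fresh row.
    const row = db
      .prepare('SELECT largest_uid FROM statistics WHERE imap_hash = ?')
      .get('no-such-hash');
    console.log(row); // undefined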
@@ -76,10 +130,17 @@ class StatisticsStore {
       const stmt = this.db.prepare(`
         UPDATE statistics
         SET largest_uid = ?, hourly_data = ?, last_updated = ?
-        WHERE id = 1
+        WHERE imap_hash = ?
       `);
-      stmt.run(this.largestUid, JSON.stringify(this.hourlyData), Date.now());
-      debug('Statistics saved to database');
+      const result = stmt.run(this.largestUid, JSON.stringify(this.hourlyData), Date.now(), this.imapHash);
+      // If no row was updated, insert a new one
+      if (result.changes === 0) {
+        const insert = this.db.prepare('INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated) VALUES (?, ?, ?, ?)');
+        insert.run(this.imapHash, this.largestUid, JSON.stringify(this.hourlyData), Date.now());
+        debug('Inserted new statistics row for imap_hash');
+      } else {
+        debug('Statistics saved to database');
+      }
     } catch (error) {
       debug('Failed to save statistics to database:', error.message);
     }
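The UPDATE-then-INSERT pair is a manual upsert keyed on result.changes (better-sqlite3's count of affected rows). Since the new statistics schema declares no unique index on imap_hash, SQLite's native ON CONFLICT upsert does not apply as-is; a sketch of that alternative, under the assumption that such an index were added:

    const Database = require('better-sqlite3');
    const db = new Database(':memory:');
    db.exec(`CREATE TABLE statistics (
      imap_hash TEXT,
      largest_uid INTEGER NOT NULL DEFAULT 0,
      hourly_data TEXT,
      last_updated INTEGER NOT NULL
    )`);
    // Assumption: a unique index, which the shipped schema does not declare.
    db.exec('CREATE UNIQUE INDEX idx_statistics_imap_hash ON statistics(imap_hash)');

    // One statement replaces the UPDATE + fallback-INSERT dance above.
    db.prepare(`
      INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated)
      VALUES (?, ?, ?, ?)
      ON CONFLICT(imap_hash) DO UPDATE SET
        largest_uid = excluded.largest_uid,
        hourly_data = excluded.hourly_data,
        last_updated = excluded.last_updated
    `).run('abc123', 42, '[]', Date.now());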
@@ -405,9 +466,7 @@ class StatisticsStore {
   recordDelete() {
     this.currentCount = Math.max(0, this.currentCount - 1)
     this._addDataPoint('delete')
-    debug(`
-Email deleted.Current: $ { this.currentCount }
-`)
+    debug(`Email deleted. Current: ${this.currentCount}`)
   }

   /**
@@ -415,8 +474,7 @@ class StatisticsStore {
    */
   recordForward() {
     this._addDataPoint('forward')
-    debug(`
-Email forwarded `)
+    debug(`Email forwarded. Current: ${this.currentCount}`)
   }

   /**
@@ -470,17 +528,11 @@ class StatisticsStore {
     const now = Date.now()

     if (this.enhancedStats && (now - this.lastEnhancedStatsTime) < this.enhancedStatsCacheDuration) {
-      debug(`
-Using cached enhanced stats(age: $ { Math.round((now - this.lastEnhancedStatsTime) / 1000) }
-s)
-`)
+      debug(`Using cached enhanced stats (age: ${Math.round((now - this.lastEnhancedStatsTime) / 1000)}s)`)
       return
     }

-    debug(`
-Calculating enhanced statistics from $ { allMails.length }
-emails `)
+    debug(`Calculating enhanced statistics from ${allMails.length} emails`)

     // Track sender domains (privacy-friendly: domain only, not full address)
     const senderDomains = new Map()
     const recipientDomains = new Map()
@@ -591,10 +643,7 @@ class StatisticsStore {
     }

     this.lastEnhancedStatsTime = now
-    debug(`
-Enhanced stats calculated: $ { this.enhancedStats.uniqueSenderDomains }
-unique sender domains, $ { this.enhancedStats.busiestHours.length }
-busy hours `)
+    debug(`Enhanced stats calculated: ${this.enhancedStats.uniqueSenderDomains} unique sender domains, ${this.enhancedStats.busiestHours.length} busy hours`)
   }

   /**
@@ -610,18 +659,11 @@ class StatisticsStore {
     // Check cache - if analysis was done recently, skip it
     const now = Date.now()
     if (this.historicalData && (now - this.lastAnalysisTime) < this.analysisCacheDuration) {
-      debug(`
-Using cached historical data($ { this.historicalData.length }
-points, age: $ { Math.round((now - this.lastAnalysisTime) / 1000) }
-s)
-`)
+      debug(`Using cached historical data (${this.historicalData.length} points, age: ${Math.round((now - this.lastAnalysisTime) / 1000)}s)`)
       return
     }

-    debug(`
-Analyzing $ { allMails.length }
-emails
-for historical statistics `)
+    debug(`Analyzing ${allMails.length} emails for historical statistics`)

     const startTime = Date.now()

     // Group emails by minute
@@ -651,10 +693,7 @@ class StatisticsStore {
     this.lastAnalysisTime = now
     const elapsed = Date.now() - startTime
-    debug(`
-Built historical data: $ { this.historicalData.length }
-time buckets in $ { elapsed }
-ms `)
+    debug(`Built historical data: ${this.historicalData.length} time buckets in ${elapsed}ms`)
   }

   /**
@@ -742,11 +781,7 @@ class StatisticsStore {
       .map(([timestamp, receives]) => ({ timestamp, receives }))
       .sort((a, b) => a.timestamp - b.timestamp)

-    debug(`
-Historical timeline: $ { intervalData.length }
-15 - min interval points within $ { config.email.purgeTime.time }
-$ { config.email.purgeTime.unit }
-window `)
+    debug(`Historical timeline: ${intervalData.length} 15-min interval points within ${config.email.purgeTime.time} ${config.email.purgeTime.unit} window`)

     return intervalData
   }
@@ -786,11 +821,7 @@ class StatisticsStore {
       hourlyAverages.set(hour, avg)
     })

-    debug(`
-Built hourly patterns
-for $ { hourlyAverages.size }
-hours from $ { this.historicalData.length }
-data points `)
+    debug(`Built hourly patterns for ${hourlyAverages.size} hours from ${this.historicalData.length} data points`)

     // Generate predictions for a reasonable future window
     // Limit to 20% of purge duration or 12 hours max to maintain chart balance
@@ -826,9 +857,7 @@ class StatisticsStore {
       })
     }

-    debug(`
-Generated $ { predictions.length }
-prediction points based on hourly patterns `)
+    debug(`Generated ${predictions.length} prediction points based on hourly patterns`)

     return predictions
   }
@@ -881,12 +910,7 @@ class StatisticsStore {
     if (beforeCount !== this.hourlyData.length) {
       this._saveToDatabase() // Save after cleanup
-      debug(`
-Cleaned up $ { beforeCount - this.hourlyData.length }
-old data points(keeping data
-for $ { config.email.purgeTime.time }
-$ { config.email.purgeTime.unit })
-`)
+      debug(`Cleaned up ${beforeCount - this.hourlyData.length} old data points (keeping data for ${config.email.purgeTime.time} ${config.email.purgeTime.unit})`)
     }

     this.lastCleanup = now
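A note on why the old debug strings were broken, not just ugly: a template literal only interpolates when `${` is adjacent, so the formatter-mangled `$ { x }` fragments printed literally, embedded newlines included. A quick demonstration:

    const count = 7;

    // Old, formatter-mangled shape: no interpolation, literal newlines.
    console.log(`
    Email deleted.Current: $ { count }
    `);
    // prints the "$ { count }" text verbatim, surrounded by blank lines

    // Fixed shape: one line, real interpolation.
    console.log(`Email deleted. Current: ${count}`); // Email deleted. Current: 7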

View file

@@ -4,11 +4,13 @@
 -- Users table
 CREATE TABLE IF NOT EXISTS users (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
-  username TEXT UNIQUE NOT NULL COLLATE NOCASE,
+  instance_id TEXT NOT NULL,
+  username TEXT NOT NULL COLLATE NOCASE,
   password_hash TEXT NOT NULL,
   created_at INTEGER NOT NULL,
   last_login INTEGER,
-  CHECK (length(username) >= 3 AND length(username) <= 20)
+  CHECK (length(username) >= 3 AND length(username) <= 20),
+  UNIQUE(instance_id, username)
 );

 CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
@@ -17,12 +19,13 @@ CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at);
 -- User verified forwarding emails
 CREATE TABLE IF NOT EXISTS user_forward_emails (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
+  instance_id TEXT NOT NULL,
   user_id INTEGER NOT NULL,
   email TEXT NOT NULL COLLATE NOCASE,
   verified_at INTEGER NOT NULL,
   created_at INTEGER NOT NULL,
   FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-  UNIQUE(user_id, email)
+  UNIQUE(instance_id, user_id, email)
 );

 CREATE INDEX IF NOT EXISTS idx_forward_emails_user_id ON user_forward_emails(user_id);
@@ -31,13 +34,14 @@ CREATE INDEX IF NOT EXISTS idx_forward_emails_email ON user_forward_emails(email
 -- User locked inboxes
 CREATE TABLE IF NOT EXISTS user_locked_inboxes (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
+  instance_id TEXT NOT NULL,
   user_id INTEGER NOT NULL,
   inbox_address TEXT NOT NULL COLLATE NOCASE,
   password_hash TEXT NOT NULL,
   locked_at INTEGER NOT NULL,
   last_accessed INTEGER NOT NULL,
   FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-  UNIQUE(user_id, inbox_address)
+  UNIQUE(instance_id, user_id, inbox_address)
 );

 CREATE INDEX IF NOT EXISTS idx_locked_inboxes_user_id ON user_locked_inboxes(user_id);
@@ -47,11 +51,13 @@ CREATE INDEX IF NOT EXISTS idx_locked_inboxes_last_accessed ON user_locked_inbox
 -- API tokens (one per user for programmatic access)
 CREATE TABLE IF NOT EXISTS api_tokens (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
-  user_id INTEGER NOT NULL UNIQUE,
+  instance_id TEXT NOT NULL,
+  user_id INTEGER NOT NULL,
   token TEXT NOT NULL UNIQUE,
   created_at INTEGER NOT NULL,
   last_used INTEGER,
-  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
+  FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
+  UNIQUE(instance_id, user_id)
 );

 CREATE INDEX IF NOT EXISTS idx_api_tokens_token ON api_tokens(token);
@@ -59,16 +65,14 @@ CREATE INDEX IF NOT EXISTS idx_api_tokens_user_id ON api_tokens(user_id);
 -- Statistics storage for persistence across restarts
 CREATE TABLE IF NOT EXISTS statistics (
-  id INTEGER PRIMARY KEY CHECK (id = 1), -- Single row table
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  instance_id TEXT NOT NULL,
   largest_uid INTEGER NOT NULL DEFAULT 0,
   hourly_data TEXT, -- JSON array of 24h rolling data
-  last_updated INTEGER NOT NULL
+  last_updated INTEGER NOT NULL,
+  imap_hash TEXT NULL
 );

--- Initialize with default row if not exists
-INSERT OR IGNORE INTO statistics (id, largest_uid, hourly_data, last_updated)
-VALUES (1, 0, '[]', 0);
-
 -- Trigger to enforce max 5 locked inboxes per user
 CREATE TRIGGER IF NOT EXISTS check_locked_inbox_limit
   BEFORE INSERT ON user_locked_inboxes
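One practical consequence of the widened UNIQUE constraints: every lookup now has to be scoped by instance_id, or instances sharing one database file would collide on usernames. A sketch of a scoped query (getUser is an illustrative helper, not code from this PR; the database path is an assumption, table and column names come from the schema above):

    const Database = require('better-sqlite3');
    const db = new Database('db/users.db'); // path is an assumption

    // Illustrative helper: a username is only unique per instance now, so
    // both values are needed to identify a single row.
    function getUser(instanceId, username) {
      return db
        .prepare('SELECT * FROM users WHERE instance_id = ? AND username = ?')
        .get(instanceId, username);
    }

    // Two instances can hold the same username without violating
    // UNIQUE(instance_id, username): getUser(hashA, 'clara') and
    // getUser(hashB, 'clara') refer to distinct rows.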