Mirror of https://github.com/Crazyco-xyz/48hr.email.git (synced 2026-02-14 17:19:35 +01:00)
Compare commits

No commits in common. "a2d3d54adf2113ade9b5e19170ae3a8db5af9b32" and "d8b19dcd2688eb93036aec4d9f31592d6f38d078" have entirely different histories: a2d3d54adf ... d8b19dcd26.

3 changed files with 68 additions and 112 deletions
@@ -31,22 +31,6 @@ class MailProcessingService extends EventEmitter {
 		setInterval(() => {
 			this._deleteOldMails()
 		}, this.config.imap.refreshIntervalSeconds * 1000)
-
-		// Periodically ground largestUid to IMAP state every 5 minutes
-		setInterval(async() => {
-			try {
-				if (this.statisticsStore && this.imapService) {
-					const realLargestUid = await this.imapService.getLargestUid();
-					if (realLargestUid && realLargestUid !== this.statisticsStore.largestUid) {
-						this.statisticsStore.largestUid = realLargestUid;
-						this.statisticsStore._saveToDatabase && this.statisticsStore._saveToDatabase();
-						debug(`Grounded statisticsStore.largestUid to IMAP: ${realLargestUid}`);
-					}
-				}
-			} catch (err) {
-				debug('Error grounding largestUid to IMAP:', err.message);
-			}
-		}, 60 * 1000); // 1 minute
 	}

 	_initCache() {
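The block removed above is a reconcile-on-timer loop: the IMAP server is treated as the source of truth for `largestUid`, and the cached value in `statisticsStore` is overwritten whenever the two diverge. (Note the removed lead comment promises "every 5 minutes" while the timer actually fires every 60 seconds, matching its trailing `// 1 minute` comment.) A condensed sketch of the same pattern, with illustrative names standing in for `imapService` and `statisticsStore`:

	// Periodically reconcile a cached value against an external source of truth.
	// `fetchTruth` and `store` are illustrative stand-ins, not names from the diff.
	function startReconciler(fetchTruth, store, intervalMs = 60 * 1000) {
		return setInterval(async () => {
			try {
				const truth = await fetchTruth()
				if (truth && truth !== store.largestUid) {
					store.largestUid = truth // the external system wins on divergence
					if (store.save) store.save() // persist only when something changed
				}
			} catch (err) {
				// A failed poll must not crash the process; log and retry on the next tick.
				console.error('reconcile failed:', err.message)
			}
		}, intervalMs)
	}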
@@ -1,6 +1,5 @@
 const debug = require('debug')('48hr-email:stats-store');
 const config = require('../application/config');
-const crypto = require('crypto');

 /**
  * Statistics Store - Tracks email metrics and historical data
@@ -21,57 +20,12 @@ class StatisticsStore {
 		this.enhancedStats = null;
 		this.lastEnhancedStatsTime = 0;
 		this.enhancedStatsCacheDuration = 5 * 60 * 1000; // Cache for 5 minutes
-
-		// Compute IMAP hash (user/server/port)
-		this.imapHash = this._computeImapHash();

 		if (this.db) {
-			this._autoMigrateInstanceId();
-			this._autoMigrateImapHash();
 			this._loadFromDatabase();
 		}
 		debug('Statistics store initialized');
 	}

-	_autoMigrateInstanceId() {
-		// Add and backfill instance_id for statistics table
-		try {
-			const pragma = this.db.prepare("PRAGMA table_info(statistics)").all();
-			const hasInstanceId = pragma.some(col => col.name === 'instance_id');
-			if (!hasInstanceId) {
-				this.db.prepare('ALTER TABLE statistics ADD COLUMN instance_id TEXT').run();
-			}
-			// Backfill all rows
-			this.db.prepare('UPDATE statistics SET instance_id = ? WHERE instance_id IS NULL OR instance_id = ""').run(this.imapHash);
-			debug('Auto-migrated: instance_id column added and backfilled');
-		} catch (e) {
-			debug('Auto-migration for instance_id failed:', e.message);
-		}
-	}
-
-	_autoMigrateImapHash() {
-		// Check if imap_hash column exists, add and backfill if missing
-		try {
-			const pragma = this.db.prepare("PRAGMA table_info(statistics)").all();
-			const hasImapHash = pragma.some(col => col.name === 'imap_hash');
-			if (!hasImapHash) {
-				this.db.prepare('ALTER TABLE statistics ADD COLUMN imap_hash TEXT NULL').run();
-				this.db.prepare('UPDATE statistics SET imap_hash = ?').run(this.imapHash);
-				debug('Auto-migrated: imap_hash column added and backfilled');
-			}
-		} catch (e) {
-			debug('Auto-migration for imap_hash failed:', e.message);
-		}
-	}
-
-	_computeImapHash() {
-		const user = config.imap.user || '';
-		const server = config.imap.server || '';
-		const port = config.imap.port || '';
-		const hash = crypto.createHash('sha256').update(`${user}:${server}:${port}`).digest('hex');
-		return hash;
-	}
-
 	_getPurgeCutoffMs() {
 		const time = config.email.purgeTime.time;
 		const unit = config.email.purgeTime.unit;
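The three methods deleted above (`_autoMigrateInstanceId`, `_autoMigrateImapHash`, `_computeImapHash`) implement a standard in-place SQLite migration plus a per-instance key: inspect `PRAGMA table_info`, `ALTER TABLE ... ADD COLUMN` if the column is missing, backfill existing rows, and key everything by a stable hash of the IMAP endpoint so one database can serve several IMAP accounts. A condensed sketch of that idea, assuming the same synchronous `db.prepare(...).run()` driver style as the surrounding code (better-sqlite3 or similar; helper names are illustrative):

	const crypto = require('crypto')

	// Add a column if it does not exist yet. PRAGMA and ALTER TABLE cannot take
	// bound parameters, so the identifiers are interpolated, as in the removed code.
	function ensureColumn(db, table, column, type) {
		const cols = db.prepare(`PRAGMA table_info(${table})`).all()
		if (!cols.some(col => col.name === column)) {
			db.prepare(`ALTER TABLE ${table} ADD COLUMN ${column} ${type}`).run()
		}
	}

	// Stable per-instance key derived from the IMAP endpoint.
	function computeImapHash({ user = '', server = '', port = '' }) {
		return crypto.createHash('sha256').update(`${user}:${server}:${port}`).digest('hex')
	}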
@@ -94,9 +48,8 @@ class StatisticsStore {

 	_loadFromDatabase() {
 		try {
-			// Try to load row for current imap_hash
-			const stmt = this.db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE imap_hash = ?');
-			const row = stmt.get(this.imapHash);
+			const stmt = this.db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE id = 1');
+			const row = stmt.get();
 			if (row) {
 				this.largestUid = row.largest_uid || 0;
 				if (row.hourly_data) {
@@ -111,13 +64,6 @@ class StatisticsStore {
 				}
 			}
 			debug(`Loaded from database: largestUid=${this.largestUid}, hourlyData=${this.hourlyData.length} entries`);
-			} else {
-				// No row for this hash, insert new row
-				const insert = this.db.prepare('INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated) VALUES (?, ?, ?, ?)');
-				insert.run(this.imapHash, 0, JSON.stringify([]), Date.now());
-				this.largestUid = 0;
-				this.hourlyData = [];
-				debug('Created new statistics row for imap_hash');
-			}
+			}
 		} catch (error) {
 			debug('Failed to load statistics from database:', error.message);
@@ -130,17 +76,10 @@ class StatisticsStore {
 			const stmt = this.db.prepare(`
 				UPDATE statistics
 				SET largest_uid = ?, hourly_data = ?, last_updated = ?
-				WHERE imap_hash = ?
+				WHERE id = 1
 			`);
-			const result = stmt.run(this.largestUid, JSON.stringify(this.hourlyData), Date.now(), this.imapHash);
-			// If no row was updated, insert new row
-			if (result.changes === 0) {
-				const insert = this.db.prepare('INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated) VALUES (?, ?, ?, ?)');
-				insert.run(this.imapHash, this.largestUid, JSON.stringify(this.hourlyData), Date.now());
-				debug('Inserted new statistics row for imap_hash');
-			} else {
-				debug('Statistics saved to database');
-			}
+			stmt.run(this.largestUid, JSON.stringify(this.hourlyData), Date.now());
+			debug('Statistics saved to database');
 		} catch (error) {
 			debug('Failed to save statistics to database:', error.message);
 		}
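The deleted save path is a manual upsert: run `UPDATE ... WHERE imap_hash = ?` and fall back to `INSERT` when `result.changes === 0`. SQLite 3.24+ can express the same thing atomically with an upsert clause; a sketch under the assumption that `imap_hash` carries a UNIQUE index (the diff declares the column without one, so this is illustrative, not a drop-in):

	// Single-statement upsert; requires UNIQUE(imap_hash) to exist.
	const upsert = db.prepare(`
		INSERT INTO statistics (imap_hash, largest_uid, hourly_data, last_updated)
		VALUES (?, ?, ?, ?)
		ON CONFLICT(imap_hash) DO UPDATE SET
			largest_uid = excluded.largest_uid,
			hourly_data = excluded.hourly_data,
			last_updated = excluded.last_updated
	`)
	upsert.run(imapHash, largestUid, JSON.stringify(hourlyData), Date.now())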
@@ -466,7 +405,9 @@ class StatisticsStore {
 	recordDelete() {
 		this.currentCount = Math.max(0, this.currentCount - 1)
 		this._addDataPoint('delete')
-		debug(`Email deleted. Current: ${this.currentCount}`)
+		debug(`
+Email deleted.Current: $ { this.currentCount }
+`)
 	}

 	/**
@@ -474,7 +415,8 @@ class StatisticsStore {
 	 */
 	recordForward() {
 		this._addDataPoint('forward')
-		debug(`Email forwarded. Current: ${this.currentCount}`)
+		debug(`
+Email forwarded `)
 	}

 	/**
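Both hunks above (and every `debug(...)` change that follows) replace a one-line template literal with a reflowed multi-line one, and the reflow is not behavior-preserving: JavaScript only interpolates when `${` is written with no space between the two characters, so `$ { this.currentCount }` is logged as literal text, and the embedded line breaks become part of the message. A short illustration:

	const n = 7
	console.log(`count: ${n}`)    // prints "count: 7"
	console.log(`count: $ { n }`) // prints "count: $ { n }" verbatim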
@@ -528,12 +470,18 @@ class StatisticsStore {

 		const now = Date.now()
 		if (this.enhancedStats && (now - this.lastEnhancedStatsTime) < this.enhancedStatsCacheDuration) {
-			debug(`Using cached enhanced stats(age: ${Math.round((now - this.lastEnhancedStatsTime) / 1000)}s)`)
+			debug(`
+Using cached enhanced stats(age: $ { Math.round((now - this.lastEnhancedStatsTime) / 1000) }
+s)
+`)
 			return
 		}

-		debug(`Calculating enhanced statistics from ${allMails.length} emails `)
-		// Track sender domains (privacy-friendly: domain only, not full address)
+		debug(`
+Calculating enhanced statistics from $ { allMails.length }
+emails `)
+
+		// Track sender domains (privacy-friendly: domain only, not full address)
 		const senderDomains = new Map()
 		const recipientDomains = new Map()
 		const hourlyActivity = Array(24).fill(0)
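The guard at the top of this hunk is a plain time-based cache: remember the last result and its timestamp, and skip recomputation while the age is below a TTL (`enhancedStatsCacheDuration`, 5 minutes). The same shape, extracted into a minimal reusable sketch with illustrative names:

	// Minimal TTL cache mirroring the enhancedStats guard above.
	class TtlCache {
		constructor(ttlMs) {
			this.ttlMs = ttlMs
			this.value = null
			this.computedAt = 0
		}

		get(compute) {
			const now = Date.now()
			if (this.value !== null && now - this.computedAt < this.ttlMs) {
				return this.value // still fresh; skip the expensive recomputation
			}
			this.value = compute()
			this.computedAt = now
			return this.value
		}
	}

	const statsCache = new TtlCache(5 * 60 * 1000) // same TTL as enhancedStatsCacheDuration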
@@ -643,7 +591,10 @@ class StatisticsStore {
 		}

 		this.lastEnhancedStatsTime = now
-		debug(`Enhanced stats calculated: ${this.enhancedStats.uniqueSenderDomains} unique sender domains, ${this.enhancedStats.busiestHours.length} busy hours `)
+		debug(`
+Enhanced stats calculated: $ { this.enhancedStats.uniqueSenderDomains }
+unique sender domains, $ { this.enhancedStats.busiestHours.length }
+busy hours `)
 	}

 	/**
@@ -659,11 +610,18 @@ class StatisticsStore {
 		// Check cache - if analysis was done recently, skip it
 		const now = Date.now()
 		if (this.historicalData && (now - this.lastAnalysisTime) < this.analysisCacheDuration) {
-			debug(`Using cached historical data(${this.historicalData.length} points, age: ${Math.round((now - this.lastAnalysisTime) / 1000)}s)`)
+			debug(`
+Using cached historical data($ { this.historicalData.length }
+points, age: $ { Math.round((now - this.lastAnalysisTime) / 1000) }
+s)
+`)
 			return
 		}

-		debug(`Analyzing ${allMails.length} emails for historical statistics `)
+		debug(`
+Analyzing $ { allMails.length }
+emails
+for historical statistics `)
 		const startTime = Date.now()

 		// Group emails by minute
@@ -693,7 +651,10 @@ class StatisticsStore {
 		this.lastAnalysisTime = now

 		const elapsed = Date.now() - startTime
-		debug(`Built historical data: ${this.historicalData.length} time buckets in ${elapsed} ms `)
+		debug(`
+Built historical data: $ { this.historicalData.length }
+time buckets in $ { elapsed }
+ms `)
 	}

 	/**
@@ -781,7 +742,11 @@ class StatisticsStore {
 			.map(([timestamp, receives]) => ({ timestamp, receives }))
 			.sort((a, b) => a.timestamp - b.timestamp)

-		debug(`Historical timeline: ${intervalData.length} 15 - min interval points within ${config.email.purgeTime.time} ${config.email.purgeTime.unit} window `)
+		debug(`
+Historical timeline: $ { intervalData.length }
+15 - min interval points within $ { config.email.purgeTime.time }
+$ { config.email.purgeTime.unit }
+window `)
 		return intervalData
 	}

@@ -821,7 +786,11 @@ class StatisticsStore {
 			hourlyAverages.set(hour, avg)
 		})

-		debug(`Built hourly patterns for ${hourlyAverages.size} hours from ${this.historicalData.length} data points `)
+		debug(`
+Built hourly patterns
+for $ { hourlyAverages.size }
+hours from $ { this.historicalData.length }
+data points `)

 		// Generate predictions for a reasonable future window
 		// Limit to 20% of purge duration or 12 hours max to maintain chart balance
@@ -857,7 +826,9 @@ class StatisticsStore {
 			})
 		}

-		debug(`Generated ${predictions.length} prediction points based on hourly patterns `)
+		debug(`
+Generated $ { predictions.length }
+prediction points based on hourly patterns `)
 		return predictions
 	}

@@ -910,7 +881,12 @@ class StatisticsStore {

 		if (beforeCount !== this.hourlyData.length) {
 			this._saveToDatabase() // Save after cleanup
-			debug(`Cleaned up ${beforeCount - this.hourlyData.length} old data points(keeping data for ${config.email.purgeTime.time} ${config.email.purgeTime.unit})`)
+			debug(`
+Cleaned up $ { beforeCount - this.hourlyData.length }
+old data points(keeping data
+for $ { config.email.purgeTime.time }
+$ { config.email.purgeTime.unit })
+`)
 		}

 		this.lastCleanup = now
@@ -952,4 +928,4 @@ class StatisticsStore {
 	}
 }

-module.exports = StatisticsStore
+module.exports = StatisticsStore
schema.sql (28 changes)
@@ -4,13 +4,11 @@
 -- Users table
 CREATE TABLE IF NOT EXISTS users (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    instance_id TEXT NOT NULL,
-    username TEXT NOT NULL COLLATE NOCASE,
+    username TEXT UNIQUE NOT NULL COLLATE NOCASE,
     password_hash TEXT NOT NULL,
     created_at INTEGER NOT NULL,
     last_login INTEGER,
-    CHECK (length(username) >= 3 AND length(username) <= 20),
-    UNIQUE(instance_id, username)
+    CHECK (length(username) >= 3 AND length(username) <= 20)
 );

 CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);
@@ -19,13 +17,12 @@ CREATE INDEX IF NOT EXISTS idx_users_created_at ON users(created_at);
 -- User verified forwarding emails
 CREATE TABLE IF NOT EXISTS user_forward_emails (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    instance_id TEXT NOT NULL,
     user_id INTEGER NOT NULL,
     email TEXT NOT NULL COLLATE NOCASE,
     verified_at INTEGER NOT NULL,
     created_at INTEGER NOT NULL,
     FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-    UNIQUE(instance_id, user_id, email)
+    UNIQUE(user_id, email)
 );

 CREATE INDEX IF NOT EXISTS idx_forward_emails_user_id ON user_forward_emails(user_id);
@@ -34,14 +31,13 @@ CREATE INDEX IF NOT EXISTS idx_forward_emails_email ON user_forward_emails(email
 -- User locked inboxes
 CREATE TABLE IF NOT EXISTS user_locked_inboxes (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    instance_id TEXT NOT NULL,
     user_id INTEGER NOT NULL,
     inbox_address TEXT NOT NULL COLLATE NOCASE,
     password_hash TEXT NOT NULL,
    locked_at INTEGER NOT NULL,
     last_accessed INTEGER NOT NULL,
     FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-    UNIQUE(instance_id, user_id, inbox_address)
+    UNIQUE(user_id, inbox_address)
 );

 CREATE INDEX IF NOT EXISTS idx_locked_inboxes_user_id ON user_locked_inboxes(user_id);
@@ -51,13 +47,11 @@ CREATE INDEX IF NOT EXISTS idx_locked_inboxes_last_accessed ON user_locked_inbox
 -- API tokens (one per user for programmatic access)
 CREATE TABLE IF NOT EXISTS api_tokens (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    instance_id TEXT NOT NULL,
-    user_id INTEGER NOT NULL,
+    user_id INTEGER NOT NULL UNIQUE,
     token TEXT NOT NULL UNIQUE,
     created_at INTEGER NOT NULL,
     last_used INTEGER,
-    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
-    UNIQUE(instance_id, user_id)
+    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
 );

 CREATE INDEX IF NOT EXISTS idx_api_tokens_token ON api_tokens(token);
@@ -65,14 +59,16 @@ CREATE INDEX IF NOT EXISTS idx_api_tokens_user_id ON api_tokens(user_id);

 -- Statistics storage for persistence across restarts
 CREATE TABLE IF NOT EXISTS statistics (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    instance_id TEXT NOT NULL,
+    id INTEGER PRIMARY KEY CHECK (id = 1), -- Single row table
     largest_uid INTEGER NOT NULL DEFAULT 0,
     hourly_data TEXT, -- JSON array of 24h rolling data
-    last_updated INTEGER NOT NULL,
-    imap_hash TEXT NULL
+    last_updated INTEGER NOT NULL
 );

+-- Initialize with default row if not exists
+INSERT OR IGNORE INTO statistics (id, largest_uid, hourly_data, last_updated)
+VALUES (1, 0, '[]', 0);
+
 -- Trigger to enforce max 5 locked inboxes per user
 CREATE TRIGGER IF NOT EXISTS check_locked_inbox_limit
 BEFORE INSERT ON user_locked_inboxes
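The reverted schema turns `statistics` back into a singleton table: `PRIMARY KEY CHECK (id = 1)` makes any row other than id 1 violate a constraint, and the `INSERT OR IGNORE` seeds that row exactly once, so application code can hard-code `WHERE id = 1` as the reverted `_loadFromDatabase`/`_saveToDatabase` above do. A small runtime sketch, again assuming a better-sqlite3-style synchronous driver:

	// Seeding is idempotent: OR IGNORE makes re-runs a no-op.
	db.prepare("INSERT OR IGNORE INTO statistics (id, largest_uid, hourly_data, last_updated) VALUES (1, 0, '[]', 0)").run()

	try {
		// Any other id violates CHECK (id = 1), so a second row is impossible.
		db.prepare("INSERT INTO statistics (id, largest_uid, hourly_data, last_updated) VALUES (2, 0, '[]', 0)").run()
	} catch (err) {
		console.error(err.message) // CHECK constraint failed
	}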