diff --git a/.env.example b/.env.example
index d78e661..7d3c9a7 100644
--- a/.env.example
+++ b/.env.example
@@ -17,7 +17,7 @@ IMAP_USER="user@example.com"   # IMAP username
 IMAP_PASSWORD="password"              # IMAP password
 IMAP_SERVER="imap.example.com"        # IMAP server address
 IMAP_PORT=993                         # IMAP port (default 993)
-IMAP_TLS=true                         # Use secure TLS connection (true/false)
+IMAP_SECURE=true                      # Use secure TLS connection (true/false)
 IMAP_AUTH_TIMEOUT=3000                # Authentication timeout in ms
 IMAP_REFRESH_INTERVAL_SECONDS=60      # Refresh interval for checking new emails
 IMAP_FETCH_CHUNK=200                  # Number of UIDs per fetch chunk during initial load
@@ -25,11 +25,11 @@ IMAP_CONCURRENCY=6                    # Number of conc
 
 # --- SMTP CONFIGURATION (for email forwarding) ---
 SMTP_ENABLED=false                    # Enable SMTP forwarding functionality (default: false)
-SMTP_HOST="smtp.example.com"          # SMTP server address (e.g., smtp.gmail.com, smtp.sendgrid.net)
-SMTP_PORT=465                         # SMTP port (587 for TLS, 465 for SSL, 25 for unencrypted)
-SMTP_SECURE=true                      # Use SSL (true for port 465, false for other ports)
 SMTP_USER="noreply@48hr.email"        # SMTP authentication username (also used as from address)
 SMTP_PASSWORD="password"              # SMTP authentication password
+SMTP_SERVER="smtp.example.com"        # SMTP server address (e.g., smtp.gmail.com, smtp.sendgrid.net)
+SMTP_PORT=465                         # SMTP port (587 for TLS, 465 for SSL, 25 for unencrypted)
+SMTP_SECURE=true                      # Use SSL (true for port 465, false for other ports)
 
 # --- HTTP / WEB CONFIGURATION ---
 HTTP_PORT=3000                        # Port
@@ -41,6 +41,7 @@ HTTP_DISPLAY_SORT=2                   # Domain display
                                       # 2 = alphabetical + first item shuffled,
                                       # 3 = shuffle all
 HTTP_HIDE_OTHER=false                 # true = only show first domain, false = show all
+HTTP_STATISTICS_ENABLED=false         # Enable statistics page at /stats (true/false)
 
 # --- USER AUTHENTICATION & INBOX LOCKING ---
 USER_AUTH_ENABLED=false               # Enable user registration/login system (default: false)
diff --git a/README.md b/README.md
index 7827b60..72c0736 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,7 @@ All data is being removed 48hrs after they have reached the mail server.
 - View the raw email, showing all the headers etc.
 - Download Attachments with one click
 - Optional User Account System with email forwarding and inbox locking
+- Optional Statistics System, tracking public data for as long as your emails stay on the server
 - and more...
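The two renames above (`IMAP_TLS` → `IMAP_SECURE`, `SMTP_HOST` → `SMTP_SERVER`) mean an existing deployment that keeps the old names in its `.env` will silently be ignored. A small startup guard along these lines could surface that; this is an illustrative sketch, not part of the patch:

```js
// Hypothetical upgrade check (not part of this patch): warn when a pre-2.1
// variable name is still set but its replacement is missing.
const renamedVars = { IMAP_TLS: 'IMAP_SECURE', SMTP_HOST: 'SMTP_SERVER' }

for (const [oldName, newName] of Object.entries(renamedVars)) {
	if (process.env[oldName] !== undefined && process.env[newName] === undefined) {
		console.warn(`${oldName} is no longer read; set ${newName} instead`)
	}
}
```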
diff --git a/app.js b/app.js
index 031a2da..0bbff3a 100644
--- a/app.js
+++ b/app.js
@@ -30,15 +30,13 @@ const verificationStore = new VerificationStore()
 debug('Verification store initialized')
 app.set('verificationStore', verificationStore)
 
-const statisticsStore = new StatisticsStore()
-debug('Statistics store initialized')
-app.set('statisticsStore', statisticsStore)
-
 // Set config in app for route access
 app.set('config', config)
 
 // Initialize user repository and auth service (if enabled)
 let inboxLock = null
+let statisticsStore = null
+
 if (config.user.authEnabled) {
 	// Migrate legacy database files for backwards compatibility
 	Helper.migrateDatabase(config.user.databasePath)
@@ -47,6 +45,11 @@ if (config.user.authEnabled) {
 	debug('User repository initialized')
 	app.set('userRepository', userRepository)
 
+	// Initialize statistics store with database connection
+	statisticsStore = new StatisticsStore(userRepository.db)
+	debug('Statistics store initialized with database persistence')
+	app.set('statisticsStore', statisticsStore)
+
 	const authService = new AuthService(userRepository, config)
 	debug('Auth service initialized')
 	app.set('authService', authService)
@@ -74,6 +77,11 @@ if (config.user.authEnabled) {
 	console.log('User authentication system enabled')
 } else {
+	// No auth enabled - initialize statistics store without persistence
+	statisticsStore = new StatisticsStore()
+	debug('Statistics store initialized (in-memory only, no database)')
+	app.set('statisticsStore', statisticsStore)
+
 	app.set('userRepository', null)
 	app.set('authService', null)
 	app.set('inboxLock', null)
diff --git a/app.json b/app.json
index 02195d8..f90d7b1 100644
--- a/app.json
+++ b/app.json
@@ -44,7 +44,7 @@
     "description": "Port of the server (usually 993)",
     "value": 993
   },
-  "IMAP_TLS": {
+  "IMAP_SECURE": {
     "description": "Use tls or not",
     "value": true
   },
@@ -81,4 +81,4 @@
     "value": false
   }
 }
-}
\ No newline at end of file
+}
diff --git a/application/config.js b/application/config.js
index d18b3e2..edf2a23 100644
--- a/application/config.js
+++ b/application/config.js
@@ -49,7 +49,7 @@ const config = {
 		password: parseValue(process.env.IMAP_PASSWORD),
 		host: parseValue(process.env.IMAP_SERVER),
 		port: Number(process.env.IMAP_PORT),
-		tls: parseBool(process.env.IMAP_TLS),
+		secure: parseBool(process.env.IMAP_SECURE),
 		authTimeout: Number(process.env.IMAP_AUTH_TIMEOUT),
 		refreshIntervalSeconds: Number(process.env.IMAP_REFRESH_INTERVAL_SECONDS),
 		fetchChunkSize: Number(process.env.IMAP_FETCH_CHUNK) || 100,
@@ -58,11 +58,11 @@
 
 	smtp: {
 		enabled: parseBool(process.env.SMTP_ENABLED) || false,
-		host: parseValue(process.env.SMTP_HOST),
-		port: Number(process.env.SMTP_PORT) || 465,
-		secure: parseBool(process.env.SMTP_SECURE) || true,
 		user: parseValue(process.env.SMTP_USER),
-		password: parseValue(process.env.SMTP_PASSWORD)
+		password: parseValue(process.env.SMTP_PASSWORD),
+		server: parseValue(process.env.SMTP_SERVER),
+		port: Number(process.env.SMTP_PORT) || 465,
+		secure: parseBool(process.env.SMTP_SECURE) || true
 	},
 
 	http: {
@@ -70,7 +70,8 @@
 		baseUrl: parseValue(process.env.HTTP_BASE_URL) || 'http://localhost:3000',
 		branding: parseValue(process.env.HTTP_BRANDING),
 		displaySort: Number(process.env.HTTP_DISPLAY_SORT),
-		hideOther: parseBool(process.env.HTTP_HIDE_OTHER)
+		hideOther: parseBool(process.env.HTTP_HIDE_OTHER),
+		statisticsEnabled: parseBool(process.env.HTTP_STATISTICS_ENABLED) || false
 	},
 
 	user: {
diff --git a/application/helper.js b/application/helper.js
index a91cc96..605ed36 100644
--- a/application/helper.js
+++ b/application/helper.js
@@ -106,6 +106,25 @@ class Helper {
 		return footer
 	}
 
+	/**
+	 * Build a mail count html element with tooltip for the footer
+	 * @param {number} count - Current mail count
+	 * @returns {String}
+	 */
+	mailCountBuilder(count) {
+		const imapService = require('./imap-service')
+		const largestUid = imapService.getLargestUid ? imapService.getLargestUid() : null
+		let tooltip = ''
+
+		if (largestUid && largestUid > 0) {
+			tooltip = `All-time total: ${largestUid} emails`
+		}
+
+		return ``
+	}
+
 	/**
 	 * Shuffle an array using the Durstenfeld shuffle algorithm
 	 * @param {Array} array
diff --git a/application/smtp-service.js b/application/smtp-service.js
index f84aed0..2ff4d6f 100644
--- a/application/smtp-service.js
+++ b/application/smtp-service.js
@@ -25,7 +25,7 @@ class SmtpService {
 	_isConfigured() {
 		return !!(
 			this.config.smtp.enabled &&
-			this.config.smtp.host &&
+			this.config.smtp.server &&
 			this.config.smtp.user &&
 			this.config.smtp.password
 		)
@@ -38,7 +38,7 @@
 	_initializeTransporter() {
 		try {
 			this.transporter = nodemailer.createTransport({
-				host: this.config.smtp.host,
+				host: this.config.smtp.server,
 				port: this.config.smtp.port,
 				secure: this.config.smtp.secure,
 				auth: {
@@ -52,7 +52,7 @@
 				}
 			})
 
-			debug(`SMTP transporter initialized: ${this.config.smtp.host}:${this.config.smtp.port}`)
+			debug(`SMTP transporter initialized: ${this.config.smtp.server}:${this.config.smtp.port}`)
 		} catch (error) {
 			debug('Failed to initialize SMTP transporter:', error.message)
 			throw new Error(`SMTP initialization failed: ${error.message}`)
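Note that nodemailer itself still receives the value as `host`; only the config key changes from `smtp.host` to `smtp.server`. A quick way to confirm a renamed deployment still reaches its SMTP server is nodemailer's `verify()`. A rough sketch, assuming the config shape from `application/config.js` above:

```js
// Sketch: confirm the renamed config.smtp.server key still connects.
const nodemailer = require('nodemailer')
const config = require('./application/config')

const transporter = nodemailer.createTransport({
	host: config.smtp.server, // was config.smtp.host before this change
	port: config.smtp.port,
	secure: config.smtp.secure,
	auth: { user: config.smtp.user, pass: config.smtp.password }
})

transporter.verify()
	.then(() => console.log('SMTP connection OK'))
	.catch(error => console.error('SMTP connection failed:', error.message))
```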
diff --git a/domain/statistics-store.js b/domain/statistics-store.js
index 753fde6..05cd9b0 100644
--- a/domain/statistics-store.js
+++ b/domain/statistics-store.js
@@ -1,11 +1,15 @@
 const debug = require('debug')('48hr-email:stats-store')
+const config = require('../application/config')
 
 /**
  * Statistics Store - Tracks email metrics and historical data
  * Stores 24-hour rolling statistics for receives, deletes, and forwards
+ * Persists data to database for survival across restarts
  */
 class StatisticsStore {
-	constructor() {
+	constructor(db = null) {
+		this.db = db
+
 		// Current totals
 		this.currentCount = 0
 		this.largestUid = 0
@@ -17,9 +21,99 @@
 		// Track last cleanup to avoid too frequent operations
 		this.lastCleanup = Date.now()
 
+		// Historical data caching to prevent repeated analysis
+		this.historicalData = null
+		this.lastAnalysisTime = 0
+		this.analysisCacheDuration = 5 * 60 * 1000 // Cache for 5 minutes
+
+		// Load persisted data if database is available
+		if (this.db) {
+			this._loadFromDatabase()
+		}
+
 		debug('Statistics store initialized')
 	}
 
+	/**
+	 * Get cutoff time based on email purge configuration
+	 * @returns {number} Timestamp in milliseconds
+	 * @private
+	 */
+	_getPurgeCutoffMs() {
+		const time = config.email.purgeTime.time
+		const unit = config.email.purgeTime.unit
+
+		let cutoffMs = 0
+		switch (unit) {
+			case 'minutes':
+				cutoffMs = time * 60 * 1000
+				break
+			case 'hours':
+				cutoffMs = time * 60 * 60 * 1000
+				break
+			case 'days':
+				cutoffMs = time * 24 * 60 * 60 * 1000
+				break
+			default:
+				cutoffMs = 48 * 60 * 60 * 1000 // Fallback to 48 hours
+		}
+
+		return cutoffMs
+	}
+
+	/**
+	 * Load statistics from database
+	 * @private
+	 */
+	_loadFromDatabase() {
+		try {
+			const stmt = this.db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE id = 1')
+			const row = stmt.get()
+
+			if (row) {
+				this.largestUid = row.largest_uid || 0
+
+				// Parse hourly data
+				if (row.hourly_data) {
+					try {
+						const parsed = JSON.parse(row.hourly_data)
+						// Filter out stale data based on config purge time
+						const cutoff = Date.now() - this._getPurgeCutoffMs()
+						this.hourlyData = parsed.filter(entry => entry.timestamp >= cutoff)
+						debug(`Loaded ${this.hourlyData.length} hourly data points from database (cutoff: ${new Date(cutoff).toISOString()})`)
+					} catch (e) {
+						debug('Failed to parse hourly data:', e.message)
+						this.hourlyData = []
+					}
+				}
+
+				debug(`Loaded from database: largestUid=${this.largestUid}, hourlyData=${this.hourlyData.length} entries`)
+			}
+		} catch (error) {
+			debug('Failed to load statistics from database:', error.message)
+		}
+	}
+
+	/**
+	 * Save statistics to database
+	 * @private
+	 */
+	_saveToDatabase() {
+		if (!this.db) return
+
+		try {
+			const stmt = this.db.prepare(`
+				UPDATE statistics
+				SET largest_uid = ?, hourly_data = ?, last_updated = ?
+				WHERE id = 1
+			`)
+			stmt.run(this.largestUid, JSON.stringify(this.hourlyData), Date.now())
+			debug('Statistics saved to database')
+		} catch (error) {
+			debug('Failed to save statistics to database:', error.message)
+		}
+	}
+
 	/**
 	 * Initialize with current email count
 	 * @param {number} count - Current email count
@@ -36,6 +130,7 @@ updateLargestUid(uid) {
 		if (uid >= 0 && uid > this.largestUid) {
 			this.largestUid = uid
+			this._saveToDatabase()
 			debug(`Largest UID updated to ${uid}`)
 		}
 	}
@@ -103,6 +198,216 @@
 	}
 
+	/**
+	 * Analyze all existing emails to build historical statistics
+	 * @param {Array} allMails - Array of all mail summaries with date property
+	 */
+	analyzeHistoricalData(allMails) {
+		if (!allMails || allMails.length === 0) {
+			debug('No historical data to analyze')
+			return
+		}
+
+		// Check cache - if analysis was done recently, skip it
+		const now = Date.now()
+		if (this.historicalData && (now - this.lastAnalysisTime) < this.analysisCacheDuration) {
+			debug(`Using cached historical data (${this.historicalData.length} points, age: ${Math.round((now - this.lastAnalysisTime) / 1000)}s)`)
+			return
+		}
+
+		debug(`Analyzing ${allMails.length} emails for historical statistics`)
+		const startTime = Date.now()
+
+		// Group emails by minute
+		const histogram = new Map()
+
+		allMails.forEach(mail => {
+			try {
+				const date = new Date(mail.date)
+				if (isNaN(date.getTime())) return
+
+				const minute = Math.floor(date.getTime() / 60000) * 60000
+
+				if (!histogram.has(minute)) {
+					histogram.set(minute, 0)
+				}
+				histogram.set(minute, histogram.get(minute) + 1)
+			} catch (e) {
+				// Skip invalid dates
+			}
+		})
+
+		// Convert to array and sort by timestamp
+		this.historicalData = Array.from(histogram.entries())
+			.map(([timestamp, count]) => ({ timestamp, receives: count }))
+			.sort((a, b) => a.timestamp - b.timestamp)
+
+		this.lastAnalysisTime = now
+
+		const elapsed = Date.now() - startTime
+		debug(`Built historical data: ${this.historicalData.length} time buckets in ${elapsed}ms`)
+	}
+
+	/**
+	 * Get enhanced statistics with historical data and predictions
+	 * @returns {Object} Enhanced stats with historical timeline and predictions
+	 */
+	getEnhancedStats() {
+		this._cleanup()
+
+		const last24h = this._getLast24Hours()
+		const timeline = this._getTimeline()
+		const historicalTimeline = this._getHistoricalTimeline()
+		const prediction = this._generatePrediction()
+
+		// Calculate historical receives from purge time window
+		const cutoff = Date.now() - this._getPurgeCutoffMs()
+		const historicalReceives = historicalTimeline
+			.filter(point => point.timestamp >= cutoff)
+			.reduce((sum, point) => sum + point.receives, 0)
+
+		return {
+			currentCount: this.currentCount,
+			allTimeTotal: this.largestUid,
+			last24Hours: {
+				receives: last24h.receives + historicalReceives,
+				deletes: last24h.deletes,
+				forwards: last24h.forwards,
+				timeline: timeline
+			},
+			historical: historicalTimeline,
+			prediction: prediction
+		}
+	}
+
+	/**
+	 * Get lightweight statistics without historical analysis (for API updates)
+	 * @returns {Object} Stats with only realtime data
+	 */
+	getLightweightStats() {
+		this._cleanup()
+
+		const last24h = this._getLast24Hours()
+		const timeline = this._getTimeline()
+
+		return {
+			currentCount: this.currentCount,
+			allTimeTotal: this.largestUid,
+			last24Hours: {
+				receives: last24h.receives,
+				deletes: last24h.deletes,
+				forwards: last24h.forwards,
+				timeline: timeline
+			}
+		}
+	}
+
+	/**
+	 * Get historical timeline for visualization
+	 * Shows data for the configured purge duration, aggregated by hour
+	 * @returns {Array} Historical data points
+	 * @private
+	 */
+	_getHistoricalTimeline() {
+		if (!this.historicalData || this.historicalData.length === 0) {
+			return []
+		}
+
+		// Show historical data up to the purge time window
+		const cutoff = Date.now() - this._getPurgeCutoffMs()
+		const relevantHistory = this.historicalData.filter(point => point.timestamp >= cutoff)
+
+		// Aggregate by hour
+		const hourlyBuckets = new Map()
+		relevantHistory.forEach(point => {
+			const hour = Math.floor(point.timestamp / 3600000) * 3600000
+			if (!hourlyBuckets.has(hour)) {
+				hourlyBuckets.set(hour, 0)
+			}
+			hourlyBuckets.set(hour, hourlyBuckets.get(hour) + point.receives)
+		})
+
+		// Convert to array and sort
+		const hourlyData = Array.from(hourlyBuckets.entries())
+			.map(([timestamp, receives]) => ({ timestamp, receives }))
+			.sort((a, b) => a.timestamp - b.timestamp)
+
+		debug(`Historical timeline: ${hourlyData.length} hourly points within ${config.email.purgeTime.time} ${config.email.purgeTime.unit} window`)
+		return hourlyData
+	}
+
+	/**
+	 * Generate prediction for next period based on historical patterns
+	 * Uses config purge time to determine prediction window
+	 * Predicts based on time-of-day patterns with randomization
+	 * @returns {Array} Predicted data points
+	 * @private
+	 */
+	_generatePrediction() {
+		if (!this.historicalData || this.historicalData.length < 100) {
+			return [] // Not enough data to predict
+		}
+
+		const now = Date.now()
+		const predictions = []
+
+		// Build hourly patterns from historical data
+		// Map hour-of-day to average receives count
+		const hourlyPatterns = new Map()
+
+		this.historicalData.forEach(point => {
+			const date = new Date(point.timestamp)
+			const hour = date.getHours()
+
+			if (!hourlyPatterns.has(hour)) {
+				hourlyPatterns.set(hour, [])
+			}
+			hourlyPatterns.get(hour).push(point.receives)
+		})
+
+		// Calculate average for each hour
+		const hourlyAverages = new Map()
+		hourlyPatterns.forEach((values, hour) => {
+			const avg = values.reduce((sum, v) => sum + v, 0) / values.length
+			hourlyAverages.set(hour, avg)
+		})
+
+		debug(`Built hourly patterns for ${hourlyAverages.size} hours from ${this.historicalData.length} data points`)
+
+		// Generate predictions for purge duration (in 1-hour intervals)
+		const purgeMs = this._getPurgeCutoffMs()
+		const predictionHours = Math.ceil(purgeMs / (60 * 60 * 1000))
+
+		for (let i = 1; i <= predictionHours; i++) {
+			const timestamp = now + (i * 60 * 60 * 1000) // 1 hour intervals
+			const futureDate = new Date(timestamp)
+			const futureHour = futureDate.getHours()
+
+			// Get average for this hour, or fallback to overall average
+			let baseCount = hourlyAverages.get(futureHour)
+			if (baseCount === undefined) {
+				// Fallback to overall average if no data for this hour
+				const allValues = Array.from(hourlyAverages.values())
+				baseCount = allValues.reduce((sum, v) => sum + v, 0) / allValues.length
+			}
+
+			// baseCount is already per-minute average, scale to full hour
+			const scaledCount = baseCount * 60
+
+			// Add randomization (±20%)
+			const randomFactor = 0.8 + (Math.random() * 0.4) // 0.8 to 1.2
+			const predictedCount = Math.round(scaledCount * randomFactor)
+
+			predictions.push({
+				timestamp,
+				receives: Math.max(0, predictedCount)
+			})
+		}
+
+		debug(`Generated ${predictions.length} prediction points based on hourly patterns`)
+		return predictions
+	}
+
 	/**
 	 * Add a data point to the rolling history
 	 * @param {string} type - Type of event (receive, delete, forward)
@@ -127,10 +432,15 @@
 		entry[type + 's']++
 
 		this._cleanup()
+
+		// Save to database periodically (every 10 data points to reduce I/O)
+		if (Math.random() < 0.1) { // ~10% chance = every ~10 events
+			this._saveToDatabase()
+		}
 	}
 
 	/**
-	 * Clean up old data points (older than 24 hours)
+	 * Clean up old data points (older than email purge time)
	 * @private
 	 */
 	_cleanup() {
@@ -141,24 +451,25 @@
 			return
 		}
 
-		const cutoff = now - (24 * 60 * 60 * 1000)
+		const cutoff = now - this._getPurgeCutoffMs()
 		const beforeCount = this.hourlyData.length
 		this.hourlyData = this.hourlyData.filter(entry => entry.timestamp >= cutoff)
 
 		if (beforeCount !== this.hourlyData.length) {
-			debug(`Cleaned up ${beforeCount - this.hourlyData.length} old data points`)
+			this._saveToDatabase() // Save after cleanup
+			debug(`Cleaned up ${beforeCount - this.hourlyData.length} old data points (keeping data for ${config.email.purgeTime.time} ${config.email.purgeTime.unit})`)
 		}
 
 		this.lastCleanup = now
 	}
 
 	/**
-	 * Get aggregated stats for last 24 hours
+	 * Get aggregated stats for the purge time window
 	 * @returns {Object} Aggregated counts
 	 * @private
 	 */
 	_getLast24Hours() {
-		const cutoff = Date.now() - (24 * 60 * 60 * 1000)
+		const cutoff = Date.now() - this._getPurgeCutoffMs()
 		const recent = this.hourlyData.filter(e => e.timestamp >= cutoff)
 
 		return {
@@ -170,12 +481,13 @@
 	/**
 	 * Get timeline data for graphing (hourly aggregates)
+	 * Uses purge time for consistent timeline length
 	 * @returns {Array} Array of hourly data points
 	 * @private
 	 */
 	_getTimeline() {
 		const now = Date.now()
-		const cutoff = now - (24 * 60 * 60 * 1000)
+		const cutoff = now - this._getPurgeCutoffMs()
 		const hourly = {}
 
 		// Aggregate by hour
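Everything in the store is now keyed to the configured purge window instead of a hard-coded 24 hours: `_getPurgeCutoffMs()` turns `{ time: 48, unit: 'hours' }` into 48 × 60 × 60 × 1000 = 172,800,000 ms, and that cutoff drives loading, cleanup, the historical timeline, and the prediction horizon. A minimal usage sketch of the public surface, assuming the class is exported from `domain/statistics-store.js` and that `userRepository.db` exposes the synchronous `prepare().get()/run()` interface the store expects:

```js
// Sketch only: mailSummaries and userRepository are stand-ins for the app's real objects.
const StatisticsStore = require('./domain/statistics-store')

const persistent = new StatisticsStore(userRepository.db) // reloads largest_uid + hourly_data
const volatile = new StatisticsStore()                    // in-memory only, reset on restart

persistent.updateLargestUid(4321)               // persists immediately via _saveToDatabase()
persistent.analyzeHistoricalData(mailSummaries) // [{ date: '2024-05-01T10:00:00Z', ... }, ...]

const full = persistent.getEnhancedStats()      // historical + timeline + prediction (stats page)
const light = persistent.getLightweightStats()  // realtime counters only (API polling)
```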
diff --git a/infrastructure/web/public/javascripts/stats.js b/infrastructure/web/public/javascripts/stats.js
index 9b7542b..6570fe5 100644
--- a/infrastructure/web/public/javascripts/stats.js
+++ b/infrastructure/web/public/javascripts/stats.js
@@ -1,6 +1,6 @@
 /**
  * Statistics page functionality
- * Handles Chart.js initialization and real-time updates via Socket.IO
+ * Handles Chart.js initialization with historical, real-time, and predicted data
  */
 
 // Initialize stats chart if on stats page
@@ -8,22 +8,25 @@ document.addEventListener('DOMContentLoaded', function() {
    const chartCanvas = document.getElementById('statsChart');
    if (!chartCanvas) return; // Not on stats page
 
-   // Get initial data from global variable (set by template)
+   // Get data from global variables (set by template)
    if (typeof window.initialStatsData === 'undefined') {
        console.error('Initial stats data not found');
        return;
    }
 
-   const initialData = window.initialStatsData;
+   const realtimeData = window.initialStatsData || [];
+   const historicalData = window.historicalData || [];
+   const predictionData = window.predictionData || [];
+
+   console.log(`Loaded data: ${historicalData.length} historical, ${realtimeData.length} realtime, ${predictionData.length} predictions`);
 
    // Set up Socket.IO connection for real-time updates
    if (typeof io !== 'undefined') {
        const socket = io();
 
-       // Listen for stats updates (any email event: receive, delete, forward)
        socket.on('stats-update', () => {
-           console.log('Stats update received, reloading page...');
-           location.reload();
+           console.log('Stats update received (page will not auto-reload)');
+           // Don't auto-reload - user can manually refresh if needed
        });
 
        socket.on('reconnect', () => {
@@ -31,58 +34,123 @@
        });
    }
 
-   // Prepare chart data
-   const labels = initialData.map(d => {
+   // Combine all data and create labels
+   const now = Date.now();
+
+   // Use a reasonable historical window (show data within the purge time range)
+   // This will adapt based on whether purge time is 48 hours, 7 days, etc.
+   const allTimePoints = [
+       ...historicalData.map(d => ({...d, type: 'historical' })),
+       ...realtimeData.map(d => ({...d, type: 'realtime' })),
+       ...predictionData.map(d => ({...d, type: 'prediction' }))
+   ].sort((a, b) => a.timestamp - b.timestamp);
+
+   // Create labels
+   const labels = allTimePoints.map(d => {
        const date = new Date(d.timestamp);
-       return date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' });
+       return date.toLocaleString('en-US', {
+           month: 'short',
+           day: 'numeric',
+           hour: '2-digit',
+           minute: '2-digit'
+       });
    });
 
+   // Prepare datasets
+   const historicalPoints = allTimePoints.map(d => d.type === 'historical' ? d.receives : null);
+   const realtimePoints = allTimePoints.map(d => d.type === 'realtime' ? d.receives : null);
+   const predictionPoints = allTimePoints.map(d => d.type === 'prediction' ? d.receives : null);
+
+   // Create gradient for fading effect on historical data
    const ctx = chartCanvas.getContext('2d');
+   const historicalGradient = ctx.createLinearGradient(0, 0, chartCanvas.width * 0.3, 0);
+   historicalGradient.addColorStop(0, 'rgba(100, 100, 255, 0.05)');
+   historicalGradient.addColorStop(1, 'rgba(100, 100, 255, 0.15)');
+
+   // Track visibility state for each dataset
+   const datasetVisibility = [true, true, true];
+
    const chart = new Chart(ctx, {
        type: 'line',
        data: {
            labels: labels,
            datasets: [{
-               label: 'Received',
-               data: initialData.map(d => d.receives),
-               borderColor: '#9b4dca',
-               backgroundColor: 'rgba(155, 77, 202, 0.1)',
+               label: 'Historical',
+               data: historicalPoints,
+               borderColor: 'rgba(100, 149, 237, 0.8)',
+               backgroundColor: historicalGradient,
+               borderWidth: 2,
                tension: 0.4,
-               fill: true
+               pointRadius: 4,
+               pointBackgroundColor: 'rgba(100, 149, 237, 0.8)',
+               spanGaps: false,
+               fill: true,
+               hidden: false
            },
            {
-               label: 'Deleted',
-               data: initialData.map(d => d.deletes),
-               borderColor: '#e74c3c',
-               backgroundColor: 'rgba(231, 76, 60, 0.1)',
+               label: 'Current Activity',
+               data: realtimePoints,
+               borderColor: '#2ecc71',
+               backgroundColor: 'rgba(46, 204, 113, 0.15)',
+               borderWidth: 4,
                tension: 0.4,
-               fill: true
+               pointRadius: 4,
+               pointBackgroundColor: '#2ecc71',
+               spanGaps: false,
+               fill: true,
+               hidden: false
            },
            {
-               label: 'Forwarded',
-               data: initialData.map(d => d.forwards),
-               borderColor: '#3498db',
-               backgroundColor: 'rgba(52, 152, 219, 0.1)',
+               label: 'Predicted',
+               data: predictionPoints,
+               borderColor: '#ff9f43',
+               backgroundColor: 'rgba(255, 159, 67, 0.08)',
+               borderWidth: 3,
+               borderDash: [8, 4],
                tension: 0.4,
-               fill: true
+               pointRadius: 4,
+               pointBackgroundColor: '#ff9f43',
+               spanGaps: false,
+               fill: true,
+               hidden: false
            }
            ]
        },
        options: {
            responsive: true,
            maintainAspectRatio: false,
+           interaction: {
+               mode: 'index',
+               intersect: false
+           },
            plugins: {
                legend: {
-                   display: true,
-                   position: 'top',
-                   labels: {
-                       color: getComputedStyle(document.documentElement).getPropertyValue('--color-text-light'),
-                       font: { size: 14 }
-                   }
+                   display: false // Disable default legend, we'll create custom
                },
                tooltip: {
                    mode: 'index',
-                   intersect: false
+                   intersect: false,
+                   callbacks: {
+                       title: function(context) {
+                           const dataIndex = context[0].dataIndex;
+                           const point = allTimePoints[dataIndex];
+                           const date = new Date(point.timestamp);
+                           return date.toLocaleString('en-US', {
+                               dateStyle: 'medium',
+                               timeStyle: 'short'
+                           });
+                       },
+                       label: function(context) {
+                           let label = context.dataset.label || '';
+                           if (label) {
+                               label += ': ';
+                           }
+                           if (context.parsed.y !== null) {
+                               label += context.parsed.y + ' emails';
+                           }
+                           return label;
+                       }
+                   }
                }
            },
            scales: {
@@ -90,17 +158,26 @@
                    beginAtZero: true,
                    ticks: {
                        color: getComputedStyle(document.documentElement).getPropertyValue('--color-text-dim'),
-                       stepSize: 1
+                       stepSize: 1,
+                       callback: function(value) {
+                           return Math.round(value);
+                       }
                    },
                    grid: {
                        color: 'rgba(255, 255, 255, 0.1)'
+                   },
+                   title: {
+                       display: true,
+                       text: 'Emails Received',
+                       color: getComputedStyle(document.documentElement).getPropertyValue('--color-text-light')
                    }
                },
                x: {
                    ticks: {
                        color: getComputedStyle(document.documentElement).getPropertyValue('--color-text-dim'),
                        maxRotation: 45,
-                       minRotation: 45
+                       minRotation: 45,
+                       maxTicksLimit: 20
                    },
                    grid: {
                        color: 'rgba(255, 255, 255, 0.05)'
@@ -109,4 +186,52 @@
            }
        }
    });
+
+   // Create custom legend buttons
+   const chartContainer = chartCanvas.parentElement;
+   const legendContainer = document.createElement('div');
+   legendContainer.className = 'chart-legend-custom';
+   legendContainer.innerHTML = `
+
+
+
+   `;
+
+   chartContainer.insertBefore(legendContainer, chartCanvas);
+
+   // Handle legend button clicks
+   legendContainer.querySelectorAll('.legend-btn').forEach(btn => {
+       btn.addEventListener('click', function() {
+           const index = parseInt(this.getAttribute('data-index'));
+           const isActive = this.classList.contains('active');
+
+           // Toggle button state
+           this.classList.toggle('active');
+
+           // Toggle dataset visibility with fade effect
+           const meta = chart.getDatasetMeta(index);
+           const dataset = chart.data.datasets[index];
+
+           if (isActive) {
+               // Fade out
+               meta.hidden = true;
+               datasetVisibility[index] = false;
+           } else {
+               // Fade in
+               meta.hidden = false;
+               datasetVisibility[index] = true;
+           }
+
+           chart.update('active');
+       });
+   });
 });
diff --git a/infrastructure/web/public/stylesheets/custom.css b/infrastructure/web/public/stylesheets/custom.css
index 023584d..3b086c2 100644
--- a/infrastructure/web/public/stylesheets/custom.css
+++ b/infrastructure/web/public/stylesheets/custom.css
@@ -2494,6 +2494,20 @@ body.light-mode .theme-icon-light {
     margin-bottom: 3rem;
 }
 
+.stats-subtitle .purge-time-inline {
+    display: inline;
+}
+
+.stats-subtitle .purge-time-inline label,
+.stats-subtitle .purge-time-inline h4 {
+    display: inline;
+    margin: 0;
+    padding: 0;
+    font-size: inherit;
+    line-height: inherit;
+    font-weight: inherit;
+}
+
 .stats-grid {
     display: grid;
     grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
@@ -2551,6 +2565,77 @@
 }
 
 
+/* Custom legend for stats chart */
+
+.chart-legend-custom {
+    display: flex;
+    justify-content: center;
+    gap: 1rem;
+    margin-bottom: 2rem;
+    flex-wrap: wrap;
+}
+
+.legend-btn {
+    display: flex;
+    align-items: center;
+    gap: 0.5rem;
+    padding: 0.6rem 1.2rem;
+    background: var(--overlay-white-05);
+    border: 2px solid var(--overlay-purple-30);
+    border-radius: 8px;
+    color: var(--color-text-light);
+    font-size: 0.95rem;
+    font-weight: 500;
+    cursor: pointer;
+    transition: all 0.3s ease;
+    outline: none;
+}
+
+.legend-btn:hover {
+    background: var(--overlay-white-10);
+    border-color: var(--color-accent-purple);
+    transform: translateY(-2px);
+    box-shadow: 0 4px 12px rgba(155, 77, 202, 0.2);
+}
+
+.legend-btn.active {
+    background: var(--overlay-purple-20);
+    border-color: var(--color-accent-purple);
+}
+
+.legend-btn:not(.active) {
+    opacity: 0.4;
+    background: var(--overlay-white-02);
+}
+
+.legend-btn:not(.active):hover {
+    opacity: 0.6;
+}
+
+.legend-indicator {
+    width: 12px;
+    height: 12px;
+    border-radius: 3px;
+    flex-shrink: 0;
+    box-sizing: border-box;
+}
+
+.legend-label {
+    white-space: nowrap;
+}
+
+@media (max-width: 640px) {
+    .chart-legend-custom {
+        flex-direction: column;
+        gap: 0.75rem;
+    }
+    .legend-btn {
+        width: 100%;
+        justify-content: center;
+    }
+}
+
 
 /* Responsive Styles */
 
 @media (max-width: 768px) {
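The custom legend replaces Chart.js's built-in one, but toggling still goes through the standard dataset-meta API (`getDatasetMeta(i).hidden` plus `chart.update()`), so each button simply flips one of the three datasets built above. Reduced to its core, the click handler is roughly:

```js
// Core of the legend toggle used above: index 0 = Historical, 1 = Current Activity, 2 = Predicted.
function toggleDataset(chart, index, button) {
	const meta = chart.getDatasetMeta(index)
	meta.hidden = !meta.hidden          // hide or show that line
	button.classList.toggle('active')   // keep the button styling in sync
	chart.update('active')              // animate the transition
}
```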
diff --git a/infrastructure/web/routes/stats.js b/infrastructure/web/routes/stats.js
index c971921..95091cd 100644
--- a/infrastructure/web/routes/stats.js
+++ b/infrastructure/web/routes/stats.js
@@ -6,8 +6,15 @@ const debug = require('debug')('48hr-email:stats-routes')
 
 router.get('/', async(req, res) => {
 	try {
 		const config = req.app.get('config')
+
+		// Check if statistics are enabled
+		if (!config.http.statisticsEnabled) {
+			return res.status(404).send('Statistics are disabled')
+		}
+
 		const statisticsStore = req.app.get('statisticsStore')
 		const imapService = req.app.get('imapService')
+		const mailProcessingService = req.app.get('mailProcessingService')
 		const Helper = require('../../../application/helper')
 		const helper = new Helper()
@@ -17,10 +24,16 @@
 			statisticsStore.updateLargestUid(largestUid)
 		}
 
-		const stats = statisticsStore.getStats()
+		// Analyze all existing emails for historical data
+		if (mailProcessingService) {
+			const allMails = mailProcessingService.getAllMailSummaries()
+			statisticsStore.analyzeHistoricalData(allMails)
+		}
+
+		const stats = statisticsStore.getEnhancedStats()
 		const purgeTime = helper.purgeTimeElemetBuilder()
 
-		debug(`Stats page requested: ${stats.currentCount} current, ${stats.allTimeTotal} all-time total`)
+		debug(`Stats page requested: ${stats.currentCount} current, ${stats.allTimeTotal} all-time total, ${stats.historical.length} historical points`)
 
 		res.render('stats', {
 			title: `Statistics | ${config.http.branding[0]}`,
@@ -51,7 +64,8 @@ router.get('/api', async(req, res) => {
 			statisticsStore.updateLargestUid(largestUid)
 		}
 
-		const stats = statisticsStore.getStats()
+		// Use lightweight stats - no historical analysis on API calls
+		const stats = statisticsStore.getLightweightStats()
 
 		res.json(stats)
 	} catch (error) {
diff --git a/infrastructure/web/views/layout.twig b/infrastructure/web/views/layout.twig
index 6015f0d..e3e7566 100644
--- a/infrastructure/web/views/layout.twig
+++ b/infrastructure/web/views/layout.twig
@@ -92,7 +92,11 @@
 	{% block footer %}
 	{% endblock %}
diff --git a/infrastructure/web/views/loading.twig b/infrastructure/web/views/loading.twig
index 9d31553..15b5b34 100644
--- a/infrastructure/web/views/loading.twig
+++ b/infrastructure/web/views/loading.twig
@@ -2,6 +2,8 @@
 
 {% set bodyClass = 'loading-page' %}
 
+{% block title %}Loading... | {{ branding[0] }}{% endblock %}
+
 {% block header %}{% endblock %}
 {% block footer %}{% endblock %}
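With the route split above, the full page render pays for `analyzeHistoricalData()` at most once per cache window, while the API route stays cheap by returning `getLightweightStats()`. A client could poll it along these lines; the `/stats/api` path assumes the router is mounted at `/stats`, as the `.env` comment suggests:

```js
// Sketch of polling the lightweight endpoint; field names follow getLightweightStats() above.
async function refreshCounters() {
	const response = await fetch('/stats/api')
	if (!response.ok) return // bail out if statistics are unavailable

	const stats = await response.json()
	console.log(stats.currentCount, stats.allTimeTotal, stats.last24Hours.receives)
}

setInterval(refreshCounters, 60 * 1000)
```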

diff --git a/infrastructure/web/views/stats.twig b/infrastructure/web/views/stats.twig
index 3d78d19..1522d1f 100644
--- a/infrastructure/web/views/stats.twig
+++ b/infrastructure/web/views/stats.twig
@@ -34,7 +34,7 @@ {% block body %}
 	Email Statistics
-	Real-time email activity and 24-hour trends
+	Historical patterns, real-time activity, and predictions over {{ purgeTime|striptags }}
@@ -43,34 +43,34 @@
 	Emails in System
 	{{ stats.allTimeTotal }}
-	All Time Total
+	All-Time Total
 	{{ stats.last24Hours.receives }}
-	Received (24h)
+	Received
 	{{ stats.last24Hours.deletes }}
-	Deleted (24h)
+	Deleted
 	{{ stats.last24Hours.forwards }}
-	Forwarded (24h)
+	Forwarded
-	Activity Timeline (24 Hours)
+	Email Activity Timeline
@@ -78,5 +78,7 @@
 {% endblock %}
diff --git a/infrastructure/web/web.js b/infrastructure/web/web.js
index f289878..d9b6ac4 100644
--- a/infrastructure/web/web.js
+++ b/infrastructure/web/web.js
@@ -95,6 +95,7 @@ Twig.extendFilter('readablePurgeTime', readablePurgeTime)
 
 // Middleware to expose user session to all templates
 app.use((req, res, next) => {
 	res.locals.authEnabled = config.user.authEnabled
+	res.locals.config = config
 	res.locals.currentUser = null
 	if (req.session && req.session.userId && req.session.username && req.session.isAuthenticated) {
 		res.locals.currentUser = {
@@ -105,6 +106,21 @@ app.use((req, res, next) => {
 	next()
 })
 
+// Middleware to expose mail count to all templates
+app.use((req, res, next) => {
+	const mailProcessingService = req.app.get('mailProcessingService')
+	const Helper = require('../../application/helper')
+	const helper = new Helper()
+
+	if (mailProcessingService) {
+		const count = mailProcessingService.getCount()
+		res.locals.mailCount = helper.mailCountBuilder(count)
+	} else {
+		res.locals.mailCount = ''
+	}
+	next()
+})
+
 // Middleware to show loading page until IMAP is ready
 app.use((req, res, next) => {
 	const isImapReady = req.app.get('isImapReady')
diff --git a/package.json b/package.json
index 5228561..391f32d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "48hr.email",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "private": false,
   "description": "48hr.email is your favorite open-source tempmail client.",
   "keywords": [
@@ -68,17 +68,15 @@
       }
     ]
   },
-  "overrides": [
-    {
-      "files": "public/javascripts/*.js",
-      "esnext": false,
-      "env": [
-        "browser"
-      ],
-      "globals": [
-        "io"
-      ]
-    }
-  ]
+  "overrides": [{
+    "files": "public/javascripts/*.js",
+    "esnext": false,
+    "env": [
+      "browser"
+    ],
+    "globals": [
+      "io"
+    ]
+  }]
 }
 }
diff --git a/schema.sql b/schema.sql
index a30b2a5..60481de 100644
--- a/schema.sql
+++ b/schema.sql
@@ -44,6 +44,18 @@ CREATE INDEX IF NOT EXISTS idx_locked_inboxes_user_id ON user_locked_inboxes(use
 CREATE INDEX IF NOT EXISTS idx_locked_inboxes_address ON user_locked_inboxes(inbox_address);
 CREATE INDEX IF NOT EXISTS idx_locked_inboxes_last_accessed ON user_locked_inboxes(last_accessed);
 
+-- Statistics storage for persistence across restarts
+CREATE TABLE IF NOT EXISTS statistics (
+    id INTEGER PRIMARY KEY CHECK (id = 1),  -- Single row table
+    largest_uid INTEGER NOT NULL DEFAULT 0,
+    hourly_data TEXT,                       -- JSON array of 24h rolling data
+    last_updated INTEGER NOT NULL
+);
+
+-- Initialize with default row if not exists
+INSERT OR IGNORE INTO statistics (id, largest_uid, hourly_data, last_updated)
+VALUES (1, 0, '[]', 0);
+
 -- Trigger to enforce max 5 locked inboxes per user
 CREATE TRIGGER IF NOT EXISTS check_locked_inbox_limit
 BEFORE INSERT ON user_locked_inboxes
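The `CHECK (id = 1)` constraint makes `statistics` a single-row table, so the store only ever needs the `SELECT ... WHERE id = 1` / `UPDATE ... WHERE id = 1` pair shown earlier, and the `INSERT OR IGNORE` seed guarantees that row exists before the first update. A standalone sketch of the same round-trip, assuming better-sqlite3 (the synchronous `prepare().get()/run()` style the store uses); the database path and the sample hourly entry are illustrative only:

```js
const Database = require('better-sqlite3')
const db = new Database('stats-demo.db') // illustrative path, not from this patch

// Mirror schema.sql so the sketch is self-contained
db.exec(`CREATE TABLE IF NOT EXISTS statistics (
	id INTEGER PRIMARY KEY CHECK (id = 1),
	largest_uid INTEGER NOT NULL DEFAULT 0,
	hourly_data TEXT,
	last_updated INTEGER NOT NULL
);
INSERT OR IGNORE INTO statistics (id, largest_uid, hourly_data, last_updated) VALUES (1, 0, '[]', 0);`)

// Write: same shape as StatisticsStore._saveToDatabase()
const sample = [{ timestamp: Date.now(), receives: 1, deletes: 0, forwards: 0 }]
db.prepare('UPDATE statistics SET largest_uid = ?, hourly_data = ?, last_updated = ? WHERE id = 1')
	.run(4321, JSON.stringify(sample), Date.now())

// Read: same shape as StatisticsStore._loadFromDatabase()
const row = db.prepare('SELECT largest_uid, hourly_data, last_updated FROM statistics WHERE id = 1').get()
console.log(row.largest_uid, JSON.parse(row.hourly_data).length)
```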