Files
Mastermind/web/src/index.js

1216 lines
46 KiB
JavaScript

import 'dotenv/config';
import express from 'express';
import session from 'express-session';
import pg from 'pg';
import connectPgSimple from 'connect-pg-simple';
import helmet from 'helmet';
import rateLimit from 'express-rate-limit';
import passport from 'passport';
import { Strategy as LocalStrategy } from 'passport-local';
import bcrypt from 'bcryptjs';
import GoogleStrategy from 'passport-google-oauth20';
import AzureAdOAuth2Strategy from 'passport-azure-ad-oauth2';
import multer from 'multer';
import { simpleParser } from 'mailparser';
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { marked } from 'marked';
import MarkdownIt from 'markdown-it';
import { createMemoryPool } from './lib/memory-db.js';
import { applyRulesToEmail } from './lib/email-rules.js';
import {
buildPCOBody,
buildRFIBody,
extractDomain,
extractFirstJobNumber,
validatePassword
} from './lib/utils.js';
// ---- App + environment bootstrap ----
const { Pool } = pg;
const PgStore = connectPgSimple(session);
const app = express();
app.set('view engine', 'ejs');
app.set('views', new URL('./views', import.meta.url).pathname);
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// True when this file is the process entrypoint (vs. being imported, e.g. by tests).
const isMainModule = process.argv[1] && path.resolve(process.argv[1]) === __filename;
const PORT = process.env.PORT || 3005;
const BASE_URL = process.env.BASE_URL || `http://localhost:${PORT}`;
const isProd = process.env.NODE_ENV === 'production';
const trustProxy = process.env.TRUST_PROXY === '1' || process.env.TRUST_PROXY === 'true';
// No DATABASE_URL, or a memory:// URL, selects the in-process emulation from lib/memory-db.js.
const isMemoryDb = !process.env.DATABASE_URL || process.env.DATABASE_URL.startsWith('memory://');
const appRoot = process.env.APP_ROOT || fileURLToPath(new URL('../../', import.meta.url));
const dataRoot = process.env.DATA_DIR || path.join(appRoot, 'data');
if (trustProxy) app.set('trust proxy', 1);
// Refuse to boot in production with a missing/weak session secret.
if (isProd && (!process.env.SESSION_SECRET || process.env.SESSION_SECRET.length < 24)) {
throw new Error('SESSION_SECRET must be set (>=24 chars) in production');
}
// Host ("hostname:port") of the configured BASE_URL, or null when BASE_URL
// does not parse as a URL. Consumed by the same-origin POST check below.
function baseHost() {
  let parsed;
  try {
    parsed = new URL(BASE_URL);
  } catch {
    return null;
  }
  return parsed.host;
}
// Single shared DB handle: a real Postgres pool, or the in-memory emulation for dev/tests.
const pool = isMemoryDb ? createMemoryPool() : new Pool({ connectionString: process.env.DATABASE_URL });
app.use(
helmet({
// We use inline styles in EJS templates; keep CSP permissive but present.
contentSecurityPolicy: {
useDefaults: true,
directives: {
defaultSrc: ["'self'"],
imgSrc: ["'self'", 'data:'],
styleSrc: ["'self'", "'unsafe-inline'"],
scriptSrc: ["'self'"],
connectSrc: ["'self'"],
// Disallow all framing (clickjacking mitigation).
frameAncestors: ["'none'"]
}
},
crossOriginEmbedderPolicy: false
})
);
// Form posts (extended:false => querystring parser) and JSON bodies.
app.use(express.urlencoded({ extended: false }));
app.use(express.json());
// Basic CSRF mitigation: require same-origin POSTs
app.use((req, res, next) => {
  if (req.method !== 'POST') return next();
  // Allow OAuth callbacks (they are GET in our app anyway) and health checks
  const expectedHost = baseHost();
  if (!expectedHost) return next();
  // True when the header value parses as a URL on the configured host.
  const sameOrigin = (headerValue) => {
    if (!headerValue) return false;
    try {
      return new URL(headerValue).host === expectedHost;
    } catch {
      return false;
    }
  };
  const origin = req.get('origin');
  // Some clients omit Origin; allow Referer as fallback.
  const allowed = origin ? sameOrigin(origin) : sameOrigin(req.get('referer'));
  if (allowed) return next();
  return res.status(403).send('Blocked (CSRF)');
});
// On-disk layout for uploaded .eml files and extracted attachments.
const uploadDir = path.join(dataRoot, 'uploads');
const attachmentDir = path.join(dataRoot, 'attachments');
fs.mkdirSync(uploadDir, { recursive: true });
fs.mkdirSync(attachmentDir, { recursive: true });
// 50 MiB cap per uploaded file.
const upload = multer({ dest: uploadDir, limits: { fileSize: 50 * 1024 * 1024 } });
// Global request limiter plus stricter per-route limiters for login and upload.
app.use(rateLimit({ windowMs: 60_000, limit: 120 }));
const loginLimiter = rateLimit({ windowMs: 60_000, limit: 10 });
const uploadLimiter = rateLimit({ windowMs: 60_000, limit: 20 });
const sessionConfig = {
name: 'mm.sid',
// The 'change-me' fallback is dev-only: the boot check earlier in this file
// rejects a missing/short SESSION_SECRET in production.
secret: process.env.SESSION_SECRET || 'change-me',
resave: false,
saveUninitialized: false,
// Refresh cookie expiry on every request.
rolling: true,
cookie: {
httpOnly: true,
sameSite: 'lax',
secure: (process.env.COOKIE_SECURE === 'true') || BASE_URL.startsWith('https://'),
// 12 hours.
maxAge: 1000 * 60 * 60 * 12
}
};
// Persist sessions in Postgres when a real DB is configured; the memory DB
// falls back to express-session's default in-process store.
if (!isMemoryDb) {
sessionConfig.store = new PgStore({ pool, createTableIfMissing: true });
}
app.use(session(sessionConfig));
app.use(passport.initialize());
app.use(passport.session());
/**
 * Create every table and index used by the app if it does not exist yet.
 * Idempotent DDL, executed as a single multi-statement query from
 * initializeApp() at startup.
 * Primary keys rely on gen_random_uuid().
 * NOTE(review): the memory-db emulation receives this same DDL string —
 * assumed to accept multi-statement input; confirm in lib/memory-db.js.
 */
async function ensureSchema() {
await pool.query(`
create table if not exists users (
id uuid primary key default gen_random_uuid(),
email text unique,
display_name text,
role text not null default 'owner',
disabled boolean not null default false,
created_at timestamptz not null default now()
);
create table if not exists identities (
id uuid primary key default gen_random_uuid(),
user_id uuid not null references users(id) on delete cascade,
provider text not null,
provider_user_id text,
email text,
password_hash text,
created_at timestamptz not null default now(),
unique(provider, provider_user_id),
unique(provider, email)
);
create table if not exists app_settings (
key text primary key,
value jsonb not null
);
create table if not exists audit_logs (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
actor_user_id uuid references users(id) on delete set null,
actor_email text,
action text not null,
target_type text,
target_id text,
ip text,
user_agent text,
metadata jsonb not null default '{}'::jsonb
);
create table if not exists projects (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by_user_id uuid references users(id) on delete set null,
name text not null,
job_number text,
role_mode text not null default 'ec',
gc_name text,
address text,
city text,
state text,
postal_code text,
keywords text,
active boolean not null default true
);
create table if not exists project_members (
project_id uuid references projects(id) on delete cascade,
user_id uuid references users(id) on delete cascade,
project_role text not null default 'apm',
created_at timestamptz not null default now(),
primary key(project_id, user_id)
);
create table if not exists ingested_emails (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
created_by_user_id uuid references users(id) on delete set null,
source text not null default 'upload',
source_message_id text,
thread_key text,
from_addr text,
to_addr text,
cc_addr text,
subject text,
date timestamptz,
body_text text,
body_html text,
has_attachments boolean not null default false,
raw_path text,
sha256 text,
project_id uuid references projects(id) on delete set null,
confidence real,
status text not null default 'unsorted'
);
create table if not exists email_attachments (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
email_id uuid references ingested_emails(id) on delete cascade,
filename text,
content_type text,
size_bytes int,
sha256 text,
storage_path text
);
create index if not exists email_attachments_email_id_idx on email_attachments(email_id);
create index if not exists ingested_emails_project_id_idx on ingested_emails(project_id);
create index if not exists ingested_emails_status_idx on ingested_emails(status);
create table if not exists email_connectors (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
provider text not null, -- gmail|microsoft
enabled boolean not null default false,
configured boolean not null default false,
authorized boolean not null default false,
last_sync_at timestamptz,
last_error text,
unique(provider)
);
create table if not exists email_rules (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
created_by_user_id uuid references users(id) on delete set null,
enabled boolean not null default true,
priority int not null default 100,
project_id uuid references projects(id) on delete cascade,
match_type text not null, -- from_contains|from_domain|subject_contains|body_contains|thread_key
match_value text not null
);
create table if not exists pco_drafts (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by_user_id uuid references users(id) on delete set null,
project_id uuid references projects(id) on delete set null,
source_email_id uuid references ingested_emails(id) on delete set null,
status text not null default 'draft',
title text,
body text
);
create table if not exists rfi_drafts (
id uuid primary key default gen_random_uuid(),
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
created_by_user_id uuid references users(id) on delete set null,
project_id uuid references projects(id) on delete set null,
source_email_id uuid references ingested_emails(id) on delete set null,
status text not null default 'draft',
title text,
body text
);
create index if not exists email_rules_project_id_idx on email_rules(project_id);
`);
}
// Sessions store only the user id; the full user row is reloaded per request.
passport.serializeUser((user, done) => done(null, user.id));
passport.deserializeUser(async (id, done) => {
try {
const { rows } = await pool.query('select * from users where id=$1', [id]);
// A missing row (deleted account) deserializes to null => treated as logged out.
done(null, rows[0] || null);
} catch (e) {
done(e);
}
});
// A syntactically valid bcrypt hash that matches no real password. Comparing
// against it when the email is unknown keeps login latency roughly constant,
// so response timing does not reveal whether an account exists.
const DUMMY_PASSWORD_HASH = '$2a$12$R9h/cIPz0gi.URNNX3kh2OPST9/PgBkqquzi.Ss7KIUgO2t0jWMUW';
// Local email/password strategy. Matches case-insensitively against
// provider='local' identities; disabled accounts are rejected.
passport.use(
  new LocalStrategy({ usernameField: 'email' }, async (email, password, done) => {
    try {
      const { rows } = await pool.query(
        `select u.*, i.password_hash from users u
join identities i on i.user_id=u.id
where i.provider='local' and lower(i.email)=lower($1)
limit 1`,
        [email]
      );
      const row = rows[0];
      if (!row?.password_hash) {
        // Burn a bcrypt comparison anyway (user-enumeration timing mitigation);
        // the result is intentionally ignored.
        await bcrypt.compare(password, DUMMY_PASSWORD_HASH);
        return done(null, false, { message: 'Invalid login' });
      }
      if (row.disabled) return done(null, false, { message: 'Account disabled' });
      const ok = await bcrypt.compare(password, row.password_hash);
      if (!ok) return done(null, false, { message: 'Invalid login' });
      // Only the id is kept; deserializeUser reloads the fresh row per request.
      return done(null, { id: row.id });
    } catch (e) {
      return done(e);
    }
  })
);
// Google OAuth (optional)
// Registered only when credentials are present; /login hides the button otherwise.
if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
passport.use(
new GoogleStrategy.Strategy(
{
clientID: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
callbackURL: `${BASE_URL}/auth/google/callback`
},
async (_accessToken, _refreshToken, profile, done) => {
try {
const email = (profile.emails?.[0]?.value || '').toLowerCase();
const providerUserId = profile.id;
const displayName = profile.displayName || email;
// find identity
const { rows: idRows } = await pool.query(
`select user_id from identities where provider='google' and provider_user_id=$1 limit 1`,
[providerUserId]
);
let userId = idRows[0]?.user_id;
if (!userId) {
// create/get user by email
// NOTE(review): when Google returns no email, users.email is inserted as
// null and the on conflict(email) upsert cannot dedupe (nulls never
// conflict) — repeated first logins could create duplicate users; confirm.
const { rows: uRows } = await pool.query(
`insert into users(email, display_name) values ($1,$2)
on conflict(email) do update set display_name=excluded.display_name
returning id`,
[email || null, displayName]
);
userId = uRows[0].id;
await pool.query(
`insert into identities(user_id, provider, provider_user_id, email)
values ($1,'google',$2,$3)
on conflict(provider, provider_user_id) do nothing`,
[userId, providerUserId, email || null]
);
}
// Session payload mirrors the local strategy: id only.
return done(null, { id: userId });
} catch (e) {
return done(e);
}
}
)
);
}
// Microsoft OAuth (optional) via Azure AD v2
// Registered only when credentials are present; /login hides the button otherwise.
if (process.env.MICROSOFT_CLIENT_ID && process.env.MICROSOFT_CLIENT_SECRET) {
passport.use(
new AzureAdOAuth2Strategy(
{
clientID: process.env.MICROSOFT_CLIENT_ID,
clientSecret: process.env.MICROSOFT_CLIENT_SECRET,
callbackURL: `${BASE_URL}/auth/microsoft/callback`,
// for multi-tenant use 'common'; can be configured later
tenant: 'common',
resource: '00000003-0000-0000-c000-000000000000' // Microsoft Graph
},
async (_accessToken, _refreshToken, params, _profile, done) => {
try {
// id_token is a JWT; we can decode basic claims without verifying for MVP
// SECURITY NOTE: the signature is NOT verified here — claims are trusted
// as-is. Verify against the tenant JWKS before relying on this in production.
// (Node's 'base64' decoder also accepts the base64url alphabet JWTs use,
// per the Buffer documentation.)
const idToken = params?.id_token;
if (!idToken) return done(null, false);
const payload = JSON.parse(Buffer.from(idToken.split('.')[1], 'base64').toString('utf-8'));
const providerUserId = payload.oid || payload.sub;
const email = (payload.preferred_username || payload.upn || '').toLowerCase();
const displayName = payload.name || email;
const { rows: idRows } = await pool.query(
`select user_id from identities where provider='microsoft' and provider_user_id=$1 limit 1`,
[providerUserId]
);
let userId = idRows[0]?.user_id;
if (!userId) {
// Create/find the user by email, then attach the microsoft identity.
// NOTE(review): a null email bypasses the on conflict(email) dedupe — see
// the same caveat on the Google strategy; confirm.
const { rows: uRows } = await pool.query(
`insert into users(email, display_name) values ($1,$2)
on conflict(email) do update set display_name=excluded.display_name
returning id`,
[email || null, displayName]
);
userId = uRows[0].id;
await pool.query(
`insert into identities(user_id, provider, provider_user_id, email)
values ($1,'microsoft',$2,$3)
on conflict(provider, provider_user_id) do nothing`,
[userId, providerUserId, email || null]
);
}
return done(null, { id: userId });
} catch (e) {
return done(e);
}
}
)
);
}
// Gate for signed-in users: pass through when a session user exists,
// otherwise bounce to the login page.
function requireAuth(req, res, next) {
  if (!req.user) return res.redirect('/login');
  return next();
}
// Gate for owner-role users: unauthenticated => login redirect,
// authenticated non-owners => 403 Forbidden.
function requireOwner(req, res, next) {
  if (!req.user) return res.redirect('/login');
  return req.user.role === 'owner' ? next() : res.status(403).send('Forbidden');
}
// Unauthenticated liveness probe.
app.get('/health', (_req, res) => res.json({ ok: true }));
// Login page; OAuth buttons only appear for providers whose credentials are configured.
app.get('/login', (req, res) => {
  const googleEnabled = Boolean(process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET);
  const microsoftEnabled = Boolean(process.env.MICROSOFT_CLIENT_ID && process.env.MICROSOFT_CLIENT_SECRET);
  res.render('login', { googleEnabled, microsoftEnabled });
});
// Local email/password login. Uses a custom authenticate callback (instead of
// the redirect shorthand) so both success and failure reach the audit log.
app.post('/login', loginLimiter, (req, res, next) => {
passport.authenticate('local', async (err, user) => {
if (err) return next(err);
if (!user) {
await audit(req, 'auth.login_failed', { metadata: { provider: 'local', email: req.body.email } });
return res.redirect('/login');
}
req.logIn(user, async (e) => {
if (e) return next(e);
await audit(req, 'auth.login_success', { metadata: { provider: 'local' } });
return res.redirect('/');
});
})(req, res, next);
});
// Logout: audit first (while req.user is still populated), then clear the login.
app.get('/logout', async (req, res) => {
await audit(req, 'auth.logout');
req.logout(() => res.redirect('/login'));
});
// Google OAuth entry point + callback; mirrors the local login flow so
// successes and failures land in the audit log.
app.get('/auth/google', passport.authenticate('google', { scope: ['profile', 'email'] }));
app.get('/auth/google/callback', (req, res, next) => {
passport.authenticate('google', async (err, user) => {
if (err) return next(err);
if (!user) {
await audit(req, 'auth.login_failed', { metadata: { provider: 'google' } });
return res.redirect('/login');
}
req.logIn(user, async (e) => {
if (e) return next(e);
await audit(req, 'auth.login_success', { metadata: { provider: 'google' } });
return res.redirect('/');
});
})(req, res, next);
});
// Microsoft OAuth entry point + callback. Scope requests Mail.Read up front
// for the mailbox connector feature.
app.get('/auth/microsoft', passport.authenticate('azure_ad_oauth2', { scope: ['openid', 'profile', 'email', 'offline_access', 'Mail.Read'] }));
app.get('/auth/microsoft/callback', (req, res, next) => {
passport.authenticate('azure_ad_oauth2', async (err, user) => {
if (err) return next(err);
if (!user) {
await audit(req, 'auth.login_failed', { metadata: { provider: 'microsoft' } });
return res.redirect('/login');
}
req.logIn(user, async (e) => {
if (e) return next(e);
await audit(req, 'auth.login_success', { metadata: { provider: 'microsoft' } });
return res.redirect('/');
});
})(req, res, next);
});
/**
 * Best-effort audit trail writer. Records actor, action, optional target,
 * and request context. Never throws: a failed audit insert must not break
 * the request that triggered it.
 */
async function audit(req, action, { targetType = null, targetId = null, metadata = {} } = {}) {
  try {
    const actor = req.user || null;
    const params = [
      actor?.id || null,
      actor?.email || null,
      action,
      targetType,
      targetId,
      req.ip || null,
      req.get('user-agent') || null,
      metadata
    ];
    await pool.query(
      `insert into audit_logs(actor_user_id, actor_email, action, target_type, target_id, ip, user_agent, metadata)
values ($1,$2,$3,$4,$5,$6,$7,$8)`,
      params
    );
  } catch (_) {
    // Swallowed on purpose: auditing is advisory, not transactional.
  }
}
// Dashboard (signed-in users only).
app.get('/', requireAuth, async (req, res) => res.render('home', { user: req.user, baseUrl: BASE_URL }));
// Guided setup page.
app.get('/setup', requireAuth, (req, res) => res.render('setup', { baseUrl: BASE_URL }));
// Projects
// List only projects the requester is a member of, newest activity first.
app.get('/projects', requireAuth, async (req, res) => {
const { rows } = await pool.query(
`select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
order by p.updated_at desc`,
[req.user.id]
);
res.render('projects', { projects: rows });
});
// Create a project and enroll the creator as its 'owner' member.
app.post('/projects/create', requireAuth, async (req, res) => {
const name = (req.body.name || '').trim();
const jobNumber = (req.body.jobNumber || '').trim();
const roleMode = (req.body.roleMode || 'ec').trim();
const gcName = (req.body.gcName || '').trim();
const city = (req.body.city || '').trim();
const state = (req.body.state || '').trim();
const keywords = (req.body.keywords || '').trim();
// Name is the only required field; empty optionals are stored as null.
if (!name) return res.status(400).send('name required');
const { rows } = await pool.query(
`insert into projects(created_by_user_id,name,job_number,role_mode,gc_name,city,state,keywords)
values ($1,$2,$3,$4,$5,$6,$7,$8)
returning id`,
[req.user.id, name, jobNumber || null, roleMode, gcName || null, city || null, state || null, keywords || null]
);
const projectId = rows[0].id;
await pool.query(
`insert into project_members(project_id,user_id,project_role) values ($1,$2,'owner')
on conflict do nothing`,
[projectId, req.user.id]
);
await audit(req, 'project.created', { targetType: 'project', targetId: projectId, metadata: { name, jobNumber, roleMode } });
res.redirect(`/projects/${projectId}`);
});
// Project detail page, visible only to members of the project.
app.get('/projects/:id', requireAuth, async (req, res) => {
  const { rows } = await pool.query(
    `select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.id=$2
limit 1`,
    [req.user.id, req.params.id]
  );
  const project = rows[0];
  if (!project) return res.status(404).send('Not found');
  res.render('project_detail', { project });
});
// Per-project inbox: membership is verified via the join before listing
// the latest 300 assigned emails (dated ones first).
app.get('/projects/:id/inbox', requireAuth, async (req, res) => {
const projectId = req.params.id;
const { rows: pr } = await pool.query(
`select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.id=$2
limit 1`,
[req.user.id, projectId]
);
if (!pr[0]) return res.status(404).send('Not found');
const { rows: emails } = await pool.query(
`select id, from_addr, subject, date, source
from ingested_emails
where project_id=$1
order by date desc nulls last, created_at desc
limit 300`,
[projectId]
);
res.render('project_inbox', { project: pr[0], emails });
});
// Update core project fields. Membership is verified first so a user cannot
// modify a project they do not belong to (previously any authenticated user
// could update any project by id — inconsistent with the GET routes).
app.post('/projects/:id/update', requireAuth, async (req, res) => {
  const projectId = req.params.id;
  const { rows: member } = await pool.query(
    `select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.id=$2
limit 1`,
    [req.user.id, projectId]
  );
  if (!member[0]) return res.status(404).send('Not found');
  const name = (req.body.name || '').trim();
  const jobNumber = (req.body.jobNumber || '').trim();
  const roleMode = (req.body.roleMode || 'ec').trim();
  const gcName = (req.body.gcName || '').trim();
  const keywords = (req.body.keywords || '').trim();
  // Same requirement as project creation: name must be non-empty
  // (projects.name is NOT NULL in the schema).
  if (!name) return res.status(400).send('name required');
  await pool.query(
    `update projects
set name=$1, job_number=$2, role_mode=$3, gc_name=$4, keywords=$5, updated_at=now()
where id=$6`,
    [name, jobNumber || null, roleMode, gcName || null, keywords || null, projectId]
  );
  await audit(req, 'project.updated', { targetType: 'project', targetId: projectId, metadata: { name, jobNumber, roleMode } });
  res.redirect(`/projects/${projectId}`);
});
// Toggle a project's active flag. Membership is verified first (previously
// any authenticated user could toggle any project by id).
app.post('/projects/:id/toggle', requireAuth, async (req, res) => {
  const projectId = req.params.id;
  const { rows: member } = await pool.query(
    `select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.id=$2
limit 1`,
    [req.user.id, projectId]
  );
  if (!member[0]) return res.status(404).send('Not found');
  await pool.query('update projects set active = not active, updated_at=now() where id=$1', [projectId]);
  const { rows } = await pool.query('select active from projects where id=$1', [projectId]);
  await audit(req, 'project.toggled', { targetType: 'project', targetId: projectId, metadata: { active: rows[0]?.active } });
  res.redirect('/projects');
});
// Inbox (manual import until OAuth)
// Global unsorted queue: ingested emails are org-wide, not per-user. The
// project dropdown offers only the requester's own active projects.
app.get('/inbox', requireAuth, async (req, res) => {
const { rows: projects } = await pool.query(
`select p.id, p.name, p.job_number
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.active=true
order by p.updated_at desc`,
[req.user.id]
);
const { rows: emails } = await pool.query(
`select id, from_addr, subject, date, status
from ingested_emails
where status='unsorted'
order by created_at desc
limit 200`
);
res.render('inbox', { emails, projects });
});
// Manual .eml import (batch of up to 50 files). Each file is parsed, stored
// (message row + attachment files/rows), audited, and then run through the
// enabled routing rules for auto-assignment.
app.post('/inbox/upload', requireAuth, uploadLimiter, upload.array('emls', 50), async (req, res) => {
  const source = (req.body.source || 'upload').trim();
  const files = req.files || [];
  // Fetch the rule set once for the whole batch.
  const { rows: rules } = await pool.query(
    `select * from email_rules where enabled=true order by priority asc, created_at asc`
  );
  for (const f of files) {
    // Async I/O: files can be up to 50 MiB, and the previous readFileSync /
    // writeFileSync calls blocked the event loop for every other request.
    const raw = await fs.promises.readFile(f.path);
    const sha256 = crypto.createHash('sha256').update(raw).digest('hex');
    const parsed = await simpleParser(raw);
    const fromAddr = parsed.from?.text || '';
    const toAddr = parsed.to?.text || '';
    const ccAddr = parsed.cc?.text || '';
    const subject = parsed.subject || '';
    const date = parsed.date ? new Date(parsed.date) : null;
    // Cap stored body sizes at 200k characters.
    const bodyText = (parsed.text || '').slice(0, 200000);
    const bodyHtml = (parsed.html ? String(parsed.html) : '').slice(0, 200000);
    const hasAttachments = (parsed.attachments || []).length > 0;
    // insert as unsorted first
    const { rows } = await pool.query(
      `insert into ingested_emails(created_by_user_id, source, from_addr, to_addr, cc_addr, subject, date, body_text, body_html, has_attachments, raw_path, sha256, status)
values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,'unsorted')
returning *`,
      [req.user.id, source, fromAddr, toAddr, ccAddr, subject, date, bodyText, bodyHtml, hasAttachments, f.path, sha256]
    );
    const emailRow = rows[0];
    // persist attachments (if any)
    for (const att of (parsed.attachments || [])) {
      const buf = att.content;
      const attSha = crypto.createHash('sha256').update(buf).digest('hex');
      // Sanitize the sender-controlled filename before it touches the filesystem.
      const safeName = (att.filename || 'attachment').replace(/[^A-Za-z0-9._-]+/g, '_');
      const outPath = `${attachmentDir}/${emailRow.id}_${attSha}_${safeName}`;
      await fs.promises.writeFile(outPath, buf);
      await pool.query(
        `insert into email_attachments(email_id, filename, content_type, size_bytes, sha256, storage_path)
values ($1,$2,$3,$4,$5,$6)`,
        [emailRow.id, att.filename || safeName, att.contentType || null, buf.length, attSha, outPath]
      );
    }
    await audit(req, 'inbox.email_imported', { targetType: 'email', targetId: emailRow.id, metadata: { source, subject, attachments: (parsed.attachments||[]).length } });
    // Auto-assign using the first matching rule, if any.
    const match = await applyRulesToEmail(emailRow, rules);
    if (match?.projectId) {
      await pool.query(
        `update ingested_emails set project_id=$1, status='assigned', confidence=$2 where id=$3`,
        [match.projectId, match.confidence, emailRow.id]
      );
      await audit(req, 'inbox.email_auto_assigned', { targetType: 'email', targetId: emailRow.id, metadata: { projectId: match.projectId, ruleId: match.ruleId } });
    }
  }
  res.redirect('/inbox');
});
// Email detail with its stored attachments.
// NOTE(review): not project/member-scoped — any signed-in user can read any
// ingested email. Consistent with the shared-inbox model above, but confirm
// this is intended.
app.get('/inbox/:id', requireAuth, async (req, res) => {
const emailId = req.params.id;
const { rows } = await pool.query('select * from ingested_emails where id=$1 limit 1', [emailId]);
if (!rows[0]) return res.status(404).send('Not found');
const { rows: attachments } = await pool.query('select * from email_attachments where email_id=$1 order by created_at asc', [emailId]);
res.render('inbox_email', { email: rows[0], attachments });
});
// Manually assign an unsorted email to a project. The inbox dropdown only
// offers the requester's projects, but projectId arrives from the client,
// so membership is re-checked server-side before assignment.
app.post('/inbox/:id/assign', requireAuth, async (req, res) => {
  const emailId = req.params.id;
  const projectId = (req.body.projectId || '').trim();
  if (!projectId) return res.redirect('/inbox');
  const { rows: member } = await pool.query(
    `select p.*
from projects p
join project_members pm on pm.project_id=p.id and pm.user_id=$1
where p.id=$2
limit 1`,
    [req.user.id, projectId]
  );
  if (!member[0]) return res.status(403).send('Forbidden');
  await pool.query(
    `update ingested_emails set project_id=$1, status='assigned', confidence=1.0 where id=$2`,
    [projectId, emailId]
  );
  await audit(req, 'inbox.email_assigned', { targetType: 'email', targetId: emailId, metadata: { projectId } });
  res.redirect('/inbox');
});
// Create rule from an email (from domain)
// Owner-only: derives a from_domain routing rule (priority 50) from the
// email's sender address and points it at the chosen project.
app.post('/inbox/:id/rule/from_domain', requireOwner, async (req, res) => {
const emailId = req.params.id;
const projectId = (req.body.projectId || '').trim();
const { rows } = await pool.query('select from_addr from ingested_emails where id=$1', [emailId]);
const fromAddr = rows[0]?.from_addr || '';
const domain = extractDomain(fromAddr);
// Silently bounce back when there is nothing to build a rule from.
if (!projectId || !domain) return res.redirect('/inbox');
const { rows: r } = await pool.query(
`insert into email_rules(created_by_user_id, project_id, match_type, match_value, priority)
values ($1,$2,'from_domain',$3,50)
returning id`,
[req.user.id, projectId, domain]
);
await audit(req, 'inbox.rule_created', { targetType: 'email_rule', targetId: r[0].id, metadata: { emailId, projectId, matchType: 'from_domain', matchValue: domain } });
res.redirect('/admin/email-rules');
});
// Create rule from an email (subject job number)
// Owner-only: extracts the first job-number-looking token from the subject
// and creates a subject_contains rule (priority 40 — ahead of from_domain).
app.post('/inbox/:id/rule/subject_job', requireOwner, async (req, res) => {
const emailId = req.params.id;
const projectId = (req.body.projectId || '').trim();
const { rows } = await pool.query('select subject from ingested_emails where id=$1', [emailId]);
const subject = rows[0]?.subject || '';
const job = extractFirstJobNumber(subject);
// Silently bounce back when there is nothing to build a rule from.
if (!projectId || !job) return res.redirect('/inbox');
const { rows: r } = await pool.query(
`insert into email_rules(created_by_user_id, project_id, match_type, match_value, priority)
values ($1,$2,'subject_contains',$3,40)
returning id`,
[req.user.id, projectId, job]
);
await audit(req, 'inbox.rule_created', { targetType: 'email_rule', targetId: r[0].id, metadata: { emailId, projectId, matchType: 'subject_contains', matchValue: job } });
res.redirect('/admin/email-rules');
});
// Persist the externally reachable base URL. This is a global app setting,
// so it is now restricted to owners (it was requireAuth, which let any
// authenticated member change an app-wide value).
app.post('/setup/base-url', requireOwner, async (req, res) => {
  const baseUrl = req.body.baseUrl?.trim();
  if (!baseUrl) return res.status(400).send('baseUrl required');
  await pool.query(
    `insert into app_settings(key,value) values('base_url',$1)
on conflict(key) do update set value=excluded.value`,
    [JSON.stringify({ baseUrl })]
  );
  res.redirect('/setup');
});
let initialized = false;
/**
 * One-time startup hook: create/upgrade the schema, then bootstrap a local
 * owner account from BOOTSTRAP_OWNER_EMAIL / BOOTSTRAP_OWNER_PASSWORD when
 * no local identity exists yet. Subsequent calls are no-ops.
 */
export async function initializeApp() {
if (initialized) return;
await ensureSchema();
// Backfill schema changes (safe alters)
await pool.query(`alter table users add column if not exists disabled boolean not null default false;`);
try {
await pool.query(`alter table identities add constraint identities_provider_email_unique unique (provider, email);`);
} catch (_) {
// constraint may already exist
}
// Create an initial local owner account if none exists (bootstrap)
// For safety, we do NOT create a default owner/owner in production.
const { rows: existing } = await pool.query("select count(*)::int as c from identities where provider='local'");
if (existing[0].c === 0) {
const email = (process.env.BOOTSTRAP_OWNER_EMAIL || '').trim().toLowerCase();
const password = (process.env.BOOTSTRAP_OWNER_PASSWORD || '').trim();
if (!email || !password) {
console.warn('No local identities exist. Set BOOTSTRAP_OWNER_EMAIL and BOOTSTRAP_OWNER_PASSWORD to create the initial owner account.');
} else {
// NOTE(review): the bootstrap password is not run through validatePassword
// (unlike the admin create/reset routes) — confirm this is intended.
const hash = await bcrypt.hash(password, 12);
const { rows } = await pool.query(
"insert into users(email, display_name, role) values ($1,'Owner','owner') returning id",
[email]
);
await pool.query(
"insert into identities(user_id, provider, email, password_hash) values ($1,'local',$2,$3)",
[rows[0].id, email, hash]
);
console.log(`Created bootstrap local owner: ${email} (change password ASAP)`);
}
}
initialized = true;
}
export { app };
// Admin: user management
// List all users with a comma-joined summary of their identity providers.
// (Grouping by the primary key u.id lets the other u.* columns be selected.)
app.get('/admin/users', requireOwner, async (_req, res) => {
const { rows } = await pool.query(`
select u.id, u.email, u.display_name, u.role, u.disabled,
coalesce(string_agg(distinct i.provider, ','), '') as providers
from users u
left join identities i on i.user_id=u.id
group by u.id
order by u.created_at desc
`);
res.render('admin_users', { users: rows });
});
// Create (or upsert by email) a user plus a local password identity.
// The temp password must satisfy the shared strength policy.
app.post('/admin/users/create', requireOwner, async (req, res) => {
const email = (req.body.email || '').trim().toLowerCase();
const displayName = (req.body.displayName || '').trim();
const role = (req.body.role || 'apm').trim();
const tempPassword = (req.body.tempPassword || '').trim();
if (!email || !tempPassword) return res.status(400).send('email and tempPassword required');
// validatePassword returns an error message string, or falsy when acceptable.
const pwErr = validatePassword(tempPassword);
if (pwErr) return res.status(400).send(pwErr);
const hash = await bcrypt.hash(tempPassword, 12);
// Re-running for an existing email updates display name and role.
const { rows } = await pool.query(
`insert into users(email, display_name, role) values ($1,$2,$3)
on conflict(email) do update set display_name=excluded.display_name, role=excluded.role
returning id`,
[email, displayName || email, role]
);
const userId = rows[0].id;
// Upsert the local identity; an existing one gets the new hash.
await pool.query(
`insert into identities(user_id, provider, email, password_hash) values ($1,'local',$2,$3)
on conflict(provider, email) do update set password_hash=excluded.password_hash, user_id=excluded.user_id`,
[userId, email, hash]
);
await audit(req, 'admin.user_created', { targetType: 'user', targetId: userId, metadata: { email, role } });
res.redirect('/admin/users');
});
// Admin password reset: upserts the user's local identity with a new hash
// (also creating one for OAuth-only users). New password must pass policy.
app.post('/admin/users/:id/reset', requireOwner, async (req, res) => {
const userId = req.params.id;
const newPassword = (req.body.newPassword || '').trim();
if (!newPassword) return res.redirect('/admin/users');
const pwErr = validatePassword(newPassword);
if (pwErr) return res.status(400).send(pwErr);
const hash = await bcrypt.hash(newPassword, 12);
const { rows } = await pool.query('select email from users where id=$1', [userId]);
const email = rows[0]?.email;
// Users without an email cannot get a local identity (conflict target needs it).
if (!email) return res.redirect('/admin/users');
await pool.query(
`insert into identities(user_id, provider, email, password_hash) values ($1,'local',$2,$3)
on conflict(provider, email) do update set password_hash=excluded.password_hash, user_id=excluded.user_id`,
[userId, email, hash]
);
await audit(req, 'admin.password_reset', { targetType: 'user', targetId: userId, metadata: { provider: 'local' } });
res.redirect('/admin/users');
});
// Flip a user's disabled flag; the follow-up select captures the new state
// for the audit log. (Could be one `update ... returning` — left as two
// queries since memory-db support for `returning` on update is unconfirmed.)
app.post('/admin/users/:id/toggle', requireOwner, async (req, res) => {
const userId = req.params.id;
await pool.query('update users set disabled = not disabled where id=$1', [userId]);
const { rows } = await pool.query('select disabled,email from users where id=$1', [userId]);
await audit(req, 'admin.user_toggled', { targetType: 'user', targetId: userId, metadata: { disabled: rows[0]?.disabled, email: rows[0]?.email } });
res.redirect('/admin/users');
});
// Hard-delete a user (identities cascade via FK). Self-deletion is rejected:
// it would orphan the current session and can lock out the last owner.
app.post('/admin/users/:id/delete', requireOwner, async (req, res) => {
  const userId = req.params.id;
  if (userId === String(req.user.id)) return res.status(400).send('Cannot delete your own account');
  // Capture email/role before the row disappears, for the audit record.
  const { rows } = await pool.query('select email,role from users where id=$1', [userId]);
  await pool.query('delete from users where id=$1', [userId]);
  await audit(req, 'admin.user_deleted', { targetType: 'user', targetId: userId, metadata: { email: rows[0]?.email, role: rows[0]?.role } });
  res.redirect('/admin/users');
});
const md = new MarkdownIt({ html: false, linkify: true, breaks: true });
/**
 * Ensure a connector row exists for the provider and mirror whether its
 * OAuth credentials are present in the environment ('configured').
 */
async function upsertConnector(provider) {
  const configured = (provider === 'gmail')
    ? Boolean(process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET)
    : Boolean(process.env.MICROSOFT_CLIENT_ID && process.env.MICROSOFT_CLIENT_SECRET);
  await pool.query(
    `insert into email_connectors(provider, configured) values ($1,$2)
on conflict(provider) do update set configured=excluded.configured, updated_at=now()`,
    [provider, configured]
  );
}
// Best-effort sync at module load. Wrapped in try/catch because on a fresh
// database this top-level await runs BEFORE initializeApp()/ensureSchema()
// has created email_connectors, and an unhandled rejection here would crash
// the import. /admin/email re-runs the upsert on demand anyway.
try {
  await upsertConnector('gmail');
  await upsertConnector('microsoft');
} catch (e) {
  console.warn('email connector sync skipped at startup:', e.message);
}
// Account: change password
app.get('/account/password', requireAuth, (req, res) => {
  res.render('account_password');
});
// Change (or create, for OAuth-only accounts) the caller's local password.
app.post('/account/password', requireAuth, async (req, res) => {
  const currentPassword = (req.body.currentPassword || '').trim();
  const newPassword = (req.body.newPassword || '').trim();
  const confirmPassword = (req.body.confirmPassword || '').trim();
  if (!newPassword || newPassword !== confirmPassword) return res.status(400).send('Password mismatch');
  // Enforce the same strength policy as the admin create/reset routes
  // (previously self-service changes skipped validatePassword entirely).
  const pwErr = validatePassword(newPassword);
  if (pwErr) return res.status(400).send(pwErr);
  // do they have a local identity?
  const { rows: idRows } = await pool.query(
    `select password_hash from identities where user_id=$1 and provider='local' limit 1`,
    [req.user.id]
  );
  if (idRows[0]?.password_hash) {
    // An existing local password must be re-proven before it can be changed.
    const ok = currentPassword ? await bcrypt.compare(currentPassword, idRows[0].password_hash) : false;
    if (!ok) return res.status(400).send('Current password incorrect');
  }
  const hash = await bcrypt.hash(newPassword, 12);
  // NOTE(review): assumes req.user.email is set; a null email would bypass the
  // (provider,email) conflict target — confirm for OAuth accounts without email.
  const email = req.user.email;
  await pool.query(
    `insert into identities(user_id, provider, email, password_hash) values ($1,'local',$2,$3)
on conflict(provider, email) do update set password_hash=excluded.password_hash, user_id=excluded.user_id`,
    [req.user.id, email, hash]
  );
  await audit(req, 'account.password_changed', { targetType: 'user', targetId: req.user.id, metadata: { provider: 'local' } });
  res.redirect('/');
});
// Admin: email connector settings. Re-sync the connector rows with env on
// each view so the `configured` flag is always current.
app.get('/admin/email', requireOwner, async (_req, res) => {
  // The two provider upserts are independent — run them concurrently
  // instead of awaiting one after the other.
  await Promise.all([upsertConnector('gmail'), upsertConnector('microsoft')]);
  const { rows } = await pool.query(`select * from email_connectors order by provider asc`);
  res.render('email_settings', { connectors: rows });
});
// Admin: flip a connector's enabled flag and audit the resulting state.
app.post('/admin/email-connectors/:provider/toggle', requireOwner, async (req, res) => {
  const { provider } = req.params;
  await pool.query(`update email_connectors set enabled = not enabled, updated_at=now() where provider=$1`, [provider]);
  const after = await pool.query(`select enabled, configured, authorized from email_connectors where provider=$1`, [provider]);
  await audit(req, 'admin.email_connector_toggled', {
    targetType: 'connector',
    targetId: provider,
    metadata: after.rows[0] || {}
  });
  res.redirect('/admin/email');
});
// In-app docs: repo markdown files served through the /docs viewer.
// slug = lowercased title, file = "<TITLE>.md" at the app root.
const DOCS = ['README', 'INSTALL', 'OPERATIONS', 'DEVELOPMENT', 'CHANGELOG'].map((title) => ({
  slug: title.toLowerCase(),
  title,
  path: path.join(appRoot, `${title}.md`)
}));
// Docs index redirects to the README page.
app.get('/docs', requireAuth, (req, res) => res.redirect('/docs/readme'));
// Render one in-app doc. Unknown slugs fall back to the first doc (README)
// rather than 404ing, so stale links still land somewhere useful.
app.get('/docs/:slug', requireAuth, async (req, res) => {
  const current = DOCS.find((d) => d.slug === req.params.slug) || DOCS[0];
  // Async read: readFileSync here would block the event loop on every
  // request. (Also renamed the local from `md`, which shadowed the
  // module-level MarkdownIt instance.)
  const markdown = await fs.promises.readFile(current.path, 'utf-8');
  const html = marked.parse(markdown);
  res.render('docs', { docs: DOCS, current, html });
});
// Attachments download: streams the stored file back with its original
// metadata. The filename is stripped of quotes and CR/LF so the
// Content-Disposition header cannot be malformed by a crafted name.
app.get('/attachments/:id', requireAuth, async (req, res) => {
  const { rows } = await pool.query('select * from email_attachments where id=$1', [req.params.id]);
  const attachment = rows[0];
  if (!attachment) return res.status(404).send('Not found');
  const safeName = (attachment.filename || 'attachment').replace(/["\r\n]/g, '');
  res.setHeader('Content-Type', attachment.content_type || 'application/octet-stream');
  res.setHeader('Content-Disposition', `attachment; filename="${safeName}"`);
  const stream = fs.createReadStream(attachment.storage_path);
  // Without an 'error' handler, a missing/unreadable file on disk raises an
  // unhandled stream error and crashes the process.
  stream.on('error', () => {
    if (!res.headersSent) res.status(404).send('Not found');
    else res.destroy();
  });
  stream.pipe(res);
});
// Drafts list: the 200 most recently updated PCO and RFI drafts.
app.get('/drafts', requireAuth, async (req, res) => {
  // The two queries are independent — fetch them in parallel instead of
  // awaiting them one after the other.
  const [{ rows: pcos }, { rows: rfis }] = await Promise.all([
    pool.query(
      `select d.*, p.name as project_name
       from pco_drafts d left join projects p on p.id=d.project_id
       order by d.updated_at desc limit 200`
    ),
    pool.query(
      `select d.*, p.name as project_name
       from rfi_drafts d left join projects p on p.id=d.project_id
       order by d.updated_at desc limit 200`
    )
  ]);
  res.render('drafts_list', { pcos, rfis });
});
// Create a PCO draft pre-filled from an ingested email (and its project,
// when the email has been routed to one).
app.post('/drafts/pco/from-email', requireAuth, async (req, res) => {
  const emailId = (req.body.emailId || '').trim();
  const emailResult = await pool.query('select * from ingested_emails where id=$1', [emailId]);
  const email = emailResult.rows[0];
  if (!email) return res.status(404).send('Email not found');
  let project = null;
  if (email.project_id) {
    const projectResult = await pool.query('select * from projects where id=$1', [email.project_id]);
    project = projectResult.rows[0] || null;
  }
  const title = `PCO: ${email.subject || 'Untitled'}`.slice(0, 200);
  const inserted = await pool.query(
    `insert into pco_drafts(created_by_user_id, project_id, source_email_id, title, body)
     values ($1,$2,$3,$4,$5) returning id`,
    [req.user.id, email.project_id || null, emailId, title, buildPCOBody(email, project)]
  );
  const draftId = inserted.rows[0].id;
  await audit(req, 'draft.pco_created', { targetType: 'pco_draft', targetId: draftId, metadata: { emailId } });
  res.redirect(`/drafts/pco/${draftId}`);
});
// Create an RFI draft pre-filled from an ingested email (and its project,
// when the email has been routed to one).
app.post('/drafts/rfi/from-email', requireAuth, async (req, res) => {
  const emailId = (req.body.emailId || '').trim();
  const emailResult = await pool.query('select * from ingested_emails where id=$1', [emailId]);
  const email = emailResult.rows[0];
  if (!email) return res.status(404).send('Email not found');
  let project = null;
  if (email.project_id) {
    const projectResult = await pool.query('select * from projects where id=$1', [email.project_id]);
    project = projectResult.rows[0] || null;
  }
  const title = `RFI: ${email.subject || 'Untitled'}`.slice(0, 200);
  const inserted = await pool.query(
    `insert into rfi_drafts(created_by_user_id, project_id, source_email_id, title, body)
     values ($1,$2,$3,$4,$5) returning id`,
    [req.user.id, email.project_id || null, emailId, title, buildRFIBody(email, project)]
  );
  const draftId = inserted.rows[0].id;
  await audit(req, 'draft.rfi_created', { targetType: 'rfi_draft', targetId: draftId, metadata: { emailId } });
  res.redirect(`/drafts/rfi/${draftId}`);
});
// Draft edit view (PCO): load the draft plus its source email and project
// for context, and render a markdown preview of the body.
app.get('/drafts/pco/:id', requireAuth, async (req, res) => {
  const d = await pool.query('select * from pco_drafts where id=$1', [req.params.id]);
  const draft = d.rows[0];
  if (!draft) return res.status(404).send('Not found');
  // The email and project lookups are independent — run them concurrently.
  const [e, p] = await Promise.all([
    draft.source_email_id
      ? pool.query('select * from ingested_emails where id=$1', [draft.source_email_id])
      : Promise.resolve({ rows: [] }),
    draft.project_id
      ? pool.query('select * from projects where id=$1', [draft.project_id])
      : Promise.resolve({ rows: [] })
  ]);
  const email = e.rows[0] || null;
  const project = p.rows[0] || null;
  const previewHtml = md.render(draft.body || '');
  res.render('draft_edit', { kind: 'pco', draft, email, project, previewHtml });
});
// Persist edits to a PCO draft (title/status/body) and bump updated_at.
app.post('/drafts/pco/:id/save', requireAuth, async (req, res) => {
  const draftId = req.params.id;
  const title = (req.body.title || '').trim();
  const status = (req.body.status || 'draft').trim();
  const body = (req.body.body || '').trim();
  await pool.query(
    'update pco_drafts set title=$1, status=$2, body=$3, updated_at=now() where id=$4',
    [title, status, body, draftId]
  );
  await audit(req, 'draft.pco_saved', { targetType: 'pco_draft', targetId: draftId, metadata: { status } });
  res.redirect(`/drafts/pco/${draftId}`);
});
// Download a PCO draft body as a markdown attachment.
app.get('/drafts/pco/:id/export.md', requireAuth, async (req, res) => {
  const draftId = req.params.id;
  const result = await pool.query('select * from pco_drafts where id=$1', [draftId]);
  const draft = result.rows[0];
  if (!draft) return res.status(404).send('Not found');
  res.setHeader('Content-Type', 'text/markdown; charset=utf-8');
  res.setHeader('Content-Disposition', `attachment; filename="PCO_${draftId}.md"`);
  await audit(req, 'draft.pco_exported', { targetType: 'pco_draft', targetId: draftId, metadata: { format: 'md' } });
  res.send(draft.body || '');
});
// Draft edit view (RFI): load the draft plus its source email and project
// for context, and render a markdown preview of the body.
app.get('/drafts/rfi/:id', requireAuth, async (req, res) => {
  const d = await pool.query('select * from rfi_drafts where id=$1', [req.params.id]);
  const draft = d.rows[0];
  if (!draft) return res.status(404).send('Not found');
  // The email and project lookups are independent — run them concurrently.
  const [e, p] = await Promise.all([
    draft.source_email_id
      ? pool.query('select * from ingested_emails where id=$1', [draft.source_email_id])
      : Promise.resolve({ rows: [] }),
    draft.project_id
      ? pool.query('select * from projects where id=$1', [draft.project_id])
      : Promise.resolve({ rows: [] })
  ]);
  const email = e.rows[0] || null;
  const project = p.rows[0] || null;
  const previewHtml = md.render(draft.body || '');
  res.render('draft_edit', { kind: 'rfi', draft, email, project, previewHtml });
});
// Persist edits to an RFI draft (title/status/body) and bump updated_at.
app.post('/drafts/rfi/:id/save', requireAuth, async (req, res) => {
  const draftId = req.params.id;
  const title = (req.body.title || '').trim();
  const status = (req.body.status || 'draft').trim();
  const body = (req.body.body || '').trim();
  await pool.query(
    'update rfi_drafts set title=$1, status=$2, body=$3, updated_at=now() where id=$4',
    [title, status, body, draftId]
  );
  await audit(req, 'draft.rfi_saved', { targetType: 'rfi_draft', targetId: draftId, metadata: { status } });
  res.redirect(`/drafts/rfi/${draftId}`);
});
// Download an RFI draft body as a markdown attachment.
app.get('/drafts/rfi/:id/export.md', requireAuth, async (req, res) => {
  const draftId = req.params.id;
  const result = await pool.query('select * from rfi_drafts where id=$1', [draftId]);
  const draft = result.rows[0];
  if (!draft) return res.status(404).send('Not found');
  res.setHeader('Content-Type', 'text/markdown; charset=utf-8');
  res.setHeader('Content-Disposition', `attachment; filename="RFI_${draftId}.md"`);
  await audit(req, 'draft.rfi_exported', { targetType: 'rfi_draft', targetId: draftId, metadata: { format: 'md' } });
  res.send(draft.body || '');
});
// Admin: email rules page. Shows the caller's active projects (for the
// create form) alongside all rules ordered by enabled flag and priority.
app.get('/admin/email-rules', requireOwner, async (req, res) => {
  // The project and rule queries are independent — fetch both in parallel.
  const [{ rows: projects }, { rows: rules }] = await Promise.all([
    pool.query(
      `select p.id, p.name, p.job_number
       from projects p
       join project_members pm on pm.project_id=p.id and pm.user_id=$1
       where p.active=true
       order by p.updated_at desc`,
      [req.user.id]
    ),
    pool.query(
      `select r.*, p.name as project_name
       from email_rules r
       join projects p on p.id=r.project_id
       order by r.enabled desc, r.priority asc, r.created_at asc`
    )
  ]);
  res.render('email_rules', { projects, rules });
});
// Admin: create an email routing rule for a project.
app.post('/admin/email-rules/create', requireOwner, async (req, res) => {
  const projectId = (req.body.projectId || '').trim();
  const matchType = (req.body.matchType || '').trim();
  const matchValue = (req.body.matchValue || '').trim();
  // Number.parseInt with explicit radix + Number.isNaN (never the coercing
  // global isNaN). Non-numeric input falls back to the default priority 100;
  // the audit metadata now records the value actually inserted.
  const parsedPriority = Number.parseInt(req.body.priority || '100', 10);
  const priority = Number.isNaN(parsedPriority) ? 100 : parsedPriority;
  if (!projectId || !matchType || !matchValue) return res.status(400).send('missing fields');
  const { rows } = await pool.query(
    `insert into email_rules(created_by_user_id, project_id, match_type, match_value, priority)
     values ($1,$2,$3,$4,$5)
     returning id`,
    [req.user.id, projectId, matchType, matchValue, priority]
  );
  await audit(req, 'admin.email_rule_created', { targetType: 'email_rule', targetId: rows[0].id, metadata: { projectId, matchType, matchValue, priority } });
  res.redirect('/admin/email-rules');
});
// Admin: enable/disable an email rule, auditing the resulting state.
app.post('/admin/email-rules/:id/toggle', requireOwner, async (req, res) => {
  const { id: ruleId } = req.params;
  await pool.query(`update email_rules set enabled = not enabled where id=$1`, [ruleId]);
  const after = await pool.query('select enabled from email_rules where id=$1', [ruleId]);
  await audit(req, 'admin.email_rule_toggled', {
    targetType: 'email_rule',
    targetId: ruleId,
    metadata: { enabled: after.rows[0]?.enabled }
  });
  res.redirect('/admin/email-rules');
});
// Admin: permanently remove an email rule.
app.post('/admin/email-rules/:id/delete', requireOwner, async (req, res) => {
  const { id: ruleId } = req.params;
  await pool.query('delete from email_rules where id=$1', [ruleId]);
  await audit(req, 'admin.email_rule_deleted', { targetType: 'email_rule', targetId: ruleId });
  res.redirect('/admin/email-rules');
});
// Admin: audit log viewer — the 250 most recent entries, newest first.
app.get('/admin/audit', requireOwner, async (_req, res) => {
  const query = `select created_at, actor_email, action, target_type, target_id, ip, metadata
     from audit_logs
     order by created_at desc
     limit 250`;
  const { rows: logs } = await pool.query(query);
  res.render('admin_audit', { logs });
});
// Boot only when this file is executed directly (not when imported, e.g.
// by tests). MASTERMIND_DISABLE_LISTEN=1 initializes without binding a port.
if (isMainModule) {
  await initializeApp();
  if (process.env.MASTERMIND_DISABLE_LISTEN !== '1') {
    app.listen(PORT, () => {
      console.log(`Mastermind web listening on ${BASE_URL}`);
    });
  } else {
    console.log(`Mastermind web ready (${isMemoryDb ? 'memory' : 'postgres'})`);
  }
}