initial (open-source)

This commit is contained in:
James Long 2022-03-31 13:19:08 -04:00
commit dd541e5f70
35 changed files with 4899 additions and 0 deletions

8
.gitignore vendored Normal file
View file

@ -0,0 +1,8 @@
.DS_Store
config.json
node_modules
log
supervise
bin/large-sync-data.txt
user-files
server-files

13
account-db.js Normal file
View file

@ -0,0 +1,13 @@
let { join } = require('path');
let { openDatabase } = require('./db');

// Lazily-opened singleton connection to the account database.
let _accountDb = null;

// Returns the shared account database, opening
// server-files/account.sqlite (relative to this file) on first use.
function getAccountDb() {
  if (_accountDb == null) {
    _accountDb = openDatabase(join(__dirname, 'server-files/account.sqlite'));
  }
  return _accountDb;
}

module.exports = { getAccountDb };

113
app-account.js Normal file
View file

@ -0,0 +1,113 @@
// Account/authentication endpoints: bootstrap, login, password management.
let express = require('express');
let bcrypt = require('bcrypt');
let uuid = require('uuid');
let errorMiddleware = require('./util/error-middleware');
let { validateUser } = require('./util/validate-user');
let { getAccountDb } = require('./account-db');
let app = express();
// NOTE(review): errorMiddleware is registered again after the routes at the
// bottom of this file; an Express error handler only catches errors from
// handlers registered before it, so this early registration is likely
// redundant — confirm and keep only the trailing one.
app.use(errorMiddleware);
// No async setup needed here; kept so app.js can call init() on every sub-app.
function init() {}
/**
 * Hash a plaintext password with bcrypt (synchronous).
 *
 * @param {string} password - the plaintext password
 * @returns {string} bcrypt hash using a cost factor of 12
 */
function hashPassword(password) {
  const SALT_ROUNDS = 12;
  return bcrypt.hashSync(password, SALT_ROUNDS);
}
// Non-authenticated endpoints:
//
// /bootstrap (special endpoint for setting up the instance; can't be called again)
// /login
// Tells the client whether this instance still needs its initial password.
// The instance counts as bootstrapped once any row exists in `auth`.
app.get('/needs-bootstrap', (req, res) => {
  const hasPassword = getAccountDb().all('SELECT * FROM auth').length > 0;
  res.send({
    status: 'ok',
    data: { bootstrapped: hasPassword }
  });
});
// One-time setup: set the instance password and mint the first session token.
// Rejected once a password already exists.
app.post('/bootstrap', (req, res) => {
  const accountDb = getAccountDb();

  if (accountDb.all('SELECT * FROM auth').length !== 0) {
    res.status(400).send({
      status: 'error',
      reason: 'already-bootstrapped'
    });
    return;
  }

  const { password } = req.body;
  if (password == null || password === '') {
    res.status(400).send({ status: 'error', reason: 'invalid-password' });
    return;
  }

  // Hash the password. There's really not a strong need for this
  // since this is a self-hosted instance owned by the user.
  // However, just in case we do it.
  accountDb.mutate('INSERT INTO auth (password) VALUES (?)', [
    hashPassword(password)
  ]);

  const token = uuid.v4();
  accountDb.mutate('INSERT INTO sessions (token) VALUES (?)', [token]);

  res.send({ status: 'ok', data: { token } });
});
// Log in with the instance password. On success returns the (single,
// permanent) session token; on a wrong password returns `token: null`
// with a 200 status — clients check `data.token`.
app.post('/login', (req, res) => {
  let { password } = req.body;
  let accountDb = getAccountDb();
  let row = accountDb.first('SELECT * FROM auth');
  let confirmed = row && bcrypt.compareSync(password, row.password);
  let token = null;
  if (confirmed) {
    // Right now, tokens are permanent and there's just one in the
    // system. In the future this should probably evolve to be a
    // "session" that times out after a long time or something, and
    // maybe each device has a different token
    let sessionRow = accountDb.first('SELECT * FROM sessions');
    // Fix: `first()` returns null on an empty table; previously this read
    // `.token` unconditionally (and shadowed the outer `row`), crashing with
    // a TypeError if the sessions table was ever empty.
    token = sessionRow ? sessionRow.token : null;
  }
  res.send({ status: 'ok', data: { token } });
});
// Change the instance password (requires a valid session token).
app.post('/change-password', (req, res) => {
  let user = validateUser(req, res);
  if (!user) return;
  let accountDb = getAccountDb();
  let { password } = req.body;
  if (password == null || password === '') {
    // Consistency fix: /bootstrap answers invalid-password with a 400;
    // previously this route returned the error payload with a 200 status.
    res.status(400).send({ status: 'error', reason: 'invalid-password' });
    return;
  }
  let hashed = hashPassword(password);
  // Note that this doesn't have a WHERE. This table only ever has 1
  // row (maybe that will change in the future? if so, this will not work)
  accountDb.mutate('UPDATE auth SET password = ?', [hashed]);
  res.send({ status: 'ok', data: {} });
});
// Check whether the supplied token is a valid session. validateUser writes
// the error response itself when the token is missing or invalid.
app.get('/validate', (req, res) => {
  const user = validateUser(req, res);
  if (!user) {
    return;
  }
  res.send({ status: 'ok', data: { validated: true } });
});
// Error-handling middleware must come after the routes to catch their errors.
app.use(errorMiddleware);
module.exports.handlers = app;
module.exports.init = init;

394
app-plaid.js Normal file
View file

@ -0,0 +1,394 @@
// Plaid bank-syncing endpoints: exchanges short-lived "web tokens" with the
// client-side link page and proxies account/transaction calls to Plaid.
const express = require('express');
const uuid = require('uuid');
const fetch = require('node-fetch');
const plaid = require('plaid');
const { middleware: connectDb } = require('./db');
const { handleError } = require('./util/handle-error');
const { validateSubscribedUser } = require('./util/validate-user');
const config = require('./config');
const app = express();
// Shared Plaid client; created by init() before the server starts listening.
let plaidClient;
// Build the shared Plaid client from config. Must run (via app.js) before
// any route that uses plaidClient handles a request.
function init() {
  plaidClient = new plaid.Client({
    clientID: config.plaid.client_id,
    secret: config.plaid.secret,
    env: config.plaid.env,
    options: { version: '2019-05-29' }
  });
}
// Look up the web token named in the request body and return its row, or
// null after writing an error response to `res`. Tokens expire 10 minutes
// after creation.
async function validateToken(req, res) {
  const rows = await req.runQuery(
    'SELECT * FROM webTokens WHERE token_id = $1',
    [req.body.token],
    true
  );

  if (rows.length === 0) {
    res.send(JSON.stringify({ status: 'error', reason: 'not-found' }));
    return null;
  }

  const row = rows[0];
  // time_created is stored as a millisecond timestamp (Date.now()).
  const TOKEN_TTL_MS = 10 * 60 * 1000;
  const ageMs = Date.now() - JSON.parse(row.time_created);
  if (ageMs >= TOKEN_TTL_MS) {
    res.send(JSON.stringify({ status: 'error', reason: 'expired' }));
    return null;
  }

  return row;
}
// Mint a new web token for the user (used by the separate Plaid link page).
// Only one token per user exists at a time; any previous one is discarded.
app.post(
  '/create-web-token',
  connectDb,
  handleError(async (req, res) => {
    const user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }

    const token = uuid.v4();

    // Drop any existing token before inserting the replacement.
    await req.runQuery('DELETE FROM webTokens WHERE user_id = $1', [user.id]);
    await req.runQuery(
      'INSERT INTO webTokens (user_id, token_id, time_created) VALUES ($1, $2, $3)',
      [user.id, token, Date.now()]
    );

    res.send(JSON.stringify({ status: 'ok', data: token }));
  })
);
// Report whether a web token exists and has not expired; validateToken
// writes the error response itself on failure.
app.post(
  '/validate-web-token',
  connectDb,
  handleError(async (req, res) => {
    if (await validateToken(req, res)) {
      res.send(JSON.stringify({ status: 'ok' }));
    }
  })
);
// Store arbitrary JSON contents on a (valid) web token so the other side of
// the link flow can pick them up later.
app.post(
  '/put-web-token-contents',
  connectDb,
  handleError(async (req, res) => {
    const token = await validateToken(req, res);
    if (!token) {
      return;
    }

    await req.runQuery(
      'UPDATE webTokens SET contents = $1 WHERE token_id = $2',
      [JSON.stringify(req.body.data), token.token_id]
    );

    res.send(JSON.stringify({ status: 'ok', data: null }));
  })
);
// Fetch the JSON contents previously stored on the user's web token.
app.post(
  '/get-web-token-contents',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let token = await validateToken(req, res);
    if (!token) {
      return;
    }
    let rows = await req.runQuery(
      'SELECT * FROM webTokens WHERE user_id = $1 AND token_id = $2',
      [user.id, token.token_id],
      true
    );
    if (rows.length === 0) {
      res.send(
        JSON.stringify({
          status: 'error',
          reason: 'not-found'
        })
      );
      // Fix: previously fell through and sent a second response, which
      // crashes Express ("Cannot set headers after they are sent") and
      // would also read `rows[0].contents` on an empty result.
      return;
    }
    res.send(
      JSON.stringify({
        status: 'ok',
        data: JSON.parse(rows[0].contents)
      })
    );
  })
);
// Create a Plaid Link token for the user behind a web token; this kicks off
// the Plaid Link UI on the client.
app.post(
  '/make_link_token',
  connectDb,
  handleError(async (req, res) => {
    let token = await validateToken(req, res);
    if (!token) {
      return;
    }
    let result = await plaidClient.createLinkToken({
      user: {
        client_user_id: token.user_id
      },
      client_name: 'Actual',
      products: ['transactions'],
      country_codes: ['US'],
      language: 'en'
    });
    res.send(JSON.stringify({ status: 'ok', data: result.link_token }));
  })
);
// Exchange a Plaid public_token (from the Link flow) for a permanent
// access_token and store it for this user/item.
app.post(
  '/handoff_public_token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id, public_token } = req.body;
    // NOTE(review): assumes config.plaid.env is a full base URL for the
    // Plaid environment — confirm against config.
    let url = config.plaid.env + '/item/public_token/exchange';
    let resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        public_token: public_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    // NOTE(review): resData.access_token is stored without checking for a
    // Plaid error response — a failed exchange would insert undefined.
    await req.runQuery(
      'INSERT INTO access_tokens (item_id, user_id, access_token) VALUES ($1, $2, $3)',
      [item_id, user.id, resData.access_token]
    );
    res.send(JSON.stringify({ status: 'ok' }));
  })
);
// Revoke a Plaid item's access token: ask Plaid to remove the item, then
// soft-delete the token locally. The local soft-delete happens even if the
// Plaid removal fails (which is only logged).
app.post(
  '/remove-access-token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id } = req.body;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      throw new Error('access token not found');
    }
    const { access_token } = rows[0];
    const url = config.plaid.env + '/item/remove';
    const resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    if (resData.removed !== true) {
      // Log only a short prefix of the token, never the full secret.
      console.log('[Error] Item not removed: ' + access_token.slice(0, 3));
    }
    await req.runQuery(
      'UPDATE access_tokens SET deleted = TRUE WHERE access_token = $1',
      [access_token]
    );
    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Proxy Plaid's /accounts/get for one of the user's items and return the
// raw Plaid response to the client.
app.post(
  '/accounts',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    const { item_id } = req.body;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      throw new Error('access token not found');
    }
    const { access_token } = rows[0];
    const url = config.plaid.env + '/accounts/get';
    const resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Proxy Plaid's /transactions/get for a date range of a single account.
// Unlike the sibling endpoints, a missing token answers 400 here instead of
// throwing, and only non-deleted tokens are accepted.
app.post(
  '/transactions',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id, start_date, end_date, account_id, count, offset } = req.body;
    let resData;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2 AND deleted = FALSE',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      res.status(400);
      res.send('access-token-not-found');
      return;
    }
    const { access_token } = rows[0];
    const url = config.plaid.env + '/transactions/get';
    resData = await fetch(url, {
      method: 'POST',
      body: JSON.stringify({
        client_id: config.plaid.client_id,
        secret: config.plaid.secret,
        access_token: access_token,
        start_date: start_date,
        end_date: end_date,
        options: {
          account_ids: [account_id],
          count: count,
          offset: offset
        }
      }),
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'Actual Budget'
      }
    }).then(res => res.json());
    res.send(
      JSON.stringify({
        status: 'ok',
        data: resData
      })
    );
  })
);
// Create a Plaid Link token for an existing item, passing the stored
// access_token (presumably Link "update mode" for re-authentication —
// confirm against Plaid docs).
app.post(
  '/make-public-token',
  connectDb,
  handleError(async (req, res) => {
    let user = await validateSubscribedUser(req, res);
    if (!user) {
      return;
    }
    let { item_id } = req.body;
    const rows = await req.runQuery(
      'SELECT * FROM access_tokens WHERE user_id = $1 AND item_id = $2',
      [user.id, item_id],
      true
    );
    if (rows.length === 0) {
      throw new Error('access token not found');
    }
    const { access_token } = rows[0];
    let result = await plaidClient.createLinkToken({
      user: {
        client_user_id: user.id
      },
      client_name: 'Actual',
      country_codes: ['US'],
      language: 'en',
      access_token: access_token
    });
    res.send(
      JSON.stringify({
        status: 'ok',
        data: result
      })
    );
  })
);
module.exports.handlers = app;
module.exports.init = init;

440
app-sync.js Normal file
View file

@ -0,0 +1,440 @@
// File-sync endpoints: CRDT message sync plus user-file (budget) upload,
// download, encryption-key management, and file metadata.
let { Buffer } = require('buffer');
let fs = require('fs/promises');
let { join } = require('path');
let express = require('express');
let uuid = require('uuid');
let AdmZip = require('adm-zip');
let { validateUser } = require('./util/validate-user');
let errorMiddleware = require('./util/error-middleware');
let config = require('./config');
let { getAccountDb } = require('./account-db');
let simpleSync = require('./sync-simple');
let fullSync = require('./sync-full');
let actual = require('@actual-app/api');
let SyncPb = actual.internal.SyncProtoBuf;
const app = express();
// NOTE(review): an Express error handler registered before the routes
// catches nothing from them, and there is no trailing registration in this
// file — confirm whether this should move after the routes.
app.use(errorMiddleware);
// Boot the embedded Actual app so fullSync can apply messages to budget
// files stored under user-files/.
async function init() {
  await actual.init({
    config: {
      dataDir: join(__dirname, 'user-files')
    }
  });
}
// This is a version representing the internal format of sync
// messages. When this changes, all sync files need to be reset. We
// will check this version when syncing and notify the user if they
// need to reset.
const SYNC_FORMAT_VERSION = 2;
// CRDT sync endpoint: accepts a protobuf SyncRequest of messages, validates
// the file's version/group/key state, merges via fullSync, and returns the
// new messages plus the merkle trie in a protobuf SyncResponse.
app.post('/sync', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let requestPb;
  try {
    requestPb = SyncPb.SyncRequest.deserializeBinary(req.body);
  } catch (e) {
    res.status(500);
    res.send({ status: 'error', reason: 'internal-error' });
    return;
  }
  let accountDb = getAccountDb();
  let file_id = requestPb.getFileid() || null;
  let group_id = requestPb.getGroupid() || null;
  let key_id = requestPb.getKeyid() || null;
  let since = requestPb.getSince() || null;
  let messages = requestPb.getMessagesList();
  // `since` is the client's sync clock; without it we can't compute a delta.
  // NOTE(review): this throw relies on errorMiddleware, which is registered
  // before the routes in this file — confirm it actually catches it.
  if (!since) {
    throw new Error('`since` is required');
  }
  let currentFiles = accountDb.all(
    'SELECT group_id, encrypt_keyid, encrypt_meta, sync_version FROM files WHERE id = ?',
    [file_id]
  );
  if (currentFiles.length === 0) {
    res.status(400);
    res.send('file-not-found');
    return;
  }
  let currentFile = currentFiles[0];
  // Files written with an older internal message format must be reset
  // before they can sync again.
  if (
    currentFile.sync_version == null ||
    currentFile.sync_version < SYNC_FORMAT_VERSION
  ) {
    res.status(400);
    res.send('file-old-version');
    return;
  }
  // When resetting sync state, something went wrong. There is no
  // group id and it's awaiting a file to be uploaded.
  if (currentFile.group_id == null) {
    res.status(400);
    res.send('file-needs-upload');
    return;
  }
  // Check to make sure the uploaded file is valid and has been
  // encrypted with the same key it is registered with (this might
  // be wrong if there was an error during the key creation
  // process)
  let uploadedKeyId = currentFile.encrypt_meta
    ? JSON.parse(currentFile.encrypt_meta).keyId
    : null;
  if (uploadedKeyId !== currentFile.encrypt_keyid) {
    res.status(400);
    res.send('file-key-mismatch');
    return;
  }
  // The changes being synced are part of an old group, which
  // means the file has been reset. User needs to re-download.
  if (group_id !== currentFile.group_id) {
    res.status(400);
    res.send('file-has-reset');
    return;
  }
  // The data is encrypted with a different key which is
  // unacceptable. We can't accept these changes. Reject them and
  // tell the user that they need to generate the correct key
  // (which necessitates a sync reset so they need to re-download).
  if (key_id !== currentFile.encrypt_keyid) {
    res.status(400);
    res.send('file-has-new-key');
    // NOTE(review): the return value is ignored by Express; `false` here is
    // merely inconsistent with the plain `return`s above — harmless.
    return false;
  }
  // TODO: We also provide a "simple" sync method which currently isn't
  // used. This method just stores the messages locally and doesn't
  // load the whole app at all. If we want to support end-to-end
  // encryption, this method is required because we can't read the
  // messages. Using it looks like this:
  //
  // let simpleSync = require('./sync-simple');
  // let {trie, newMessages } = simpleSync.sync(messages, since, file_id);
  let { trie, newMessages } = await fullSync.sync(messages, since, file_id);
  // encode it back...
  let responsePb = new SyncPb.SyncResponse();
  responsePb.setMerkle(JSON.stringify(trie));
  for (let i = 0; i < newMessages.length; i++) {
    let msg = newMessages[i];
    let envelopePb = new SyncPb.MessageEnvelope();
    envelopePb.setTimestamp(msg.timestamp);
    envelopePb.setIsencrypted(msg.is_encrypted === 1);
    envelopePb.setContent(msg.content);
    responsePb.addMessages(envelopePb);
  }
  res.set('Content-Type', 'application/actual-sync');
  res.send(Buffer.from(responsePb.serializeBinary()));
});
// Return the encryption key metadata (key id, salt, test blob) for a file
// so the client can derive and verify its key.
app.post('/user-get-key', (req, res) => {
  const user = validateUser(req, res);
  if (!user) {
    return;
  }

  const { fileId } = req.body;
  const rows = getAccountDb().all(
    'SELECT encrypt_salt, encrypt_keyid, encrypt_test FROM files WHERE id = ?',
    [fileId]
  );

  if (rows.length === 0) {
    res.status(400).send('file-not-found');
    return;
  }

  const { encrypt_salt, encrypt_keyid, encrypt_test } = rows[0];
  res.send(
    JSON.stringify({
      status: 'ok',
      data: { id: encrypt_keyid, salt: encrypt_salt, test: encrypt_test }
    })
  );
});
// Register new encryption key metadata for a file. Overwrites any existing
// key info for that file id without further checks.
app.post('/user-create-key', (req, res) => {
  const user = validateUser(req, res);
  if (!user) {
    return;
  }

  const { fileId, keyId, keySalt, testContent } = req.body;
  getAccountDb().mutate(
    'UPDATE files SET encrypt_salt = ?, encrypt_keyid = ?, encrypt_test = ? WHERE id = ?',
    [keySalt, keyId, testContent, fileId]
  );

  res.send(JSON.stringify({ status: 'ok' }));
});
// Reset a file's sync state by clearing its group id; the next upload will
// create a fresh group (see /upload-user-file).
app.post('/reset-user-file', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let { fileId } = req.body;
  let files = accountDb.all('SELECT group_id FROM files WHERE id = ?', [
    fileId
  ]);
  if (files.length === 0) {
    res.status(400).send('User or file not found');
    return;
  }
  let { group_id } = files[0];
  accountDb.mutate('UPDATE files SET group_id = NULL WHERE id = ?', [fileId]);
  // The old group's message data is intentionally left in place for now.
  if (group_id) {
    // TODO: Instead of doing this, just delete the db file named
    // after the group
    // db.mutate('DELETE FROM messages_binary WHERE group_id = ?', [group_id]);
    // db.mutate('DELETE FROM messages_merkles WHERE group_id = ?', [group_id]);
  }
  res.send(JSON.stringify({ status: 'ok' }));
});
// Receive a zipped budget file, validate its group/key against the current
// registration, extract it to disk, and create or refresh the files row.
app.post('/upload-user-file', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  // File metadata travels in custom headers because the body is the raw zip.
  let name = decodeURIComponent(req.headers['x-actual-name']);
  let fileId = req.headers['x-actual-file-id'];
  let groupId = req.headers['x-actual-group-id'] || null;
  let encryptMeta = req.headers['x-actual-encrypt-meta'] || null;
  let syncFormatVersion = req.headers['x-actual-format'] || null;
  let keyId = encryptMeta ? JSON.parse(encryptMeta).keyId : null;
  if (!fileId) {
    throw new Error('fileId is required');
  }
  let currentFiles = accountDb.all(
    'SELECT group_id, encrypt_keyid, encrypt_meta FROM files WHERE id = ?',
    [fileId]
  );
  if (currentFiles.length) {
    let currentFile = currentFiles[0];
    // The uploading file is part of an old group, so reject
    // it. All of its internal sync state is invalid because its
    // old. The sync state has been reset, so user needs to
    // either reset again or download from the current group.
    if (groupId !== currentFile.group_id) {
      res.status(400);
      res.send('file-has-reset');
      return;
    }
    // The key that the file is encrypted with is different than
    // the current registered key. All data must always be
    // encrypted with the registered key for consistency. Key
    // changes always necessitate a sync reset, which means this
    // upload is trying to overwrite another reset. That might
    // be fine, but since we definitely cannot accept a file
    // encrypted with the wrong key, we bail and suggest the
    // user download the latest file.
    if (keyId !== currentFile.encrypt_keyid) {
      res.status(400);
      res.send('file-has-new-key');
      return;
    }
  }
  // TODO: If we want to support end-to-end encryption, we'd write the
  // raw file down because it's an encrypted blob. This isn't
  // supported yet in the self-hosted version because it's unclear if
  // it's still needed, given that you own your server
  //
  // await fs.writeFile(join(config.files, `${fileId}.blob`), req.body);
  let zip = new AdmZip(req.body);
  try {
    // NOTE(review): extracting a client-supplied zip — confirm the adm-zip
    // version in use sanitizes entry paths (zip-slip) before trusting this.
    zip.extractAllTo(join(config.files, fileId), true);
  } catch (err) {
    console.log('Error writing file', err);
    res.send(JSON.stringify({ status: 'error' }));
    return;
  }
  let rows = accountDb.all('SELECT id FROM files WHERE id = ?', [fileId]);
  if (rows.length === 0) {
    // it's new
    groupId = uuid.v4();
    accountDb.mutate(
      'INSERT INTO files (id, group_id, sync_version, name, encrypt_meta) VALUES (?, ?, ?, ?, ?)',
      [fileId, groupId, syncFormatVersion, name, encryptMeta]
    );
    res.send(JSON.stringify({ status: 'ok', groupId }));
  } else {
    if (!groupId) {
      // sync state was reset, create new group
      groupId = uuid.v4();
      accountDb.mutate('UPDATE files SET group_id = ? WHERE id = ?', [
        groupId,
        fileId
      ]);
    }
    // Regardless, update some properties
    accountDb.mutate(
      'UPDATE files SET sync_version = ?, encrypt_meta = ?, name = ? WHERE id = ?',
      [syncFormatVersion, encryptMeta, name, fileId]
    );
    res.send(JSON.stringify({ status: 'ok', groupId }));
  }
});
// Zip up a user file's directory on disk and send it back as an attachment.
app.get('/download-user-file', async (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let fileId = req.headers['x-actual-file-id'];
  // Do some authentication
  let rows = accountDb.all(
    'SELECT id FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (rows.length === 0) {
    res.status(400).send('User or file not found');
    return;
  }
  let zip = new AdmZip();
  try {
    zip.addLocalFolder(join(config.files, fileId), '/');
  } catch (e) {
    res.status(500).send('Error reading files');
    return;
  }
  let buffer = zip.toBuffer();
  res.setHeader('Content-Disposition', `attachment;filename=${fileId}`);
  res.send(buffer);
});
// Rename a user file. Answers 400 when the file doesn't exist (or was
// deleted), matching the other file endpoints in this app.
app.post('/update-user-filename', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let { fileId, name } = req.body;
  // Do some authentication
  let rows = accountDb.all(
    'SELECT id FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (rows.length === 0) {
    // Consistency fix: an unknown file is a client error; previously this
    // answered 500, while /download-user-file and /user-get-key use 400.
    res.status(400).send('User or file not found');
    return;
  }
  accountDb.mutate('UPDATE files SET name = ? WHERE id = ?', [name, fileId]);
  res.send(JSON.stringify({ status: 'ok' }));
});
// List every file row, including soft-deleted ones — the `deleted` flag is
// included so the client can filter.
app.get('/list-user-files', (req, res) => {
  const user = validateUser(req, res);
  if (!user) {
    return;
  }

  const files = getAccountDb().all('SELECT * FROM files');
  const data = files.map(file => ({
    deleted: file.deleted,
    fileId: file.id,
    groupId: file.group_id,
    name: file.name,
    encryptKeyId: file.encrypt_keyid
  }));

  res.send(JSON.stringify({ status: 'ok', data }));
});
// Return metadata for a single non-deleted file; the file id travels in a
// header to match the upload/download endpoints.
app.get('/get-user-file-info', (req, res) => {
  let user = validateUser(req, res);
  if (!user) {
    return;
  }
  let accountDb = getAccountDb();
  let fileId = req.headers['x-actual-file-id'];
  let rows = accountDb.all(
    'SELECT * FROM files WHERE id = ? AND deleted = FALSE',
    [fileId]
  );
  if (rows.length === 0) {
    res.send(JSON.stringify({ status: 'error' }));
    return;
  }
  let row = rows[0];
  res.send(
    JSON.stringify({
      status: 'ok',
      data: {
        deleted: row.deleted,
        fileId: row.id,
        groupId: row.group_id,
        name: row.name,
        encryptMeta: row.encrypt_meta ? JSON.parse(row.encrypt_meta) : null
      }
    })
  );
});
// Soft-delete a user file: sets the `deleted` flag; nothing is removed
// from disk.
app.post('/delete-user-file', (req, res) => {
  const user = validateUser(req, res);
  if (!user) {
    return;
  }

  const { fileId } = req.body;
  getAccountDb().mutate('UPDATE files SET deleted = TRUE WHERE id = ?', [
    fileId
  ]);

  res.send(JSON.stringify({ status: 'ok' }));
});
module.exports.handlers = app;
module.exports.init = init;

39
app.js Normal file
View file

@ -0,0 +1,39 @@
// Server entry point: configures the Express app, mounts the account and
// sync sub-apps, and starts listening on the configured port.
require('source-map-support').install();
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const config = require('./config');
const accountApp = require('./app-account');
const syncApp = require('./app-sync');
const app = express();
// Log unhandled promise rejections rather than letting them go unnoticed.
process.on('unhandledRejection', reason => {
  console.log('Rejection:', reason);
});
app.use(cors());
// JSON for normal API calls; raw bodies for the binary sync/file protocols.
app.use(bodyParser.json({ limit: '20mb' }));
app.use(bodyParser.raw({ type: 'application/actual-sync', limit: '20mb' }));
app.use(bodyParser.raw({ type: 'application/encrypted-file', limit: '50mb' }));
app.use('/sync', syncApp.handlers);
app.use('/account', accountApp.handlers);
// Simple mode/health check.
app.get('/', (req, res) => {
  res.send(config.mode);
});
// Initialize the sub-apps before accepting connections.
async function run() {
  await accountApp.init();
  await syncApp.init();
  console.log('Listening on ' + config.port + '...');
  app.listen(config.port);
}
run().catch(err => {
  console.log('Error starting app:', err);
  process.exit(1);
});

41
db.js Normal file
View file

@ -0,0 +1,41 @@
let Database = require('better-sqlite3');
// Thin synchronous wrapper around a better-sqlite3 Database exposing a
// small uniform API (all/first/exec/mutate/transaction/close).
class WrappedDatabase {
  /** @param {object} db - an open better-sqlite3 Database instance */
  constructor(db) {
    this.db = db;
  }

  /**
   * Run a SELECT and return all rows.
   * @param {string} sql
   * @param {Array} params - positional values bound to `?` placeholders
   * @returns {Array<object>}
   */
  all(sql, params = []) {
    let stmt = this.db.prepare(sql);
    return stmt.all(...params);
  }

  /**
   * Run a SELECT and return only the first row, or null when there are no
   * results. Improvement: uses `get()` so SQLite stops after one row instead
   * of materializing the entire result set via `all()` as before; the
   * null-on-empty contract is preserved (`get()` returns undefined).
   */
  first(sql, params = []) {
    let stmt = this.db.prepare(sql);
    let row = stmt.get(...params);
    return row === undefined ? null : row;
  }

  /** Execute one or more statements with no parameters and no results. */
  exec(sql) {
    this.db.exec(sql);
  }

  /**
   * Run an INSERT/UPDATE/DELETE.
   * @returns {{changes: number, insertId: number|bigint}} affected-row count
   *   and the last inserted rowid.
   */
  mutate(sql, params = []) {
    let stmt = this.db.prepare(sql);
    let info = stmt.run(...params);
    return { changes: info.changes, insertId: info.lastInsertRowid };
  }

  /** Run `fn` inside a transaction and return its result. */
  transaction(fn) {
    return this.db.transaction(fn)();
  }

  close() {
    this.db.close();
  }
}
// Open (or create) the SQLite database at `filename` and wrap it.
function openDatabase(filename) {
  return new WrappedDatabase(new Database(filename));
}
module.exports = { openDatabase };

View file

View file

@ -0,0 +1,5 @@
BEGIN TRANSACTION;
-- Remove the legacy schema-version table. NOTE(review): presumably the
-- migration runner now tracks applied migrations itself — confirm.
DROP TABLE db_version;
COMMIT;

View file

@ -0,0 +1,23 @@
BEGIN TRANSACTION;
-- Payees; transfer_acct links a payee to an account when the payee
-- represents a transfer.
CREATE TABLE payees
(id TEXT PRIMARY KEY,
name TEXT,
category TEXT,
tombstone INTEGER DEFAULT 0,
transfer_acct TEXT);
-- Matching rules (type/value) associated with a payee.
CREATE TABLE payee_rules
(id TEXT PRIMARY KEY,
payee_id TEXT,
type TEXT,
value TEXT,
tombstone INTEGER DEFAULT 0);
-- Expression index for case-insensitive lookups of rule values.
CREATE INDEX payee_rules_lowercase_index ON payee_rules(LOWER(value));
-- Indirection table mapping a payee id to its canonical target id.
CREATE TABLE payee_mapping
(id TEXT PRIMARY KEY,
targetId TEXT);
COMMIT;

View file

@ -0,0 +1,25 @@
BEGIN TRANSACTION;
-- Rebuild category_groups without the UNIQUE constraint on `name`. SQLite
-- can't drop a constraint in place, so: copy rows to a temp table, drop and
-- recreate the table without UNIQUE, then copy the rows back.
CREATE TEMPORARY TABLE category_groups_tmp
(id TEXT PRIMARY KEY,
name TEXT UNIQUE,
is_income INTEGER DEFAULT 0,
sort_order REAL,
tombstone INTEGER DEFAULT 0);
INSERT INTO category_groups_tmp SELECT * FROM category_groups;
DROP TABLE category_groups;
CREATE TABLE category_groups
(id TEXT PRIMARY KEY,
name TEXT,
is_income INTEGER DEFAULT 0,
sort_order REAL,
tombstone INTEGER DEFAULT 0);
INSERT INTO category_groups SELECT * FROM category_groups_tmp;
DROP TABLE category_groups_tmp;
COMMIT;

View file

@ -0,0 +1,7 @@
BEGIN TRANSACTION;
-- Indexes for transaction queries filtered by category and/or date.
CREATE INDEX trans_category_date ON transactions(category, date);
CREATE INDEX trans_category ON transactions(category);
CREATE INDEX trans_date ON transactions(date);
COMMIT;

View file

@ -0,0 +1,38 @@
BEGIN TRANSACTION;
-- Keep only budget, carryover, and buffered cells; everything else in
-- spreadsheet_cells is apparently recomputable.
DELETE FROM spreadsheet_cells WHERE
name NOT LIKE '%!budget\_%' ESCAPE '\' AND
name NOT LIKE '%!carryover\_%' ESCAPE '\' AND
name NOT LIKE '%!buffered';
-- Normalize cell names from underscore to dash separators.
UPDATE OR REPLACE spreadsheet_cells SET name = REPLACE(name, '_', '-');
-- Re-insert dashes into the 32-char id embedded in budget cell names,
-- restoring the 8-4-4-4-12 dashed form (length 52 = 20-char prefix + 32 id
-- chars for '%!budget-%' names).
UPDATE OR REPLACE spreadsheet_cells SET
name =
SUBSTR(name, 1, 28) ||
'-' ||
SUBSTR(name, 29, 4) ||
'-' ||
SUBSTR(name, 33, 4) ||
'-' ||
SUBSTR(name, 37, 4) ||
'-' ||
SUBSTR(name, 41, 12)
WHERE name LIKE '%!budget-%' AND LENGTH(name) = 52;
-- Same fix for carryover cells, whose prefix is three characters longer.
UPDATE OR REPLACE spreadsheet_cells SET
name =
SUBSTR(name, 1, 31) ||
'-' ||
SUBSTR(name, 32, 4) ||
'-' ||
SUBSTR(name, 36, 4) ||
'-' ||
SUBSTR(name, 40, 4) ||
'-' ||
SUBSTR(name, 44, 12)
WHERE name LIKE '%!carryover-%' AND LENGTH(name) = 55;
-- Strip the first character from carryover expressions. NOTE(review):
-- presumably a leading '=' expression marker — confirm.
UPDATE spreadsheet_cells SET expr = SUBSTR(expr, 2) WHERE name LIKE '%!carryover-%';
COMMIT;

View file

@ -0,0 +1,6 @@
BEGIN TRANSACTION;
-- Track cleared/pending state per transaction; existing rows default to
-- cleared (1) and not pending (0).
ALTER TABLE transactions ADD COLUMN cleared INTEGER DEFAULT 1;
ALTER TABLE transactions ADD COLUMN pending INTEGER DEFAULT 0;
COMMIT;

View file

@ -0,0 +1,10 @@
BEGIN TRANSACTION;
-- Rules storage: conditions and actions are stored as TEXT, applied at the
-- given stage; tombstone marks soft-deleted rules.
CREATE TABLE rules
(id TEXT PRIMARY KEY,
stage TEXT,
conditions TEXT,
actions TEXT,
tombstone INTEGER DEFAULT 0);
COMMIT;

View file

@ -0,0 +1,13 @@
BEGIN TRANSACTION;
-- Materialize a parent_id column for split (child) transactions.
ALTER TABLE transactions ADD COLUMN parent_id TEXT;
-- Child ids were previously encoded as '<parentId>/<suffix>'; extract the
-- parent portion for children and leave NULL for everything else.
UPDATE transactions SET
parent_id = CASE
WHEN isChild THEN SUBSTR(id, 1, INSTR(id, '/') - 1)
ELSE NULL
END;
CREATE INDEX trans_parent_id ON transactions(parent_id);
COMMIT;

View file

@ -0,0 +1,56 @@
BEGIN TRANSACTION;
-- Layered transaction views: layer2 maps raw columns to public names and
-- resolves category/payee through their mapping tables; layer1 filters out
-- tombstoned rows (including children of tombstoned parents);
-- v_transactions applies the canonical sort order.
DROP VIEW IF EXISTS v_transactions_layer2;
CREATE VIEW v_transactions_layer2 AS
SELECT
t.id AS id,
t.isParent AS is_parent,
t.isChild AS is_child,
t.acct AS account,
CASE WHEN t.isChild = 0 THEN NULL ELSE t.parent_id END AS parent_id,
CASE WHEN t.isParent = 1 THEN NULL ELSE cm.transferId END AS category,
pm.targetId AS payee,
t.imported_description AS imported_payee,
IFNULL(t.amount, 0) AS amount,
t.notes AS notes,
t.date AS date,
t.financial_id AS imported_id,
t.error AS error,
t.starting_balance_flag AS starting_balance_flag,
t.transferred_id AS transfer_id,
t.sort_order AS sort_order,
t.cleared AS cleared,
t.tombstone AS tombstone
FROM transactions t
LEFT JOIN category_mapping cm ON cm.id = t.category
LEFT JOIN payee_mapping pm ON pm.id = t.description
WHERE
t.date IS NOT NULL AND
t.acct IS NOT NULL;
-- Matches the canonical ordering used by v_transactions below.
CREATE INDEX trans_sorted ON transactions(date desc, starting_balance_flag, sort_order desc, id);
DROP VIEW IF EXISTS v_transactions_layer1;
CREATE VIEW v_transactions_layer1 AS
SELECT t.* FROM v_transactions_layer2 t
LEFT JOIN transactions t2 ON (t.is_child = 1 AND t2.id = t.parent_id)
WHERE IFNULL(t.tombstone, 0) = 0 AND IFNULL(t2.tombstone, 0) = 0;
DROP VIEW IF EXISTS v_transactions;
CREATE VIEW v_transactions AS
SELECT t.* FROM v_transactions_layer1 t
ORDER BY t.date desc, t.starting_balance_flag, t.sort_order desc, t.id;
-- Public categories view; renames cat_group to the reserved word "group".
DROP VIEW IF EXISTS v_categories;
CREATE VIEW v_categories AS
SELECT
id,
name,
is_income,
cat_group AS "group",
sort_order,
tombstone
FROM categories;
COMMIT;

View file

@ -0,0 +1,7 @@
BEGIN TRANSACTION;
-- Index for CRDT message lookups, plus refreshed query-planner statistics.
CREATE INDEX messages_crdt_search ON messages_crdt(dataset, row, column, timestamp);
ANALYZE;
COMMIT;

View file

@ -0,0 +1,33 @@
BEGIN TRANSACTION;
-- This adds the isChild/parent_id constraint in `where`: child rows whose
-- parent id is missing are now excluded from the view.
DROP VIEW IF EXISTS v_transactions_layer2;
CREATE VIEW v_transactions_layer2 AS
SELECT
t.id AS id,
t.isParent AS is_parent,
t.isChild AS is_child,
t.acct AS account,
CASE WHEN t.isChild = 0 THEN NULL ELSE t.parent_id END AS parent_id,
CASE WHEN t.isParent = 1 THEN NULL ELSE cm.transferId END AS category,
pm.targetId AS payee,
t.imported_description AS imported_payee,
IFNULL(t.amount, 0) AS amount,
t.notes AS notes,
t.date AS date,
t.financial_id AS imported_id,
t.error AS error,
t.starting_balance_flag AS starting_balance_flag,
t.transferred_id AS transfer_id,
t.sort_order AS sort_order,
t.cleared AS cleared,
t.tombstone AS tombstone
FROM transactions t
LEFT JOIN category_mapping cm ON cm.id = t.category
LEFT JOIN payee_mapping pm ON pm.id = t.description
WHERE
t.date IS NOT NULL AND
t.acct IS NOT NULL AND
(t.isChild = 0 OR t.parent_id IS NOT NULL);
COMMIT;

View file

@ -0,0 +1,10 @@
BEGIN TRANSACTION;
-- Add a generic key/value metadata table and drop the transaction views.
-- NOTE(review): presumably the views are recreated at runtime by the app
-- from this point on — confirm.
CREATE TABLE __meta__ (key TEXT PRIMARY KEY, value TEXT);
DROP VIEW IF EXISTS v_transactions_layer2;
DROP VIEW IF EXISTS v_transactions_layer1;
DROP VIEW IF EXISTS v_transactions;
DROP VIEW IF EXISTS v_categories;
COMMIT;

View file

@ -0,0 +1,5 @@
BEGIN TRANSACTION;
-- Allow manual ordering of accounts.
ALTER TABLE accounts ADD COLUMN sort_order REAL;
COMMIT;

View file

@ -0,0 +1,28 @@
BEGIN TRANSACTION;
-- Scheduled transactions, described by a rule, with active/completed flags
-- and whether posting should create a transaction.
CREATE TABLE schedules
(id TEXT PRIMARY KEY,
rule TEXT,
active INTEGER DEFAULT 0,
completed INTEGER DEFAULT 0,
posts_transaction INTEGER DEFAULT 0,
tombstone INTEGER DEFAULT 0);
-- Cached next-occurrence dates in local and base forms, each with a
-- timestamp. NOTE(review): exact semantics live in app code — confirm.
CREATE TABLE schedules_next_date
(id TEXT PRIMARY KEY,
schedule_id TEXT,
local_next_date INTEGER,
local_next_date_ts INTEGER,
base_next_date INTEGER,
base_next_date_ts INTEGER);
-- Paths into the schedule's rule for its key fields (payee/account/etc.).
CREATE TABLE schedules_json_paths
(schedule_id TEXT PRIMARY KEY,
payee TEXT,
account TEXT,
amount TEXT,
date TEXT);
-- Link transactions back to the schedule that created them.
ALTER TABLE transactions ADD COLUMN schedule TEXT;
COMMIT;

View file

@ -0,0 +1,135 @@
// One-off migration: move budget amounts, carryover flags, month buffers,
// and notes out of the legacy `spreadsheet_cells` table into dedicated
// tables, then drop `spreadsheet_cells`.
export default async function runMigration(db, uuid) {
  // A cell's value is its expression when present, otherwise its cached value.
  function getValue(node) {
    return node.expr != null ? node.expr : node.cachedValue;
  }

  db.execQuery(`
    CREATE TABLE zero_budget_months
      (id TEXT PRIMARY KEY,
       buffered INTEGER DEFAULT 0);

    CREATE TABLE zero_budgets
      (id TEXT PRIMARY KEY,
       month INTEGER,
       category TEXT,
       amount INTEGER DEFAULT 0,
       carryover INTEGER DEFAULT 0);

    CREATE TABLE reflect_budgets
      (id TEXT PRIMARY KEY,
       month INTEGER,
       category TEXT,
       amount INTEGER DEFAULT 0,
       carryover INTEGER DEFAULT 0);

    CREATE TABLE notes
      (id TEXT PRIMARY KEY,
       note TEXT);

    CREATE TABLE kvcache (key TEXT PRIMARY KEY, value TEXT);
    CREATE TABLE kvcache_key (id INTEGER PRIMARY KEY, key REAL);
  `);

  // Migrate budget amounts and carryover
  let budget = db.runQuery(
    `SELECT * FROM spreadsheet_cells WHERE name LIKE 'budget%!budget-%'`,
    [],
    true
  );

  db.transaction(() => {
    // forEach (was .map): iteration is purely for side effects.
    budget.forEach(monthBudget => {
      // Cell names look like `budget202101!budget-<categoryId>` (or
      // `budget-report...` for report budgets).
      let match = monthBudget.name.match(
        /^(budget-report|budget)(\d+)!budget-(.+)$/
      );
      if (match == null) {
        console.log('Warning: invalid budget month name', monthBudget.name);
        return;
      }

      let type = match[1];
      let month = match[2].slice(0, 4) + '-' + match[2].slice(4);
      // Explicit radix (was parseInt without one) to rule out any
      // non-decimal interpretation.
      let dbmonth = parseInt(match[2], 10);
      let cat = match[3];

      let amount = parseInt(getValue(monthBudget), 10);
      if (Number.isNaN(amount)) {
        amount = 0;
      }

      let sheetName = monthBudget.name.split('!')[0];
      let carryover = db.runQuery(
        'SELECT * FROM spreadsheet_cells WHERE name = ?',
        [`${sheetName}!carryover-${cat}`],
        true
      );

      let table = type === 'budget-report' ? 'reflect_budgets' : 'zero_budgets';
      db.runQuery(
        `INSERT INTO ${table} (id, month, category, amount, carryover) VALUES (?, ?, ?, ?, ?)`,
        [
          `${month}-${cat}`,
          dbmonth,
          cat,
          amount,
          carryover.length > 0 && getValue(carryover[0]) === 'true' ? 1 : 0
        ]
      );
    });
  });

  // Migrate buffers
  let buffers = db.runQuery(
    `SELECT * FROM spreadsheet_cells WHERE name LIKE 'budget%!buffered'`,
    [],
    true
  );

  db.transaction(() => {
    buffers.forEach(buffer => {
      let match = buffer.name.match(/^budget(\d+)!buffered$/);
      if (match) {
        let month = match[1].slice(0, 4) + '-' + match[1].slice(4);
        let amount = parseInt(getValue(buffer), 10);
        if (Number.isNaN(amount)) {
          amount = 0;
        }

        db.runQuery(
          `INSERT INTO zero_budget_months (id, buffered) VALUES (?, ?)`,
          [month, amount]
        );
      }
    });
  });

  // Migrate notes
  let notes = db.runQuery(
    `SELECT * FROM spreadsheet_cells WHERE name LIKE 'notes!%'`,
    [],
    true
  );

  // Notes were stored JSON-encoded; unparsable or empty values are skipped.
  let parseNote = str => {
    try {
      let value = JSON.parse(str);
      return value && value !== '' ? value : null;
    } catch (e) {
      return null;
    }
  };

  db.transaction(() => {
    notes.forEach(note => {
      let parsed = parseNote(getValue(note));
      if (parsed) {
        let [, id] = note.name.split('!');
        db.runQuery(`INSERT INTO notes (id, note) VALUES (?, ?)`, [id, parsed]);
      }
    });
  });

  db.execQuery(`
    DROP TABLE spreadsheet_cells;
    ANALYZE;
    VACUUM;
  `);
}

39
package.json Normal file
View file

@ -0,0 +1,39 @@
{
"name": "actual-sync",
"version": "1.0.0",
"description": "actual syncing server",
"main": "index.js",
"scripts": {
"start": "node app",
"lint": "eslint --ignore-pattern '**/node_modules/*' --ignore-pattern '**/log/*' --ignore-pattern 'supervise' --ignore-pattern '**/shared/*' ."
},
"dependencies": {
"@actual-app/api": "^2.0.1",
"adm-zip": "^0.5.9",
"bcrypt": "^5.0.1",
"better-sqlite3": "^7.5.0",
"body-parser": "^1.18.3",
"cors": "^2.8.5",
"express": "^4.16.3",
"express-response-size": "^0.0.3",
"node-fetch": "^2.2.0",
"source-map-support": "^0.5.21",
"uuid": "^3.3.2"
},
"eslintConfig": {
"extends": "react-app"
},
"devDependencies": {
"babel-eslint": "^10.0.1",
"eslint": "^5.12.1",
"eslint-config-react-app": "^3.0.6",
"eslint-plugin-flowtype": "^3.2.1",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-react": "^7.12.4"
},
"prettier": {
"singleQuote": true,
"trailingComma": "none"
}
}

5
run Executable file
View file

@ -0,0 +1,5 @@
#!/bin/sh
# daemontools/runit-style launch script: runs the sync server as ec2-user
# with a pinned node binary.

NODE="/home/ec2-user/.nvm/versions/node/v10.15.1/bin/node"

# Expose the proposed release version to the app via $VERSION.
# NOTE(review): sentry-cli is not listed in package.json dependencies —
# confirm it is installed on the host, otherwise VERSION ends up empty.
export VERSION=`"$NODE" ./node_modules/.bin/sentry-cli releases propose-version`

# exec replaces the shell so supervisor signals reach node directly;
# 2>&1 folds stderr into stdout for the log service.
exec chpst -uec2-user "$NODE" app.js 2>&1

17
sql/account.sql Normal file
View file

@ -0,0 +1,17 @@
-- Schema for the instance-wide account database
-- (opened by account-db.js as server-files/account.sqlite).

-- Holds the bcrypt hash of the server password; /bootstrap in
-- app-account.js refuses to run when this table already has rows, so in
-- practice it contains at most one row.
CREATE TABLE auth
(password TEXT PRIMARY KEY);

-- Active login session tokens; a request is authorized when its token
-- appears here (see util/validate-user.js).
CREATE TABLE sessions
(token TEXT PRIMARY KEY);

-- One row per budget file known to the server.
-- NOTE(review): the encrypt_* columns presumably describe client-side
-- end-to-end encryption metadata — confirm against the sync endpoints.
CREATE TABLE files
(id TEXT PRIMARY KEY,
group_id TEXT,
sync_version SMALLINT,
encrypt_meta TEXT,
encrypt_keyid TEXT,
encrypt_salt TEXT,
encrypt_test TEXT,
deleted BOOLEAN DEFAULT FALSE,
name TEXT);

10
sql/messages.sql Normal file
View file

@ -0,0 +1,10 @@
-- Schema for a per-group message database (user-files/<groupId>.sqlite,
-- created lazily by getGroupDb in sync-simple.js). Each file holds only
-- its own group's messages, so no group_id column is needed here.

-- One row per CRDT sync message. sync-simple.js deduplicates with
-- INSERT OR IGNORE against this primary key.
-- FIX: the primary key previously referenced a nonexistent group_id
-- column, which makes CREATE TABLE fail in SQLite.
CREATE TABLE messages_binary
(timestamp TEXT PRIMARY KEY,
is_encrypted BOOLEAN,
content bytea);

-- Single-row table (id = 1) caching the merkle trie over all message
-- timestamps; sync-simple.js upserts it via ON CONFLICT (id).
-- FIX: "PRIMAREY" typo made this statement a syntax error.
CREATE TABLE messages_merkles
(id TEXT PRIMARY KEY,
merkle TEXT);

38
sync-full.js Normal file
View file

@ -0,0 +1,38 @@
let { sequential } = require('./util/async');
let actual = require('@actual-app/api');
let SyncPb = actual.internal.SyncProtoBuf;
// This method must be sequential (TODO: document why, because Actual
// is global etc)
//
// Full (API-backed) sync: ensure the requested budget file is the one
// loaded into the global Actual instance, decode the protobuf message
// envelopes, and merge them through the Actual API. Returns the merkle
// trie of the local clock plus any messages the client hasn't seen.
const sync = sequential(async function syncAPI(messages, since, fileId) {
  // Load the requested budget if it isn't the one currently open.
  let prefs = await actual.internal.send('load-prefs');
  if (prefs == null || prefs.id !== fileId) {
    if (prefs != null) {
      await actual.internal.send('close-budget');
    }
    await actual.internal.send('load-budget', { id: fileId });
  }

  // Decode each envelope into the plain message shape the API expects.
  messages = messages.map(envPb => {
    let timestamp = envPb.getTimestamp();
    let msg = SyncPb.Message.deserializeBinary(envPb.getContent());
    return {
      timestamp: timestamp,
      dataset: msg.getDataset(),
      row: msg.getRow(),
      column: msg.getColumn(),
      value: msg.getValue()
    };
  });

  // FIX: this call was not awaited, so `newMessages` could be a pending
  // promise instead of the message list. Awaiting is a no-op if the
  // implementation happens to return synchronously.
  let newMessages = await actual.internal.syncAndReceiveMessages(
    messages,
    since
  );

  return {
    trie: actual.internal.timestamp.getClock().merkle,
    newMessages: newMessages
  };
});
module.exports = { sync };

86
sync-simple.js Normal file
View file

@ -0,0 +1,86 @@
let { existsSync, readFileSync } = require('fs');
let { join } = require('path');
let { openDatabase } = require('./db');
let actual = require('@actual-app/api');
let merkle = actual.internal.merkle;
let Timestamp = actual.internal.timestamp.Timestamp;
// Open (creating and initializing if necessary) the per-group message
// database stored at user-files/<groupId>.sqlite. On first open, the
// schema from sql/messages.sql is applied.
function getGroupDb(groupId) {
  let dbPath = join(__dirname, `user-files/${groupId}.sqlite`);
  let mustCreateSchema = !existsSync(dbPath);

  let db = openDatabase(dbPath);
  if (mustCreateSchema) {
    db.exec(readFileSync(join(__dirname, 'sql/messages.sql'), 'utf8'));
  }
  return db;
}
// Insert a batch of sync messages into the group database and fold the
// timestamp of every row that was actually new into the stored merkle
// trie. Runs entirely inside one transaction so the trie can never get
// out of sync with the messages table. Returns the pruned trie.
function addMessages(db, messages) {
  let resultTrie;

  db.transaction(() => {
    let trie = getMerkle(db);

    for (let message of messages) {
      let result = db.mutate(
        `INSERT OR IGNORE INTO messages_binary (timestamp, is_encrypted, content)
         VALUES (?, ?, ?)`,
        [
          message.getTimestamp(),
          message.getIsencrypted() ? 1 : 0,
          Buffer.from(message.getContent())
        ]
      );

      // Only timestamps that were actually inserted (not duplicates
      // ignored by the insert) belong in the trie.
      if (result.changes > 0) {
        trie = merkle.insert(trie, Timestamp.parse(message.getTimestamp()));
      }
    }

    trie = merkle.prune(trie);

    // Upsert the single merkle row (id = 1).
    db.mutate(
      'INSERT INTO messages_merkles (id, merkle) VALUES (1, ?) ON CONFLICT (id) DO UPDATE SET merkle = ?',
      [JSON.stringify(trie), JSON.stringify(trie)]
    );

    resultTrie = trie;
  });

  return resultTrie;
}
// Load the stored merkle trie for this group database. The table holds a
// single row (id = 1, written by addMessages); returns an empty trie on
// the first sync when no row exists yet.
//
// FIX: the query has no placeholders, yet `[group_id]` was passed as a
// bound-parameter list, which the db layer may reject — and the value was
// never used. The (now unused) parameter is kept so existing call sites
// remain valid.
function getMerkle(db, group_id) {
  let rows = db.all('SELECT * FROM messages_merkles');

  if (rows.length > 0) {
    return JSON.parse(rows[0].merkle);
  } else {
    // No merkle trie exists yet (first sync of the app), so create a
    // default one.
    return {};
  }
}
// Handle one "simple" sync request for a file: first collect every stored
// message the client hasn't seen yet (timestamp > since), then merge the
// client's new messages in. Returns the updated merkle trie together with
// the messages to send back.
function sync(messages, since, fileId) {
  let groupDb = getGroupDb(fileId);

  let newMessages = groupDb.all(
    `SELECT * FROM messages_binary
         WHERE timestamp > ?
         ORDER BY timestamp`,
    [since],
    true
  );

  let trie = addMessages(groupDb, messages);

  return { trie, newMessages };
}
module.exports = { sync };

1
util/.#validate-user.js Symbolic link
View file

@ -0,0 +1 @@
james@james.local.427

44
util/async.js Normal file
View file

@ -0,0 +1,44 @@
// Wrap an async function so concurrent invocations are serialized: only
// one call runs at a time, and later calls wait in FIFO order for the
// previous one to settle (fulfil or reject) before starting. Each caller
// receives a promise for its own invocation's result.
function sequential(fn) {
  const state = { active: null, pending: [] };

  // Start fn immediately, wire its settlement to the caller's promise,
  // and kick off the next queued call (if any) once it settles.
  const start = (args, resolve, reject) => {
    state.active = fn(...args);
    state.active.then(
      value => {
        advance();
        resolve(value);
      },
      error => {
        advance();
        reject(error);
      }
    );
  };

  // Dequeue the next waiting call, or mark the wrapper idle.
  const advance = () => {
    if (state.pending.length === 0) {
      state.active = null;
    } else {
      const next = state.pending.shift();
      start(next.args, next.resolve, next.reject);
    }
  };

  return (...args) =>
    new Promise((resolve, reject) => {
      if (state.active) {
        state.pending.push({ resolve, reject, args });
      } else {
        start(args, resolve, reject);
      }
    });
}
module.exports = { sequential };

6
util/error-middleware.js Normal file
View file

@ -0,0 +1,6 @@
// Express error-handling middleware (the 4-arg signature is what marks it
// as an error handler). Logs the error and replies with a generic 500 so
// internal details never leak to the client.
async function middleware(err, req, res, next) {
  console.log('ERROR', err);

  // If a response is already in flight, headers can't be rewritten —
  // delegate to Express' default handler instead of calling res.status.
  if (res.headersSent) {
    return next(err);
  }

  res.status(500).send({ status: 'error', reason: 'internal-error' });
}
module.exports = middleware;

11
util/handle-error.js Normal file
View file

@ -0,0 +1,11 @@
// Wrap an async Express route handler so a rejected promise is caught and
// turned into a generic 500 response instead of an unhandled rejection
// (Express 4 does not catch async handler errors on its own).
// FIX: removed the stray `;` after the function declaration.
function handleError(func) {
  return (req, res) => {
    func(req, res).catch(err => {
      console.log('Error', req.originalUrl, err);
      res.status(500);
      res.send({ status: 'error', reason: 'internal-error' });
    });
  };
}
module.exports = { handleError }

27
util/validate-user.js Normal file
View file

@ -0,0 +1,27 @@
let { getAccountDb } = require('../account-db');
// Look up the session for the token supplied either in the JSON body
// ({ token }) or in the `x-actual-token` header. Returns the session row
// on success; otherwise sends a 401 response and returns null, so callers
// must check for null and stop handling the request.
//
// FIX: removed a debug console.log that wrote the raw session token (and
// the matching session rows) to the server log on every request —
// credentials must not be logged.
function validateUser(req, res) {
  let { token } = req.body || {};
  if (!token) {
    token = req.headers['x-actual-token'];
  }

  let db = getAccountDb();
  let rows = db.all('SELECT * FROM sessions WHERE token = ?', [token]);

  if (rows.length === 0) {
    res.status(401);
    res.send({
      status: 'error',
      reason: 'unauthorized',
      details: 'token-not-found'
    });
    return null;
  }
  return rows[0];
}
module.exports = { validateUser };

3166
yarn.lock Normal file

File diff suppressed because it is too large Load diff