Compare commits
1 commit: master ... jlongster- (f22abc4a2c)
25 changed files with 1359 additions and 987 deletions

@@ -1,5 +0,0 @@
**/node_modules/*
**/log/*
**/shared/*

supervise

.eslintrc.js (22 changes)
@@ -1,22 +0,0 @@
module.exports = {
  root: true,
  env: {
    browser: true,
    amd: true,
    node: true
  },
  parser: '@typescript-eslint/parser',
  plugins: ['@typescript-eslint', 'prettier'],
  extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'],
  rules: {
    'prettier/prettier': 'error',
    '@typescript-eslint/no-unused-vars': [
      'warn',
      {
        argsIgnorePattern: '^_'
      }
    ],

    '@typescript-eslint/no-var-requires': 'off'
  }
};

.github/workflows/build.yml (94 changes, vendored)
@@ -1,29 +1,85 @@
name: Build
name: Build Docker Image

# Docker Images are only built when a new tag is created
on:
push:
branches:
- master
pull_request:
branches: '*'
tags:
- 'v*.*.*'

jobs:
build:
name: Build Docker image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install node
uses: actions/setup-node@v1
- uses: actions/checkout@v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v1

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1

- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
node-version: 16
- name: Cache
uses: actions/cache@v2
id: cache
# Push to both Docker Hub and Github Container Registry
images: |
jlongster/actual-server
ghcr.io/actualbudget/actual-server
# Creates the following tags:
# - actual-server:latest
# - actual-server:1.3
# - actual-server:1.3.7
# - actual-server:sha-90dd603
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha

- name: Docker meta for Alpine image
id: alpine-meta
uses: docker/metadata-action@v4
with:
path: '**/node_modules'
key: yarn-v1-${{ hashFiles('**/yarn.lock') }}
- name: Install
run: yarn --immutable
if: steps.cache.outputs.cache-hit != 'true'
- name: Build
run: yarn build
images: |
jlongster/actual-server
ghcr.io/actualbudget/actual-server
tags: |
type=ref,event=branch
type=ref,event=pr
type=raw,value=latest,suffix=-alpine
type=semver,pattern={{version}},suffix=-alpine
type=semver,pattern={{major}}.{{minor}},suffix=-alpine
type=sha,suffix=-alpine

- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build and push standard image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}

- name: Build and push Alpine image
uses: docker/build-push-action@v2
with:
context: .
push: true
file: Dockerfile.alpine
platforms: linux/amd64,linux/arm64
tags: ${{ steps.alpine-meta.outputs.tags }}

.github/workflows/docker.yml (93 changes, vendored)
@@ -1,93 +0,0 @@
name: Build Docker Image

# Docker Images are built for every push to master or when a new tag is created
on:
  push:
    branches:
      - master
    tags:
      - 'v*.*.*'
    paths-ignore:
      - README.md
      - LICENSE.txt

env:
  IMAGES: |
    jlongster/actual-server
    ghcr.io/actualbudget/actual-server

  # Creates the following tags:
  # - actual-server:latest (see docker/metadata-action flavor inputs, below)
  # - actual-server:edge (for master)
  # - actual-server:1.3
  # - actual-server:1.3.7
  # - actual-server:sha-90dd603
  TAGS: |
    type=edge,value=edge
    type=semver,pattern={{version}}
    type=sha

jobs:
  build:
    name: Build Docker image
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          # Push to both Docker Hub and Github Container Registry
          images: ${{ env.IMAGES }}
          # Automatically update :latest for our semver tags
          flavor: |
            latest=auto
          tags: ${{ env.TAGS }}

      - name: Docker meta for Alpine image
        id: alpine-meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.IMAGES }}
          # Automatically update :latest for our semver tags and suffix all tags
          flavor: |
            latest=auto
            suffix=-alpine,onlatest=true
          tags: $${{ env.TAGS }}

      - name: Login to Docker Hub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push standard image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}

      - name: Build and push Alpine image
        uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          file: Dockerfile.alpine
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.alpine-meta.outputs.tags }}

.github/workflows/lint.yml (29 changes, vendored)
@@ -1,29 +0,0 @@
name: Linter

on:
  push:
    branches:
      - master
  pull_request:
    branches: '*'

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install node
        uses: actions/setup-node@v1
        with:
          node-version: 16
      - name: Cache
        uses: actions/cache@v2
        id: cache
        with:
          path: '**/node_modules'
          key: yarn-v1-${{ hashFiles('**/yarn.lock') }}
      - name: Install
        run: yarn --immutable
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Lint
        run: yarn lint

.gitignore (3 changes, vendored)
@@ -7,5 +7,4 @@ supervise
bin/large-sync-data.txt
user-files
server-files
fly.toml
build/
fly.toml

@@ -1,4 +0,0 @@
{
  "singleQuote": true,
  "trailingComma": "none"
}

README.md (13 changes)
@@ -4,7 +4,7 @@ Join the [discord](https://discord.gg/pRYNYr4W5A)!

## Non-technical users

We are working on simpler one-button click deployments of Actual. This will reduce the friction for people not as comfortable with the command line. Some non-official options are listed at the bottom.
We are looking into a feature for one-button click deployment of Actual. This will reduce the friction for people not as comfortable with the command line.

## Running

@@ -29,12 +29,11 @@ docker build -t actual-server .
docker run -p 5006:5006 actual-server
```

## Deploying

You should deploy your server so it's always running. We recommend [fly.io](https://fly.io), which makes it incredibly easy and provides a free plan.

[fly.io](https://fly.io) allows running the application directly and provides a free tier. You should be comfortable with using the command line to set it up, though.

[Create an account](https://fly.io/app/sign-in). Although you are required to enter payment details, everything we do here will work on the free tier and you won't be charged.

Next, [install the `flyctl`](https://fly.io/docs/flyctl/installing/) utility. Run `flyctl auth login` to sign into your account.

@@ -74,13 +73,6 @@ That's it! Actual will automatically check if the `/data` directory exists and u

_You can also configure the data dir with the `ACTUAL_USER_FILES` environment variable._

### One-click hosting solutions

These are non-official one-click solutions for running Actual. If you provide a service like this, feel free to open a PR and add it to this list. These run Actual via a Docker image.

* PikaPods: [Run on PikaPods](https://www.pikapods.com/pods?run=actual)

## Configuring the server URL

The Actual app is totally separate from the server. In this project, they happen to both be served by the same server, but the app doesn't know where the server lives.

@@ -88,4 +80,3 @@ The Actual app is totally separate from the server. In this project, they happen
The server could live on a completely different domain. You might set up Actual so that the app and server are running in completely separate places.

Since Actual doesn't know what server to use, the first thing it does is ask you for the server URL. If you are running this project, simply click "Use this domain" and it will automatically fill it in with the current domain. This works because we are serving the app and server in the same place.

@@ -8,9 +8,7 @@ let { getAccountDb } = require('./account-db');
let app = express();
app.use(errorMiddleware);

function init() {
  // eslint-disable-previous-line @typescript-eslint/no-empty-function
}
function init() {}

function hashPassword(password) {
  return bcrypt.hashSync(password, 12);

@@ -190,7 +190,7 @@ app.post(
'Content-Type': 'application/json',
'User-Agent': 'Actual Budget'
}
}).then((res) => res.json());
}).then(res => res.json());

await req.runQuery(
'INSERT INTO access_tokens (item_id, user_id, access_token) VALUES ($1, $2, $3)',

@@ -233,7 +233,7 @@ app.post(
'Content-Type': 'application/json',
'User-Agent': 'Actual Budget'
}
}).then((res) => res.json());
}).then(res => res.json());

if (resData.removed !== true) {
console.log('[Error] Item not removed: ' + access_token.slice(0, 3));

@@ -286,7 +286,7 @@ app.post(
'Content-Type': 'application/json',
'User-Agent': 'Actual Budget'
}
}).then((res) => res.json());
}).then(res => res.json());

res.send(
JSON.stringify({

@@ -342,7 +342,7 @@ app.post(
'Content-Type': 'application/json',
'User-Agent': 'Actual Budget'
}
}).then((res) => res.json());
}).then(res => res.json());

res.send(
JSON.stringify({

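All four hunks above touch the same call shape: a JSON POST with an "Actual Budget" User-Agent whose response is parsed with `.then(res => res.json())` — the only difference between the two sides is whether the arrow-function parameter is parenthesized. A minimal sketch of that shape, using `node-fetch` from this project's dependencies; the URL and body here are invented placeholders, not the real endpoints:

```js
let fetch = require('node-fetch');

// Hypothetical endpoint and payload, shown only to illustrate the call shape
// that appears in the hunks above.
async function postJson(url, body) {
  let res = await fetch(url, {
    method: 'POST',
    body: JSON.stringify(body),
    headers: {
      'Content-Type': 'application/json',
      'User-Agent': 'Actual Budget'
    }
  });
  return res.json(); // equivalent to `.then(res => res.json())`
}
```
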
app-sync.js (76 changes)
@@ -1,13 +1,16 @@
let fs = require('fs/promises');
let { Buffer } = require('buffer');
let fs = require('fs/promises');
let { join } = require('path');
let express = require('express');
let uuid = require('uuid');
let AdmZip = require('adm-zip');
let { validateUser } = require('./util/validate-user');
let errorMiddleware = require('./util/error-middleware');
let config = require('./load-config');
let { getAccountDb } = require('./account-db');
let { getPathForUserFile, getPathForGroupFile } = require('./util/paths');

let simpleSync = require('./sync-simple');
let fullSync = require('./sync-full');

let actual = require('@actual-app/api');
let SyncPb = actual.internal.SyncProtoBuf;

@@ -15,8 +18,17 @@ let SyncPb = actual.internal.SyncProtoBuf;
const app = express();
app.use(errorMiddleware);

// eslint-disable-next-line
async function init() {}
async function init() {
let fileDir = join(process.env.ACTUAL_USER_FILES || config.userFiles);

console.log('Initializing Actual with user file dir:', fileDir);

await actual.init({
config: {
dataDir: fileDir
}
});
}

// This is a version representing the internal format of sync
// messages. When this changes, all sync files need to be reset. We

@@ -111,15 +123,31 @@ app.post('/sync', async (req, res) => {
return false;
}

let { trie, newMessages } = simpleSync.sync(messages, since, group_id);
// TODO: We also provide a "simple" sync method which currently isn't
// used. This method just stores the messages locally and doesn't
// load the whole app at all. If we want to support end-to-end
// encryption, this method is required because we can't read the
// messages. Using it looks like this:
//
// let simpleSync = require('./sync-simple');
// let {trie, newMessages } = simpleSync.sync(messages, since, file_id);

let { trie, newMessages } = await fullSync.sync(messages, since, file_id);

// encode it back...
let responsePb = new SyncPb.SyncResponse();
responsePb.setMerkle(JSON.stringify(trie));
newMessages.forEach((msg) => responsePb.addMessages(msg));

for (let i = 0; i < newMessages.length; i++) {
let msg = newMessages[i];
let envelopePb = new SyncPb.MessageEnvelope();
envelopePb.setTimestamp(msg.timestamp);
envelopePb.setIsencrypted(msg.is_encrypted === 1);
envelopePb.setContent(msg.content);
responsePb.addMessages(envelopePb);
}

res.set('Content-Type', 'application/actual-sync');
res.set('X-ACTUAL-SYNC-METHOD', 'simple');
res.send(Buffer.from(responsePb.serializeBinary()));
});

@@ -166,7 +194,7 @@ app.post('/user-create-key', (req, res) => {
res.send(JSON.stringify({ status: 'ok' }));
});

app.post('/reset-user-file', async (req, res) => {
app.post('/reset-user-file', (req, res) => {
let user = validateUser(req, res);
if (!user) {
return;

@@ -186,11 +214,10 @@ app.post('/reset-user-file', async (req, res) => {
accountDb.mutate('UPDATE files SET group_id = NULL WHERE id = ?', [fileId]);

if (group_id) {
try {
await fs.unlink(getPathForGroupFile(group_id));
} catch (e) {
console.log(`Unable to delete sync data for group "${group_id}"`);
}
// TODO: Instead of doing this, just delete the db file named
// after the group
// db.mutate('DELETE FROM messages_binary WHERE group_id = ?', [group_id]);
// db.mutate('DELETE FROM messages_merkles WHERE group_id = ?', [group_id]);
}

res.send(JSON.stringify({ status: 'ok' }));

@@ -247,11 +274,21 @@ app.post('/upload-user-file', async (req, res) => {
}
}

// TODO: If we want to support end-to-end encryption, we'd write the
// raw file down because it's an encrypted blob. This isn't
// supported yet in the self-hosted version because it's unclear if
// it's still needed, given that you own your server
//
// await fs.writeFile(join(config.userFiles, `${fileId}.blob`), req.body);

let zip = new AdmZip(req.body);

try {
await fs.writeFile(getPathForUserFile(fileId), req.body);
zip.extractAllTo(join(config.userFiles, fileId), true);
} catch (err) {
console.log('Error writing file', err);
res.send(JSON.stringify({ status: 'error' }));
return;
}

let rows = accountDb.all('SELECT id FROM files WHERE id = ?', [fileId]);

@@ -278,7 +315,6 @@ app.post('/upload-user-file', async (req, res) => {
'UPDATE files SET sync_version = ?, encrypt_meta = ?, name = ? WHERE id = ?',
[syncFormatVersion, encryptMeta, name, fileId]
);

res.send(JSON.stringify({ status: 'ok', groupId }));
}
});

@@ -301,14 +337,14 @@ app.get('/download-user-file', async (req, res) => {
return;
}

let buffer;
let zip = new AdmZip();
try {
buffer = await fs.readFile(getPathForUserFile(fileId));
zip.addLocalFolder(join(config.userFiles, fileId), '/');
} catch (e) {
console.log(`Error: file does not exist: ${getPathForUserFile(fileId)}`);
res.status(500).send('File does not exist on server');
res.status(500).send('Error reading files');
return;
}
let buffer = zip.toBuffer();

res.setHeader('Content-Disposition', `attachment;filename=${fileId}`);
res.send(buffer);

@@ -349,7 +385,7 @@ app.get('/list-user-files', (req, res) => {
res.send(
JSON.stringify({
status: 'ok',
data: rows.map((row) => ({
data: rows.map(row => ({
deleted: row.deleted,
fileId: row.id,
groupId: row.group_id,

app.js (15 changes)
@@ -10,7 +10,7 @@ const syncApp = require('./app-sync');

const app = express();

process.on('unhandledRejection', (reason) => {
process.on('unhandledRejection', reason => {
console.log('Rejection:', reason);
});

@@ -29,16 +29,7 @@ app.get('/mode', (req, res) => {
app.use(actuator()); // Provides /health, /metrics, /info

// The web frontend
app.use((req, res, next) => {
res.set('Cross-Origin-Opener-Policy', 'same-origin');
res.set('Cross-Origin-Embedder-Policy', 'require-corp');
next();
});
app.use(
express.static(__dirname + '/node_modules/@actual-app/web/build', {
index: false
})
);
app.use(express.static(__dirname + '/node_modules/@actual-app/web/build'));
app.get('/*', (req, res) => {
res.sendFile(__dirname + '/node_modules/@actual-app/web/build/index.html');
});

@@ -59,7 +50,7 @@ async function run() {
app.listen(config.port, config.hostname);
}

run().catch((err) => {
run().catch(err => {
console.log('Error starting app:', err);
process.exit(1);
});

@@ -6,12 +6,10 @@ processes = []

[env]
PORT = "5006"
TINI_SUBREAPER = 1

[experimental]
allowed_public_ports = []
auto_rollback = true
cmd = ["node", "--max-old-space-size=180", "app.js"]

[[services]]
http_checks = []

@@ -1,6 +1,5 @@
let config;
try {
  // @ts-expect-error TS2307: we expect this file may not exist
  config = require('./config');
} catch (e) {
  let fs = require('fs');

@@ -16,7 +15,4 @@ try {
  };
}

// The env variable always takes precedence
config.userFiles = process.env.ACTUAL_USER_FILES || config.userFiles;

module.exports = config;

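The loader above prefers a local `./config.js` and falls back to a generated config when the file is absent, with `ACTUAL_USER_FILES` always overriding the configured user-files directory. As a rough illustration only — the exact key set is an assumption based on the fields referenced elsewhere in this diff (`config.port`, `config.hostname`, `config.userFiles`, and port 5006 from the README and fly.toml) — a local `config.js` might look like:

```js
// config.js — illustrative sketch; key names assumed from the fields this
// diff reads elsewhere, the hostname value is a placeholder.
let { join } = require('path');

module.exports = {
  port: 5006,
  hostname: '0.0.0.0',
  userFiles: join(__dirname, 'user-files')
};
```
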
@@ -1,4 +1,4 @@
export default async function runMigration(db) {
export default async function runMigration(db, uuid) {
function getValue(node) {
return node.expr != null ? node.expr : node.cachedValue;
}

@@ -37,7 +37,7 @@ CREATE TABLE kvcache_key (id INTEGER PRIMARY KEY, key REAL);
true
);
db.transaction(() => {
budget.map((monthBudget) => {
budget.map(monthBudget => {
let match = monthBudget.name.match(
/^(budget-report|budget)(\d+)!budget-(.+)$/
);

@@ -84,7 +84,7 @@ CREATE TABLE kvcache_key (id INTEGER PRIMARY KEY, key REAL);
true
);
db.transaction(() => {
buffers.map((buffer) => {
buffers.map(buffer => {
let match = buffer.name.match(/^budget(\d+)!buffered$/);
if (match) {
let month = match[1].slice(0, 4) + '-' + match[1].slice(4);

@@ -108,7 +108,7 @@ CREATE TABLE kvcache_key (id INTEGER PRIMARY KEY, key REAL);
true
);

let parseNote = (str) => {
let parseNote = str => {
try {
let value = JSON.parse(str);
return value && value !== '' ? value : null;

@@ -118,7 +118,7 @@ CREATE TABLE kvcache_key (id INTEGER PRIMARY KEY, key REAL);
};

db.transaction(() => {
notes.forEach((note) => {
notes.forEach(note => {
let parsed = parseNote(getValue(note));
if (parsed) {
let [, id] = note.name.split('!');

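The hunks in this migration only toggle arrow-function parentheses, but the surrounding parsing logic is worth a worked example: the old key-value names encode a month (and, for category budgets, a category id), and `match[1].slice(0, 4) + '-' + match[1].slice(4)` turns `202112` into `2021-12`. A hedged sketch — the sample names below are invented for illustration, the regexes are the ones shown above:

```js
// Illustrative only: the sample names are made up; the regexes come from
// the migration hunks above.
let name = 'budget202112!buffered';
let match = name.match(/^budget(\d+)!buffered$/);
if (match) {
  let month = match[1].slice(0, 4) + '-' + match[1].slice(4);
  console.log(month); // "2021-12"
}

let budgetName = 'budget-report202201!budget-some-category-id';
let m = budgetName.match(/^(budget-report|budget)(\d+)!budget-(.+)$/);
// m[1] === 'budget-report', m[2] === '202201', m[3] === 'some-category-id'
```
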
package.json (36 changes)
@@ -1,37 +1,41 @@
{
"name": "actual-sync",
"version": "22.12.09",
"version": "1.0.0",
"license": "MIT",
"description": "actual syncing server",
"main": "index.js",
"scripts": {
"start": "node app",
"lint": "eslint .",
"build": "tsc",
"types": "tsc --noEmit --incremental",
"verify": "yarn -s lint && yarn types"
"lint": "eslint --ignore-pattern '**/node_modules/*' --ignore-pattern '**/log/*' --ignore-pattern 'supervise' --ignore-pattern '**/shared/*' ."
},
"dependencies": {
"@actual-app/api": "4.1.5",
"@actual-app/web": "22.12.3",
"@actual-app/api": "^4.0.1",
"@actual-app/web": "^4.0.2",
"adm-zip": "^0.5.9",
"bcrypt": "^5.0.1",
"better-sqlite3": "^7.5.0",
"body-parser": "^1.18.3",
"cors": "^2.8.5",
"express": "4.17",
"express": "^4.16.3",
"express-actuator": "^1.8.1",
"express-response-size": "^0.0.3",
"node-fetch": "^2.2.0",
"uuid": "^3.3.2"
},
"eslintConfig": {
"extends": "react-app"
},
"devDependencies": {
"@types/better-sqlite3": "^7.5.0",
"@types/node": "^17.0.31",
"@typescript-eslint/eslint-plugin": "^5.23.0",
"@typescript-eslint/parser": "^5.23.0",
"eslint": "^8.15.0",
"eslint-plugin-prettier": "^4.0.0",
"prettier": "^2.6.2",
"typescript": "^4.6.4"
"babel-eslint": "^10.0.1",
"eslint": "^5.12.1",
"eslint-config-react-app": "^3.0.6",
"eslint-plugin-flowtype": "^3.2.1",
"eslint-plugin-import": "^2.14.0",
"eslint-plugin-jsx-a11y": "^6.1.2",
"eslint-plugin-react": "^7.12.4"
},
"prettier": {
"singleQuote": true,
"trailingComma": "none"
}
}

@@ -1,9 +1,10 @@

CREATE TABLE messages_binary
(timestamp TEXT PRIMARY KEY,
(timestamp TEXT,
is_encrypted BOOLEAN,
content bytea);
content bytea,
PRIMARY KEY(timestamp, group_id));

CREATE TABLE messages_merkles
(id INTEGER PRIMARY KEY,
(id TEXT PRIMAREY KEY,
merkle TEXT);

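One side of this hunk keys `messages_binary` on `timestamp` alone, the other on the composite `(timestamp, group_id)`: the composite key lets two sync groups hold a message with the same timestamp while still deduplicating within a group. A small illustration using `better-sqlite3` (which is in this project's dependencies); the column list is simplified and the data is invented — the real schema also has `is_encrypted` and a binary `content` column:

```js
let Database = require('better-sqlite3');
let db = new Database(':memory:');

// Simplified schema for illustration of the composite primary key.
db.exec(`CREATE TABLE messages_binary
  (timestamp TEXT, group_id TEXT, content TEXT,
   PRIMARY KEY(timestamp, group_id))`);

let insert = db.prepare(
  'INSERT OR IGNORE INTO messages_binary (timestamp, group_id, content) VALUES (?, ?, ?)'
);
insert.run('2022-01-01T00:00:00.000Z-0000-abc', 'group-a', 'x');
insert.run('2022-01-01T00:00:00.000Z-0000-abc', 'group-b', 'y'); // allowed: different group
insert.run('2022-01-01T00:00:00.000Z-0000-abc', 'group-a', 'x'); // ignored: duplicate key
```
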
sync-full.js (21 changes)
@@ -15,7 +15,7 @@ const sync = sequential(async function syncAPI(messages, since, fileId) {
await actual.internal.send('load-budget', { id: fileId });
}

messages = messages.map((envPb) => {
messages = messages.map(envPb => {
let timestamp = envPb.getTimestamp();
let msg = SyncPb.Message.deserializeBinary(envPb.getContent());
return {

@@ -27,26 +27,11 @@
};
});

const newMessages = await actual.internal.syncAndReceiveMessages(
messages,
since
);
let newMessages = actual.internal.syncAndReceiveMessages(messages, since);

return {
trie: actual.internal.timestamp.getClock().merkle,
newMessages: newMessages.map((msg) => {
const envelopePb = new SyncPb.MessageEnvelope();

const messagePb = new SyncPb.Message();
messagePb.setDataset(msg.dataset);
messagePb.setRow(msg.row);
messagePb.setColumn(msg.column);
messagePb.setValue(msg.value);
envelopePb.setTimestamp(msg.timestamp);

envelopePb.setContent(messagePb.serializeBinary());
return envelopePb;
})
newMessages: newMessages
};
});

@@ -1,15 +1,13 @@
let { existsSync, readFileSync } = require('fs');
let { join } = require('path');
let { openDatabase } = require('./db');
let { getPathForGroupFile } = require('./util/paths');

let actual = require('@actual-app/api');
let merkle = actual.internal.merkle;
let SyncPb = actual.internal.SyncProtoBuf;
let Timestamp = actual.internal.timestamp.Timestamp;

function getGroupDb(groupId) {
let path = getPathForGroupFile(groupId);
let path = join(__dirname, `user-files/${groupId}.sqlite`);
let needsInit = !existsSync(path);

let db = openDatabase(path);

@@ -58,8 +56,8 @@ function addMessages(db, messages) {
return returnValue;
}

function getMerkle(db) {
let rows = db.all('SELECT * FROM messages_merkles');
function getMerkle(db, group_id) {
let rows = db.all('SELECT * FROM messages_merkles', [group_id]);

if (rows.length > 0) {
return JSON.parse(rows[0].merkle);

@@ -70,29 +68,19 @@ function getMerkle(db) {
}
}

function sync(messages, since, groupId) {
let db = getGroupDb(groupId);
function sync(messages, since, fileId) {
let db = getGroupDb(fileId);
let newMessages = db.all(
`SELECT * FROM messages_binary
WHERE timestamp > ?
ORDER BY timestamp`,
[since]
[since],
true
);

let trie = addMessages(db, messages);

db.close();

return {
trie,
newMessages: newMessages.map((msg) => {
const envelopePb = new SyncPb.MessageEnvelope();
envelopePb.setTimestamp(msg.timestamp);
envelopePb.setIsencrypted(msg.is_encrypted);
envelopePb.setContent(msg.content);
return envelopePb;
})
};
return { trie, newMessages };
}

module.exports = { sync };

@@ -4,7 +4,6 @@
// DOM for URL global in Node 16+
"lib": ["ES2021", "DOM"],
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"experimentalDecorators": true,
"resolveJsonModule": true,
"downlevelIteration": true,

@@ -12,9 +11,10 @@
// Check JS files too
"allowJs": true,
"checkJs": true,
"moduleResolution": "node",
"module": "commonjs",
"outDir": "build"
// Used for temp builds
"outDir": "build",
"moduleResolution": "Node",
"module": "ESNext"
},
"exclude": ["node_modules", "build", "./app-plaid.js"]
"exclude": ["node_modules", "build"]
}

@@ -17,11 +17,11 @@ function sequential(fn) {
sequenceState.running = fn(...args);

sequenceState.running.then(
(val) => {
val => {
pump();
resolve(val);
},
(err) => {
err => {
pump();
reject(err);
}

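The hunk above only strips arrow-function parentheses inside `sequential`, a helper that serializes calls to an async function so that only one sync runs at a time. The fragment does not show the whole wrapper, so here is a hedged sketch of how such a queueing helper can be written — an illustration of the idea, not the project's exact implementation:

```js
// Sketch of a "run at most one call at a time" wrapper (illustrative,
// not the repository's exact code).
function sequential(fn) {
  let queue = [];
  let running = null;

  function pump() {
    running = null;
    let next = queue.shift();
    if (next) {
      run(next.args, next.resolve, next.reject);
    }
  }

  function run(args, resolve, reject) {
    running = fn(...args);
    running.then(
      val => {
        pump();
        resolve(val);
      },
      err => {
        pump();
        reject(err);
      }
    );
  }

  return (...args) =>
    new Promise((resolve, reject) => {
      if (running) {
        queue.push({ args, resolve, reject });
      } else {
        run(args, resolve, reject);
      }
    });
}
```
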
@@ -1,4 +1,4 @@
async function middleware(err, req, res, _next) {
async function middleware(err, req, res, next) {
  console.log('ERROR', err);
  res.status(500).send({ status: 'error', reason: 'internal-error' });
}

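The only change in this hunk is the name of the unused fourth parameter. The parameter itself has to stay: Express treats a middleware as an error handler precisely because it declares four parameters (`err, req, res, next`), so dropping the argument would silently turn it into a normal middleware. The `_next` spelling simply satisfies the `argsIgnorePattern: '^_'` rule from the `.eslintrc.js` shown earlier; mounting it is unchanged either way:

```js
// The 4-parameter signature is what marks this as an Express error handler;
// it is mounted the same way on both sides of the diff.
let errorMiddleware = require('./util/error-middleware');
app.use(errorMiddleware);
```
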
@@ -1,11 +1,11 @@
function handleError(func) {
return (req, res) => {
func(req, res).catch((err) => {
console.log('Error', req.originalUrl, err);
func(req, res).catch(err => {
console.log('Error', req.originalUrl, err);
res.status(500);
res.send({ status: 'error', reason: 'internal-error' });
});
};
}
};

module.exports = { handleError };
module.exports = { handleError }

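Both sides of this hunk implement the same wrapper: `handleError` takes an async route handler and converts a rejected promise into a logged 500 response instead of an unhandled rejection. A hedged usage sketch — the route path, handler body, and the require path are assumptions made for illustration:

```js
let express = require('express');
let { handleError } = require('./util/handle-error'); // module path assumed

let app = express();

async function somethingThatMightReject() {
  // placeholder for real work
  return 42;
}

// Hypothetical route: any rejection inside the async handler is caught by
// handleError and answered with a 500 JSON error.
app.get(
  '/example-route',
  handleError(async (req, res) => {
    let data = await somethingThatMightReject();
    res.send({ status: 'ok', data });
  })
);
```
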
@@ -1,12 +0,0 @@
let { join } = require('path');
let config = require('../load-config');

function getPathForUserFile(fileId) {
  return join(config.userFiles, `file-${fileId}.blob`);
}

function getPathForGroupFile(groupId) {
  return join(config.userFiles, `group-${groupId}.sqlite`);
}

module.exports = { getPathForUserFile, getPathForGroupFile };