From 8872ffd32f38afc76c253a1962d840480411b8dc Mon Sep 17 00:00:00 2001 From: Brian Botha Date: Fri, 17 Sep 2021 16:53:33 +1000 Subject: [PATCH] Vaults sharing, permissions and scanning --- src/PolykeyAgent.ts | 29 +- src/agent/GRPCClientAgent.ts | 14 +- src/agent/agentService.ts | 139 +-- src/agent/backgroundAgent.ts | 1 + src/agent/utils.ts | 4 +- src/bin/agent/start.ts | 12 +- src/bin/nodes/index.ts | 2 + src/bin/nodes/scan.ts | 95 ++ src/bin/vaults/clone.ts | 6 +- src/bin/vaults/index.ts | 6 - src/bin/vaults/pull.ts | 24 +- src/bin/vaults/share.ts | 2 +- src/bin/vaults/unshare.ts | 2 +- src/bootstrap/bootstrap.ts | 4 +- src/client/GRPCClientClient.ts | 28 +- src/client/clientService.ts | 7 - src/client/rpcNodes.ts | 25 + src/client/rpcVaults.ts | 158 +--- src/git/GitRequest.ts | 87 -- src/git/index.ts | 1 - src/git/utils.ts | 2 - src/keys/KeyManager.ts | 15 - src/nodes/NodeConnection.ts | 22 +- src/nodes/NodeManager.ts | 9 +- .../js/polykey/v1/agent_service_grpc_pb.d.ts | 47 +- .../js/polykey/v1/agent_service_grpc_pb.js | 54 +- .../js/polykey/v1/client_service_grpc_pb.d.ts | 102 +-- .../js/polykey/v1/client_service_grpc_pb.js | 85 +- src/proto/js/polykey/v1/vaults/vaults_pb.d.ts | 6 + src/proto/js/polykey/v1/vaults/vaults_pb.js | 53 +- .../schemas/polykey/v1/agent_service.proto | 3 +- .../schemas/polykey/v1/client_service.proto | 8 +- .../schemas/polykey/v1/vaults/vaults.proto | 1 + src/vaults/VaultInternal.ts | 98 ++- src/vaults/VaultManager.ts | 305 ++++--- src/vaults/errors.ts | 6 + src/vaults/utils.ts | 163 +--- tests/PolykeyAgent.test.ts | 7 + tests/agent/GRPCClientAgent.test.ts | 53 +- tests/agent/utils.test.ts | 8 +- tests/agent/utils.ts | 8 + tests/bin/agent.test.ts | 11 +- tests/bin/echo.test.ts | 1 + tests/bin/identities.test.ts | 1 + tests/bin/keys.test.ts | 3 + tests/bin/nodes.test.ts | 38 + tests/bin/notifications.test.ts | 2 + tests/bin/secret.test.ts | 1 + tests/bin/sessions.test.ts | 1 + tests/bin/vaults.test.ts | 400 ++++----- tests/bootstrap/bootstrap.test.ts | 1 + tests/client/GRPCClientClient.test.ts | 1 + tests/client/PolykeyClient.test.ts | 2 + tests/client/clientService.test.ts | 139 +-- tests/discovery/Discovery.test.ts | 1 + tests/index.test.ts | 1 + tests/keys/KeyManager.test.ts | 80 -- tests/nodes/NodeConnection.test.ts | 68 +- tests/nodes/NodeManager.test.ts | 83 +- .../NotificationsManager.test.ts | 23 +- tests/utils.ts | 1 + tests/vaults/VaultInternal.test.ts | 2 +- tests/vaults/VaultManager.test.ts | 830 +++++++++--------- tests/vaults/VaultOps.test.ts | 14 +- tests/vaults/old/Vault.test.ts.old | 565 ------------ tests/vaults/utils.test.ts | 75 -- 66 files changed, 1570 insertions(+), 2475 deletions(-) create mode 100644 src/bin/nodes/scan.ts delete mode 100644 src/git/GitRequest.ts delete mode 100644 tests/vaults/old/Vault.test.ts.old diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 0b1054df0f..2c4e9ae3ee 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -286,18 +286,29 @@ class Polykey { fs: fs_, logger: logger_.getChild('NodeManager'), })); + const notifications_ = + notificationsManager ?? + (await NotificationsManager.createNotificationsManager({ + acl: acl_, + db: db_, + nodeManager: nodes_, + keyManager: keys_, + logger: logger_.getChild('NotificationsManager'), + fresh, + })); const vaults_ = vaultManager ?? 
(await VaultManager.createVaultManager({ keyManager: keys_, vaultsPath: vaultsPath, - vaultsKey: keys_.vaultKey, nodeManager: nodes_, gestaltGraph: gestalts_, + notificationsManager: notifications_, acl: acl_, db: db_, fs: fs_, logger: logger_.getChild('VaultManager'), + fresh, })); // Setting the workerManager for vaults. if (workers_ != null) { @@ -319,16 +330,6 @@ class Polykey { nodeManager: nodes_, logger: logger_.getChild('Discovery'), })); - const notifications_ = - notificationsManager ?? - (await NotificationsManager.createNotificationsManager({ - acl: acl_, - db: db_, - nodeManager: nodes_, - keyManager: keys_, - logger: logger_.getChild('NotificationsManager'), - fresh, - })); const sessionManager = await SessionManager.createSessionManager({ db: db_, @@ -476,6 +477,8 @@ class Polykey { nodeManager: this.nodes, sigchain: this.sigchain, notificationsManager: this.notifications, + acl: this.acl, + gestaltGraph: this.gestalts, }); // Registering providers. @@ -492,7 +495,6 @@ class Polykey { */ public async start({ fresh = false }: { fresh?: boolean }) { this.logger.info('Starting Polykey'); - if ( (await Lockfile.checkLock( this.fs, @@ -525,7 +527,6 @@ class Polykey { const keyPrivatePem = this.keys.getRootKeyPairPem().privateKey; const certChainPem = await this.keys.getRootCertChainPem(); - // GRPC Server // Client server await this.clientGrpcServer.start({ @@ -543,7 +544,6 @@ class Polykey { host: this.agentGrpcHost as Host, port: this.agentGrpcPort as Port, }); - await this.fwdProxy.start({ tlsConfig: { keyPrivatePem: keyPrivatePem, @@ -571,7 +571,6 @@ class Polykey { 'fwdProxyPort', this.fwdProxy.getProxyPort(), ); - this.logger.info('Started Polykey'); } diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index 0fa147c93a..fef99d1f81 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -85,10 +85,10 @@ class GRPCClientAgent extends GRPCClient { } @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsScan(...args) { - return grpcUtils.promisifyReadableStreamCall( + public nodesScan(...args) { + return grpcUtils.promisifyReadableStreamCall( this.client, - this.client.vaultsScan, + this.client.nodesScan, )(...args); } @@ -132,14 +132,6 @@ class GRPCClientAgent extends GRPCClient { )(...args); } - @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsPermisssionsCheck(...args) { - return grpcUtils.promisifyUnaryCall( - this.client, - this.client.vaultsPermisssionsCheck, - )(...args); - } - @ready(new grpcErrors.ErrorGRPCClientNotStarted()) public nodesCrossSignClaim(...args) { return grpcUtils.promisifyDuplexStreamCall< diff --git a/src/agent/agentService.ts b/src/agent/agentService.ts index 5b1fb52e7c..b375336932 100644 --- a/src/agent/agentService.ts +++ b/src/agent/agentService.ts @@ -4,6 +4,9 @@ import type { ClaimIdString, } from '../claims/types'; import type { VaultName } from '../vaults/types'; +import type { ACL } from '../acl'; +import type { GestaltGraph } from '../gestalts'; +import type { NodeId } from '../nodes/types'; import * as grpc from '@grpc/grpc-js'; import { promisify } from '../utils'; @@ -44,12 +47,16 @@ function createAgentService({ nodeManager, notificationsManager, sigchain, + acl, + gestaltGraph, }: { keyManager: KeyManager; vaultManager: VaultManager; nodeManager: NodeManager; sigchain: Sigchain; notificationsManager: NotificationsManager; + acl: ACL; + gestaltGraph: GestaltGraph; }): IAgentServiceServer { const agentService: IAgentServiceServer = { echo: async ( @@ -66,21 +73,44 
@@ function createAgentService({ const genWritable = grpcUtils.generatorWritable(call); const request = call.request; const vaultNameOrId = request.getNameOrId(); - let vaultId, vaultName; + let vaultName; + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + if (!vaultId) { + try { + vaultId = makeVaultId(idUtils.fromString(vaultNameOrId)); + vaultName = await vaultManager.getVaultName(vaultId); + } catch (err) { + await genWritable.throw(new vaultsErrors.ErrorVaultUndefined()); + return; + } + } else { + vaultName = vaultNameOrId; + } + await vaultManager.openVault(vaultId); + const metaIn = call.metadata; + const nodeId = metaIn.get('nodeId').pop()!.toString() as NodeId; + const actionType = metaIn.get('action').pop()!.toString(); + const perms = await acl.getNodePerm(nodeId); + if (!perms) { + await genWritable.throw(new vaultsErrors.ErrorVaultPermissionDenied()); + return; + } + const vaultPerms = perms.vaults[idUtils.toString(vaultId)]; try { - vaultId = makeVaultId(idUtils.fromString(vaultNameOrId)); - await vaultManager.openVault(vaultId); - vaultName = await vaultManager.getVaultName(vaultId); + if (vaultPerms[actionType] !== null) { + await genWritable.throw( + new vaultsErrors.ErrorVaultPermissionDenied(), + ); + return; + } } catch (err) { - if (err instanceof vaultsErrors.ErrorVaultUndefined) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - vaultName = vaultNameOrId; - } else { - throw err; + if (err instanceof TypeError) { + await genWritable.throw( + new vaultsErrors.ErrorVaultPermissionDenied(), + ); + return; } } - // TODO: Check the permissions here const meta = new grpc.Metadata(); meta.set('vaultName', vaultName); meta.set('vaultId', makeVaultIdPretty(vaultId)); @@ -112,22 +142,15 @@ function createAgentService({ const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); if (vaultNameOrId == null) throw new ErrorGRPC('vault-name not in metadata.'); - let vaultId; - try { - vaultId = makeVaultId(vaultNameOrId); - await vaultManager.openVault(vaultId); - } catch (err) { - if ( - err instanceof vaultsErrors.ErrorVaultUndefined || - err instanceof SyntaxError - ) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - } else { - throw err; + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + if (!vaultId) { + try { + vaultId = makeVaultId(vaultNameOrId); + } catch (err) { + return; } } - // TODO: Check the permissions here + await vaultManager.openVault(vaultId); const response = new vaultsPB.PackChunk(); const [sideBand, progressStream] = await vaultManager.handlePackRequest( vaultId, @@ -154,20 +177,39 @@ function createAgentService({ call.end(); }); }, - vaultsScan: async ( - call: grpc.ServerWritableStream, + nodesScan: async ( + call: grpc.ServerWritableStream, ): Promise => { const genWritable = grpcUtils.generatorWritable(call); - const response = new vaultsPB.Vault(); - const id = makeNodeId(call.request.getNodeId()); + const response = new vaultsPB.List(); + const nodeId = makeNodeId(call.request.getNodeId()); + const perms = await gestaltGraph.getGestaltActionsByNode(nodeId); + if (!perms) { + await genWritable.throw(new vaultsErrors.ErrorVaultPermissionDenied()); + return; + } try { - throw Error('Not implemented'); - // FIXME: handleVaultNamesRequest doesn't exist. 
- // const listResponse = vaultManager.handleVaultNamesRequest(id); - let listResponse; - for await (const vault of listResponse) { + if (perms['scan'] !== null) { + await genWritable.throw( + new vaultsErrors.ErrorVaultPermissionDenied(), + ); + return; + } + } catch (err) { + if (err instanceof TypeError) { + await genWritable.throw( + new vaultsErrors.ErrorVaultPermissionDenied(), + ); + return; + } + throw err; + } + try { + const listResponse = await vaultManager.listVaults(); + for (const vault of listResponse) { if (vault !== null) { - response.setNameOrId(vault); + response.setVaultName(vault[0]); + response.setVaultId(makeVaultIdPretty(vault[1])); await genWritable.next(response); } else { await genWritable.next(null); @@ -304,33 +346,6 @@ function createAgentService({ } callback(null, response); }, - vaultsPermisssionsCheck: async ( - call: grpc.ServerUnaryCall< - vaultsPB.NodePermission, - vaultsPB.NodePermissionAllowed - >, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.NodePermissionAllowed(); - try { - const nodeId = makeNodeId(call.request.getNodeId()); - const vaultId = makeVaultId(call.request.getVaultId()); - throw Error('Not Implemented'); - // FIXME: getVaultPermissions not implemented. - // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); - let result; - if (result[nodeId] === undefined) { - response.setPermission(false); - } else if (result[nodeId]['pull'] === undefined) { - response.setPermission(false); - } else { - response.setPermission(true); - } - callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - } - }, nodesCrossSignClaim: async ( call: grpc.ServerDuplexStream, ) => { diff --git a/src/agent/backgroundAgent.ts b/src/agent/backgroundAgent.ts index 36a36d2ec1..cc78ad7d94 100644 --- a/src/agent/backgroundAgent.ts +++ b/src/agent/backgroundAgent.ts @@ -22,6 +22,7 @@ process.on('message', async (startOptions: string) => { polykeyAgent = await PolykeyAgent.createPolykey({ password: ops.password, nodePath: ops.nodePath, + fresh: ops.fresh, }); await polykeyAgent.start({}); //Catching kill signals. diff --git a/src/agent/utils.ts b/src/agent/utils.ts index 49fa09a480..c8b2caf2be 100644 --- a/src/agent/utils.ts +++ b/src/agent/utils.ts @@ -26,9 +26,10 @@ async function checkAgentRunning(nodePath: string): Promise { return false; } -async function spawnBackgroundAgent( // FIXME, this is broken. +async function spawnBackgroundAgent( nodePath: string, password: string, + fresh = false, ): Promise { //Checking agent running. if (await checkAgentRunning(nodePath)) { @@ -85,6 +86,7 @@ async function spawnBackgroundAgent( // FIXME, this is broken. 
const startOptions = { nodePath: nodePath, password: password, + fresh: fresh, }; let pid; diff --git a/src/bin/agent/start.ts b/src/bin/agent/start.ts index e9084d3113..4bfbe1ff83 100644 --- a/src/bin/agent/start.ts +++ b/src/bin/agent/start.ts @@ -14,6 +14,7 @@ const start = binUtils.createCommand('start', { format: true, passwordFile: true, }); +start.option('-fr, --fresh', 'Starts a fresh agent'); start.option('-b, --background', 'Starts the agent as a background process'); start.action(async (options) => { const agentConfig = {}; @@ -28,6 +29,9 @@ start.action(async (options) => { : utils.getDefaultNodePath(); agentConfig['nodePath'] = nodePath; const background = options.background; + if (options.fresh) { + agentConfig['fresh'] = true; + } const password = await fs.promises.readFile(options.passwordFile, { encoding: 'utf-8', @@ -35,13 +39,17 @@ start.action(async (options) => { try { if (background) { - await agentUtils.spawnBackgroundAgent(nodePath, password); + await agentUtils.spawnBackgroundAgent( + nodePath, + password, + agentConfig['fresh'], + ); } else { const agent = await PolykeyAgent.createPolykey({ password, ...agentConfig, }); - await agent.start({}); + await agent.start({ fresh: agentConfig['fresh'] }); // If started add handlers for terminating. const termHandler = async () => { diff --git a/src/bin/nodes/index.ts b/src/bin/nodes/index.ts index 1dc930d02c..2b3d5fb7b5 100644 --- a/src/bin/nodes/index.ts +++ b/src/bin/nodes/index.ts @@ -4,6 +4,7 @@ import claim from './claim'; // Import commandUnclaimNode from "./commandUnclaimNode"; import add from './add'; import find from './find'; +import scan from './scan'; const commandNodes = createCommand('node'); commandNodes.description('nodes commands'); @@ -13,5 +14,6 @@ commandNodes.addCommand(ping); commandNodes.addCommand(add); commandNodes.addCommand(find); commandNodes.addCommand(claim); +commandNodes.addCommand(scan); export default commandNodes; diff --git a/src/bin/nodes/scan.ts b/src/bin/nodes/scan.ts new file mode 100644 index 0000000000..c088e224e8 --- /dev/null +++ b/src/bin/nodes/scan.ts @@ -0,0 +1,95 @@ +import { errors } from '../../grpc'; +import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import { utils as clientUtils } from '../../client'; +import PolykeyClient from '../../PolykeyClient'; +import { createCommand, outputFormatter } from '../utils'; +import { ErrorCLI } from '../errors'; +import * as utils from '../../utils'; +import * as binUtils from '../utils'; + +/** + * This exists to re-contextualize any errors or results as a `ping failed` result and not an actual error with the command. + * this also ensures that a failure to ping results in an exit code of 1. 
+ */ +class ErrorNodePingFailed extends ErrorCLI { + description: string = 'Node was not online or not found.'; + exitCode: number = 1; +} + +const scan = createCommand('scan', { + description: { + description: 'Scans the available vaults on another node', + args: { + node: 'Id of the node.', + }, + }, + nodePath: true, + verbose: true, + format: true, +}); +scan.arguments(''); +scan.action(async (node, options) => { + const clientConfig = {}; + clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ + new StreamHandler(), + ]); + if (options.verbose) { + clientConfig['logger'].setLevel(LogLevel.DEBUG); + } + if (options.nodePath) { + clientConfig['nodePath'] = options.nodePath; + } + + const client = await PolykeyClient.createPolykeyClient(clientConfig); + + try { + await client.start({}); + const grpcClient = client.grpcClient; + + const nodeMessage = new nodesPB.Node(); + nodeMessage.setNodeId(node); + const data: Array = []; + const vaultGenerator = grpcClient.nodesScan(nodeMessage); + const { p, resolveP } = utils.promise(); + vaultGenerator.stream.on('metadata', async (meta) => { + await clientUtils.refreshSession(meta, client.session); + resolveP(null); + }); + + for await (const vault of vaultGenerator) { + data.push(`${vault.getVaultName()}\t\t${vault.getVaultId()}`); + } + await p; + + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data: data, + }), + ); + } catch (err) { + if (err instanceof errors.ErrorGRPCClientTimeout) { + process.stderr.write(`${err.message}\n`); + } else if (err instanceof ErrorNodePingFailed) { + // Do nothing, It's printed above already. + } else if (err instanceof errors.ErrorGRPCServerNotStarted) { + process.stderr.write(`${err.message}\n`); + } else { + process.stdout.write( + outputFormatter({ + type: options.format === 'json' ? 
'json' : 'list', + data: ['Error:', err.message], + }), + ); + } + throw err; + } finally { + await client.stop(); + options.nodePath = undefined; + options.verbose = undefined; + options.format = undefined; + } +}); + +export default scan; diff --git a/src/bin/vaults/clone.ts b/src/bin/vaults/clone.ts index c24477954a..9eb4acb93a 100644 --- a/src/bin/vaults/clone.ts +++ b/src/bin/vaults/clone.ts @@ -18,8 +18,8 @@ clone.requiredOption( '(required) Id of the node to clone the vault from', ); clone.requiredOption( - '-vi, --vault-id ', - '(required) Id of the vault to be cloned', + '-vi, --vault-nameOrId ', + '(required) Name or Id of the vault to be cloned', ); clone.action(async (options) => { const clientConfig = {}; @@ -46,7 +46,7 @@ clone.action(async (options) => { vaultCloneMessage.setNode(nodeMessage); nodeMessage.setNodeId(options.nodeId); - vaultMessage.setNameOrId(options.vaultId); + vaultMessage.setNameOrId(options.vaultNameOrId); await client.start({}); const grpcClient = client.grpcClient; diff --git a/src/bin/vaults/index.ts b/src/bin/vaults/index.ts index ac9a294965..10246ba60e 100644 --- a/src/bin/vaults/index.ts +++ b/src/bin/vaults/index.ts @@ -4,11 +4,8 @@ import list from './list'; import pull from './pull'; import clone from './clone'; import rename from './rename'; -import scan from './scan'; -import stat from './stat'; import share from './share'; import unshare from './unshare'; -import permissions from './permissions'; import version from './version'; import log from './log'; import * as binUtils from '../utils'; @@ -21,11 +18,8 @@ commandVaults.addCommand(list); commandVaults.addCommand(pull); commandVaults.addCommand(clone); commandVaults.addCommand(rename); -commandVaults.addCommand(scan); -commandVaults.addCommand(stat); commandVaults.addCommand(share); commandVaults.addCommand(unshare); -commandVaults.addCommand(permissions); commandVaults.addCommand(version); commandVaults.addCommand(log); diff --git a/src/bin/vaults/pull.ts b/src/bin/vaults/pull.ts index b38d8819ae..5fca68b7db 100644 --- a/src/bin/vaults/pull.ts +++ b/src/bin/vaults/pull.ts @@ -13,13 +13,14 @@ const pull = binUtils.createCommand('pull', { verbose: true, format: true, }); -pull.requiredOption( - '-ni, --node-id ', - '(required) Id of the node to pull the vault from', +pull.option('-ni, --node-id ', 'Id of the node to pull the vault from'); +pull.option( + '-pv, --pull-vault ', + 'Name or id of the vault to pull from', ); pull.requiredOption( - '-vn, --vault-name ', - '(required) Name of the vault to be pulled', + '-vi, --vault-nameOrId ', + '(required) Name or Id of the vault to pull into', ); pull.action(async (options) => { const clientConfig = {}; @@ -38,13 +39,20 @@ pull.action(async (options) => { const client = await PolykeyClient.createPolykeyClient(clientConfig); const vaultMessage = new vaultsPB.Vault(); + const pullVaultMessage = new vaultsPB.Vault(); const nodeMessage = new nodesPB.Node(); const vaultPullMessage = new vaultsPB.Pull(); vaultPullMessage.setVault(vaultMessage); - vaultPullMessage.setNode(nodeMessage); - nodeMessage.setNodeId(options.nodeId); - vaultMessage.setNameOrId(options.vaultName); + if (options.nodeId) { + vaultPullMessage.setNode(nodeMessage); + nodeMessage.setNodeId(options.nodeId); + } + vaultMessage.setNameOrId(options.vaultNameOrId); + if (options.pullVault) { + vaultPullMessage.setPullVault(pullVaultMessage); + pullVaultMessage.setNameOrId(options.pullVault); + } try { await client.start({}); diff --git a/src/bin/vaults/share.ts b/src/bin/vaults/share.ts 
index 9f3eef8a35..e7ddb16db7 100644 --- a/src/bin/vaults/share.ts +++ b/src/bin/vaults/share.ts @@ -49,7 +49,7 @@ commandVaultShare.action(async (vaultName, nodeId, options) => { vaultMessage.setNameOrId(vaultName); nodeMessage.setNodeId(nodeId); - const pCall = grpcClient.vaultsPermissionsSet(setVaultPermsMessage); + const pCall = grpcClient.vaultsShare(setVaultPermsMessage); const { p, resolveP } = utils.promise(); pCall.call.on('metadata', async (meta) => { await clientUtils.refreshSession(meta, client.session); diff --git a/src/bin/vaults/unshare.ts b/src/bin/vaults/unshare.ts index 8785e3568d..05743e445a 100644 --- a/src/bin/vaults/unshare.ts +++ b/src/bin/vaults/unshare.ts @@ -49,7 +49,7 @@ commandVaultShare.action(async (vaultName, nodeId, options) => { vaultMessage.setNameOrId(vaultName); nodeMessage.setNodeId(nodeId); - const pCall = grpcClient.vaultsPermissionsUnset(unsetVaultPermsMessage); + const pCall = grpcClient.vaultsUnshare(unsetVaultPermsMessage); const { p, resolveP } = utils.promise(); pCall.call.on('metadata', async (meta) => { await clientUtils.refreshSession(meta, client.session); diff --git a/src/bootstrap/bootstrap.ts b/src/bootstrap/bootstrap.ts index 85262b16bc..621542f5db 100644 --- a/src/bootstrap/bootstrap.ts +++ b/src/bootstrap/bootstrap.ts @@ -48,6 +48,7 @@ async function bootstrapPolykeyState( password, nodePath: nodePath, logger: logger, + fresh: true, }); // Setting FS editing mask. @@ -81,8 +82,7 @@ async function checkKeynodeState(nodePath: string): Promise { !keysFiles.includes('root_certs') || !keysFiles.includes('root.crt') || !keysFiles.includes('root.key') || - !keysFiles.includes('root.pub') || - !keysFiles.includes('vault.key') + !keysFiles.includes('root.pub') ) { return 'MALFORMED_KEYNODE'; } diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index 967d639ea5..472a13a3f7 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -166,34 +166,26 @@ class GRPCClientClient extends GRPCClient { } @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsScan(...args) { + public nodesScan(...args) { return grpcUtils.promisifyReadableStreamCall( this.client, - this.client.vaultsScan, + this.client.nodesScan, )(...args); } @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsPermissionsSet(...args) { + public vaultsShare(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissionsSet, + this.client.vaultsShare, )(...args); } @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsPermissionsUnset(...args) { + public vaultsUnshare(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissionsUnset, - )(...args); - } - - @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultPermissions(...args) { - return grpcUtils.promisifyReadableStreamCall( - this.client, - this.client.vaultsPermissions, + this.client.vaultsUnshare, )(...args); } @@ -213,14 +205,6 @@ class GRPCClientClient extends GRPCClient { )(...args); } - @ready(new grpcErrors.ErrorGRPCClientNotStarted()) - public vaultsSecretsStat(...args) { - return grpcUtils.promisifyUnaryCall( - this.client, - this.client.vaultsSecretsStat, - )(...args); - } - @ready(new grpcErrors.ErrorGRPCClientNotStarted()) public vaultsSecretsDelete(...args) { return grpcUtils.promisifyUnaryCall( diff --git a/src/client/clientService.ts b/src/client/clientService.ts index 28502537a4..d868142c6d 100644 --- a/src/client/clientService.ts +++ b/src/client/clientService.ts 
@@ -16,14 +16,7 @@ import { IClientServiceServer, } from '../proto/js/polykey/v1/client_service_grpc_pb'; import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import * as notificationsPB from '../proto/js/polykey/v1/notifications/notifications_pb'; -import * as sessionsPB from '../proto/js/polykey/v1/sessions/sessions_pb'; -import * as gestaltsPB from '../proto/js/polykey/v1/gestalts/gestalts_pb'; -import * as identitiesPB from '../proto/js/polykey/v1/identities/identities_pb'; -import * as keysPB from '../proto/js/polykey/v1/keys/keys_pb'; -import * as permissionsPB from '../proto/js/polykey/v1/permissions/permissions_pb'; import createEchoRPC from './rpcEcho'; import createSessionRPC from './rpcSession'; import createVaultRPC from './rpcVaults'; diff --git a/src/client/rpcNodes.ts b/src/client/rpcNodes.ts index f2a8bea665..3312aeeae2 100644 --- a/src/client/rpcNodes.ts +++ b/src/client/rpcNodes.ts @@ -7,9 +7,11 @@ import type { NotificationsManager } from '../notifications'; import * as grpc from '@grpc/grpc-js'; import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; +import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; import * as utils from '../client/utils'; import * as nodesUtils from '../nodes/utils'; import * as grpcUtils from '../grpc/utils'; +import * as vaultsUtils from '../vaults/utils'; import * as nodesErrors from '../nodes/errors'; import { makeNodeId } from '../nodes/utils'; @@ -149,6 +151,29 @@ const createNodesRPC = ({ } callback(null, response); }, + nodesScan: async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + const nodeId = makeNodeId(call.request.getNodeId()); + try { + await sessionManager.verifyToken(utils.getToken(call.metadata)); + const responseMeta = utils.createMetaTokenResponse( + await sessionManager.generateToken(), + ); + call.sendMetadata(responseMeta); + const list = await nodeManager.scanNodeVaults(nodeId); + for (const vault of list) { + const vaultListMessage = new vaultsPB.List(); + vaultListMessage.setVaultName(vault[0]); + vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vault[1])); + await genWritable.next(vaultListMessage); + } + await genWritable.next(null); + } catch (err) { + await genWritable.throw(err); + } + }, }; }; diff --git a/src/client/rpcVaults.ts b/src/client/rpcVaults.ts index bec554f8ea..ad31de9db1 100644 --- a/src/client/rpcVaults.ts +++ b/src/client/rpcVaults.ts @@ -1,5 +1,4 @@ -import type { NodeId } from '../nodes/types'; -import type { Vault, VaultAction, VaultName } from '../vaults/types'; +import type { Vault, VaultName } from '../vaults/types'; import type { SessionManager } from '../sessions'; import type { VaultManager } from '../vaults'; @@ -8,12 +7,10 @@ import * as grpc from '@grpc/grpc-js'; import * as grpcUtils from '../grpc/utils'; import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; import * as secretsPB from '../proto/js/polykey/v1/secrets/secrets_pb'; -import { isNodeId, makeNodeId } from '../nodes/utils'; +import { makeNodeId } from '../nodes/utils'; import { vaultOps } from '../vaults'; -import { makeVaultIdPretty } from '../vaults/utils'; -import { 
parseVaultInput } from './utils'; +import { makeVaultId, makeVaultIdPretty } from '../vaults/utils'; const createVaultRPC = ({ vaultManager, @@ -136,13 +133,17 @@ const createVaultRPC = ({ return; } // Vault id - const vaultId = parseVaultInput(vaultMessage, vaultManager); + let vaultId; + vaultId = vaultMessage.getNameOrId(); + try { + vaultId = makeVaultId(vaultId); + } catch (err) { + vaultId = vaultId as VaultName; + } // Node id - const id = makeNodeId(nodeMessage.getNodeId()); + const nodeId = makeNodeId(nodeMessage.getNodeId()); - throw Error('Not implemented'); - // FIXME, not fully implemented - // await vaultManager.cloneVault(vaultId, id); + await vaultManager.cloneVault(nodeId, vaultId); response.setSuccess(true); callback(null, response); } catch (err) { @@ -165,47 +166,37 @@ const createVaultRPC = ({ callback({ code: grpc.status.NOT_FOUND }, null); return; } + let nodeId; const nodeMessage = call.request.getNode(); if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; + nodeId = null; + } else { + nodeId = makeNodeId(nodeMessage.getNodeId()); + } + let pullVault; + const pullVaultMessage = call.request.getPullVault(); + if (pullVaultMessage == null) { + pullVault = null; + } else { + try { + pullVault = makeVaultId(pullVault); + } catch (err) { + pullVault = pullVault as VaultName; + } } // Vault name const vaultId = await utils.parseVaultInput(vaultMessage, vaultManager); - // Node id - const id = makeNodeId(nodeMessage.getNodeId()); - - // Await vaultManager.pullVault(vaultId, id); + await vaultManager.pullVault({ + vaultId, + pullNodeId: nodeId, + pullVaultNameOrId: pullVault, + }); response.setSuccess(true); callback(null, response); } catch (err) { callback(grpcUtils.fromError(err), null); } }, - vaultsScan: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - const nodeId = makeNodeId(call.request.getNodeId()); - - try { - await sessionManager.verifyToken(utils.getToken(call.metadata)); - const responseMeta = utils.createMetaTokenResponse( - await sessionManager.generateToken(), - ); - call.sendMetadata(responseMeta); - const vaults = await vaultManager.listVaults(); - vaults.forEach(async (vaultId, vaultName) => { - const vaultListMessage = new vaultsPB.List(); - vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(makeVaultIdPretty(vaultId)); - await genWritable.next(vaultListMessage); - }); - await genWritable.next(null); - } catch (err) { - await genWritable.throw(err); - } - }, vaultsSecretsList: async ( call: grpc.ServerWritableStream, ): Promise => { @@ -261,29 +252,6 @@ const createVaultRPC = ({ callback(grpcUtils.fromError(err), null); } }, - vaultsSecretsStat: async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - const response = new vaultsPB.Stat(); - try { - await sessionManager.verifyToken(utils.getToken(call.metadata)); - const responseMeta = utils.createMetaTokenResponse( - await sessionManager.generateToken(), - ); - call.sendMetadata(responseMeta); - const vaultMessage = call.request; - const id = await utils.parseVaultInput(vaultMessage, vaultManager); - const vault = await vaultManager.openVault(id); - // FIXME, reimplement this. 
- throw Error('Not Implemented'); - // Const stats = await vaultManager.vaultStats(id); - // response.setStats(JSON.stringify(stats));); - callback(null, response); - } catch (err) { - callback(grpcUtils.fromError(err), null); - } - }, vaultsSecretsDelete: async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, @@ -454,7 +422,7 @@ const createVaultRPC = ({ callback(grpcUtils.fromError(err), null); } }, - vaultsPermissionsSet: async ( + vaultsShare: async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, ): Promise => { @@ -469,15 +437,14 @@ const createVaultRPC = ({ callback({ code: grpc.status.NOT_FOUND }, null); return; } - const node = makeNodeId(nodeMessage.getNodeId()); + const nodeId = makeNodeId(nodeMessage.getNodeId()); const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - const id = await utils.parseVaultInput(vaultMessage, vaultManager); - throw Error('Not Implemented'); - // Await vaultManager.setVaultPermissions(node, id); // FIXME + const vaultId = await utils.parseVaultInput(vaultMessage, vaultManager); + await vaultManager.shareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); callback(null, response); @@ -485,7 +452,7 @@ const createVaultRPC = ({ callback(grpcUtils.fromError(err), null); } }, - vaultsPermissionsUnset: async ( + vaultsUnshare: async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, ): Promise => { @@ -500,15 +467,14 @@ const createVaultRPC = ({ callback({ code: grpc.status.NOT_FOUND }, null); return; } - const node = makeNodeId(nodeMessage.getNodeId()); + const nodeId = makeNodeId(nodeMessage.getNodeId()); const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); return; } - const id = await utils.parseVaultInput(vaultMessage, vaultManager); - throw Error('Not implemented'); - // Await vaultManager.unsetVaultPermissions(node, id); // FIXME + const vaultId = await utils.parseVaultInput(vaultMessage, vaultManager); + await vaultManager.unshareVault(vaultId, nodeId); const response = new utilsPB.StatusMessage(); response.setSuccess(true); callback(null, response); @@ -516,50 +482,6 @@ const createVaultRPC = ({ callback(grpcUtils.fromError(err), null); } }, - vaultsPermissions: async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - await sessionManager.verifyToken(utils.getToken(call.metadata)); - const responseMeta = utils.createMetaTokenResponse( - await sessionManager.generateToken(), - ); - call.sendMetadata(responseMeta); - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - const node = nodeMessage.getNodeId(); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - const id = await utils.parseVaultInput(vaultMessage, vaultManager); - let perms: Record; - throw Error('Not implemented'); - // FIXME - if (isNodeId(node)) { - // Perms = await vaultManager.getVaultPermissions(id, node); - } else { - // Perms = await vaultManager.getVaultPermissions(id); - } - const permissionMessage = new vaultsPB.Permission(); - // For (const nodeId in perms) { - // permissionMessage.setNodeId(nodeId); - // if (perms[nodeId]['pull'] !== undefined) { - // permissionMessage.setAction('pull'); - 
// } - // await genWritable.next(permissionMessage); - // } - await genWritable.next(null); - } catch (err) { - await genWritable.throw(err); - } - }, vaultsVersion: async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, diff --git a/src/git/GitRequest.ts b/src/git/GitRequest.ts deleted file mode 100644 index 14f304d664..0000000000 --- a/src/git/GitRequest.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Responsible for converting HTTP messages from isomorphic-git into requests and sending them to a specific node. - */ - -class GitRequest { - private requestInfo: ( - vaultNameOrId: string, - ) => AsyncIterableIterator; - private requestPack: ( - vaultNameOrId: string, - body: any, - ) => AsyncIterableIterator; - private requestVaultNames: () => Promise; - - constructor( - requestInfo: (vaultNameOrId: string) => AsyncIterableIterator, - requestPack: ( - vaultNameOrId: string, - body: Buffer, - ) => AsyncIterableIterator, - requestVaultNames: () => Promise, - ) { - this.requestInfo = requestInfo; - this.requestPack = requestPack; - this.requestVaultNames = requestVaultNames; - } - - /** - * The custom http request method to feed into isomorphic-git's [custom http object](https://isomorphic-git.org/docs/en/http) - * In the future this will need to be changed in order to handle the receive-pack command from isomorphic-git. This will be - * in the url passed into the request function and is needed for push functionality - */ - public async request({ - url, - method = 'GET', - headers = {}, - body = Buffer.from(''), - }) { - const u = new URL(url); - - // Parse request - if (method === 'GET') { - const match = u.pathname.match(/\/(.+)\/info\/refs$/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - const infoResponse = this.requestInfo(vaultNameOrId); - - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const match = u.pathname.match(/\/(.+)\/git-(.+)/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - - const packResponse = this.requestPack(vaultNameOrId, body[0]); - - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - } - - public async scanVaults() { - return await this.requestVaultNames(); - } -} - -export default GitRequest; diff --git a/src/git/index.ts b/src/git/index.ts index dae0d1ba12..0060192136 100644 --- a/src/git/index.ts +++ b/src/git/index.ts @@ -1,4 +1,3 @@ -export { default as GitRequest } from './GitRequest'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/git/utils.ts b/src/git/utils.ts index b21ee1f8b7..e7df400db7 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -16,7 +16,6 @@ import type { TreeEntry, TreeObject, } from 'isomorphic-git'; -// Import type { EncryptedFS } from '../types'; import path from 'path'; import pako from 'pako'; @@ -24,7 +23,6 @@ import Hash from 'sha.js/sha1'; import { PassThrough } from 'readable-stream'; import createHash from 'sha.js'; -// Import * as vaultUtils from '../vaults/utils'; import { errors as gitErrors } from './'; import type { EncryptedFS } from 'encryptedfs'; diff --git a/src/keys/KeyManager.ts b/src/keys/KeyManager.ts index 9caeee8637..8411c13659 100644 --- 
a/src/keys/KeyManager.ts +++ b/src/keys/KeyManager.ts @@ -16,7 +16,6 @@ import * as keysErrors from './errors'; import * as utils from '../utils'; import * as networkUtils from '../network/utils'; import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; -import { VaultKey } from '../vaults/types'; /** * Manage Root Keys and Root Certificates @@ -30,13 +29,11 @@ class KeyManager { public readonly rootCertPath: string; public readonly rootCertsPath: string; public readonly dbKeyPath: string; - public readonly vaultKeyPath: string; protected fs: FileSystem; protected logger: Logger; protected rootKeyPair: KeyPair; protected _dbKey: Buffer; - protected _vaultKey: Buffer; protected rootCert: Certificate; protected workerManager?: PolykeyWorkerManagerInterface; @@ -90,7 +87,6 @@ class KeyManager { this.rootCertPath = path.join(keysPath, 'root.crt'); this.rootCertsPath = path.join(keysPath, 'root_certs'); this.dbKeyPath = path.join(keysPath, 'db.key'); - this.vaultKeyPath = path.join(keysPath, 'vault.key'); } public setWorkerManager(workerManager: PolykeyWorkerManagerInterface) { @@ -106,14 +102,12 @@ class KeyManager { rootKeyPairBits = 4096, rootCertDuration = 31536000, dbKeyBits = 256, - vaultKeyBits = 256, fresh = false, }: { password: string; rootKeyPairBits?: number; rootCertDuration?: number; dbKeyBits?: number; - vaultKeyBits?: number; fresh?: boolean; }) { this.logger.info('Creating Key Manager'); @@ -131,7 +125,6 @@ class KeyManager { this.rootKeyPair = rootKeyPair; this.rootCert = rootCert; this._dbKey = await this.setupKey(this.dbKeyPath, dbKeyBits); - this._vaultKey = await this.setupKey(this.vaultKeyPath, vaultKeyBits); this.logger.info('Created Key Manager'); } @@ -308,7 +301,6 @@ class KeyManager { ): Promise { this.logger.info('Renewing root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const rootKeyPair = await this.generateKeyPair(bits); const now = new Date(); const rootCert = keysUtils.generateCertificate( @@ -342,7 +334,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.rootCert = rootCert; @@ -363,7 +354,6 @@ class KeyManager { ): Promise { this.logger.info('Resetting root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const rootKeyPair = await this.generateKeyPair(bits); const rootCert = keysUtils.generateCertificate( rootKeyPair.publicKey, @@ -379,7 +369,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.rootCert = rootCert; @@ -625,10 +614,6 @@ class KeyManager { return this._dbKey; } - get vaultKey(): VaultKey { - return this._vaultKey as VaultKey; - } - // --- /** diff --git a/src/nodes/NodeConnection.ts b/src/nodes/NodeConnection.ts index e8bc4ac26d..8dc1a26b76 100644 --- a/src/nodes/NodeConnection.ts +++ b/src/nodes/NodeConnection.ts @@ -9,6 +9,7 @@ import type { ClaimIntermediary, ClaimIdString, } from '../claims/types'; +import type { VaultName, VaultId } from '../vaults/types'; import Logger from '@matrixai/logger'; import * as 
nodesUtils from './utils'; @@ -426,13 +427,20 @@ class NodeConnection { * Retrieves all the vaults for a peers node */ @ready(new nodesErrors.ErrorNodeConnectionNotStarted()) - public async scanVaults(): Promise> { - // Create the handler for git to scan from - const gitRequest = await vaultsUtils.constructGitHandler( - this.client, - this.keyManager.getNodeId(), - ); - return await gitRequest.scanVaults(); + public async scanVaults( + nodeId: NodeId, + ): Promise> { + const nodeIdMessage = new nodesPB.Node(); + nodeIdMessage.setNodeId(nodeId); + const vaults: Array<[VaultName, VaultId]> = []; + const genReadable = this.client.nodesScan(nodeIdMessage); + for await (const vault of genReadable) { + vaults.push([ + vault.getVaultName() as VaultName, + vaultsUtils.makeVaultId(vault.getVaultId()), + ]); + } + return vaults; } } diff --git a/src/nodes/NodeManager.ts b/src/nodes/NodeManager.ts index cd011e109a..3fcf7a992c 100644 --- a/src/nodes/NodeManager.ts +++ b/src/nodes/NodeManager.ts @@ -3,6 +3,7 @@ import type { PublicKeyPem } from '../keys/types'; import type { Sigchain } from '../sigchain'; import type { ChainData, ChainDataEncoded } from '../sigchain/types'; import type { ClaimIdString } from '../claims/types'; +import type { VaultName, VaultId } from '../vaults/types'; import type { NodeId, NodeAddress, @@ -596,11 +597,13 @@ class NodeManager { * Retrieves all the vaults for a peers node */ @ready(new nodesErrors.ErrorNodeManagerNotStarted()) - public async scanNodeVaults(nodeId: string): Promise> { + public async scanNodeVaults( + nodeId: NodeId, + ): Promise> { // Create a connection to another node - const connection = await this.getConnectionToNode(nodeId as NodeId); + const connection = await this.getConnectionToNode(nodeId); // Scan the vaults of the node over the connection - return await connection.scanVaults(); + return await connection.scanVaults(this.getNodeId()); } public async clearDB() { diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index fa5375154c..e7289e6c95 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -15,13 +15,12 @@ interface IAgentServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsScan"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IAgentServiceService_IVaultsPermisssionsCheck extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsPermisssionsCheck"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IAgentServiceService_INodesClosestLocalNodesGet extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/NodesClosestLocalNodesGet"; requestStream: false; @@ -115,6 +96,15 @@ interface IAgentServiceService_INodesCrossSignClaim extends grpc.MethodDefinitio responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } +interface IAgentServiceService_INodesScan extends grpc.MethodDefinition { + path: "/polykey.v1.AgentService/NodesScan"; + requestStream: false; + responseStream: true; + requestSerialize: 
grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IAgentServiceService_INotificationsSend extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/NotificationsSend"; requestStream: false; @@ -131,13 +121,12 @@ export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { echo: grpc.handleUnaryCall; vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; - vaultsScan: grpc.handleServerStreamingCall; - vaultsPermisssionsCheck: grpc.handleUnaryCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; nodesHolePunchMessageSend: grpc.handleUnaryCall; nodesCrossSignClaim: grpc.handleBidiStreamingCall; + nodesScan: grpc.handleServerStreamingCall; notificationsSend: grpc.handleUnaryCall; } @@ -150,11 +139,6 @@ export interface IAgentServiceClient { vaultsGitPackGet(): grpc.ClientDuplexStream; vaultsGitPackGet(options: Partial): grpc.ClientDuplexStream; vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -170,6 +154,8 @@ export interface IAgentServiceClient { nodesCrossSignClaim(): grpc.ClientDuplexStream; nodesCrossSignClaim(options: Partial): grpc.ClientDuplexStream; nodesCrossSignClaim(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; + nodesScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + nodesScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; notificationsSend(request: polykey_v1_notifications_notifications_pb.AgentNotification, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; notificationsSend(request: 
polykey_v1_notifications_notifications_pb.AgentNotification, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; notificationsSend(request: polykey_v1_notifications_notifications_pb.AgentNotification, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; @@ -184,11 +170,6 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsGitPackGet(options?: Partial): grpc.ClientDuplexStream; public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermisssionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -203,6 +184,8 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public nodesHolePunchMessageSend(request: polykey_v1_nodes_nodes_pb.Relay, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public nodesCrossSignClaim(options?: Partial): grpc.ClientDuplexStream; public nodesCrossSignClaim(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; + public nodesScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + public nodesScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public notificationsSend(request: polykey_v1_notifications_notifications_pb.AgentNotification, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public notificationsSend(request: 
polykey_v1_notifications_notifications_pb.AgentNotification, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; public notificationsSend(request: polykey_v1_notifications_notifications_pb.AgentNotification, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EmptyMessage) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index ad46827874..f19fa73d0c 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -117,26 +117,15 @@ function deserialize_polykey_v1_utils_EmptyMessage(buffer_arg) { return polykey_v1_utils_utils_pb.EmptyMessage.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_NodePermission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermission)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermission'); +function serialize_polykey_v1_vaults_List(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.List)) { + throw new Error('Expected argument of type polykey.v1.vaults.List'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_NodePermission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermission.deserializeBinary(new Uint8Array(buffer_arg)); -} - -function serialize_polykey_v1_vaults_NodePermissionAllowed(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermissionAllowed)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermissionAllowed'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_NodePermissionAllowed(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermissionAllowed.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_List(buffer_arg) { + return polykey_v1_vaults_vaults_pb.List.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_polykey_v1_vaults_PackChunk(arg) { @@ -198,28 +187,6 @@ vaultsGitInfoGet: { responseSerialize: serialize_polykey_v1_vaults_PackChunk, responseDeserialize: deserialize_polykey_v1_vaults_PackChunk, }, - vaultsScan: { - path: '/polykey.v1.AgentService/VaultsScan', - requestStream: false, - responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.Vault, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_Vault, - responseDeserialize: deserialize_polykey_v1_vaults_Vault, - }, - vaultsPermisssionsCheck: { - path: '/polykey.v1.AgentService/VaultsPermisssionsCheck', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.NodePermission, - responseType: polykey_v1_vaults_vaults_pb.NodePermissionAllowed, - requestSerialize: serialize_polykey_v1_vaults_NodePermission, - requestDeserialize: deserialize_polykey_v1_vaults_NodePermission, - responseSerialize: serialize_polykey_v1_vaults_NodePermissionAllowed, - responseDeserialize: deserialize_polykey_v1_vaults_NodePermissionAllowed, - }, // Nodes nodesClosestLocalNodesGet: { path: '/polykey.v1.AgentService/NodesClosestLocalNodesGet', @@ -276,6 +243,17 @@ nodesClosestLocalNodesGet: { responseSerialize: 
serialize_polykey_v1_nodes_CrossSign, responseDeserialize: deserialize_polykey_v1_nodes_CrossSign, }, + nodesScan: { + path: '/polykey.v1.AgentService/NodesScan', + requestStream: false, + responseStream: true, + requestType: polykey_v1_nodes_nodes_pb.Node, + responseType: polykey_v1_vaults_vaults_pb.List, + requestSerialize: serialize_polykey_v1_nodes_Node, + requestDeserialize: deserialize_polykey_v1_nodes_Node, + responseSerialize: serialize_polykey_v1_vaults_List, + responseDeserialize: deserialize_polykey_v1_vaults_List, + }, // Notifications notificationsSend: { path: '/polykey.v1.AgentService/NotificationsSend', diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index 2458e90bcd..0ee906195e 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -27,6 +27,7 @@ interface IClientServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } +interface IClientServiceService_INodesScan extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/NodesScan"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface IClientServiceService_IKeysKeyPairRoot extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/KeysKeyPairRoot"; requestStream: false; @@ -307,15 +314,6 @@ interface IClientServiceService_IVaultsClone extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsScan"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsList extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsList"; requestStream: false; @@ -334,15 +332,6 @@ interface IClientServiceService_IVaultsSecretsMkdir extends grpc.MethodDefinitio responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsSecretsStat extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsSecretsStat"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsDelete extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsDelete"; requestStream: false; @@ -397,8 +386,8 @@ interface IClientServiceService_IVaultsSecretsNewDir extends grpc.MethodDefiniti responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissionsSet extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsSet"; +interface IClientServiceService_IVaultsShare extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsShare"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -406,8 +395,8 @@ interface IClientServiceService_IVaultsPermissionsSet extends grpc.MethodDefinit responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface 
IClientServiceService_IVaultsPermissionsUnset extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsUnset"; +interface IClientServiceService_IVaultsUnshare extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsUnshare"; requestStream: false; responseStream: false; requestSerialize: grpc.serialize; @@ -415,15 +404,6 @@ interface IClientServiceService_IVaultsPermissionsUnset extends grpc.MethodDefin responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissions extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissions"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsVersion extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsVersion"; requestStream: false; @@ -653,6 +633,7 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation nodesPing: grpc.handleUnaryCall; nodesClaim: grpc.handleUnaryCall; nodesFind: grpc.handleUnaryCall; + nodesScan: grpc.handleServerStreamingCall; keysKeyPairRoot: grpc.handleUnaryCall; keysKeyPairReset: grpc.handleUnaryCall; keysKeyPairRenew: grpc.handleUnaryCall; @@ -669,19 +650,16 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation vaultsDelete: grpc.handleUnaryCall; vaultsPull: grpc.handleUnaryCall; vaultsClone: grpc.handleUnaryCall; - vaultsScan: grpc.handleServerStreamingCall; vaultsSecretsList: grpc.handleServerStreamingCall; vaultsSecretsMkdir: grpc.handleUnaryCall; - vaultsSecretsStat: grpc.handleUnaryCall; vaultsSecretsDelete: grpc.handleUnaryCall; vaultsSecretsEdit: grpc.handleUnaryCall; vaultsSecretsGet: grpc.handleUnaryCall; vaultsSecretsRename: grpc.handleUnaryCall; vaultsSecretsNew: grpc.handleUnaryCall; vaultsSecretsNewDir: grpc.handleUnaryCall; - vaultsPermissionsSet: grpc.handleUnaryCall; - vaultsPermissionsUnset: grpc.handleUnaryCall; - vaultsPermissions: grpc.handleServerStreamingCall; + vaultsShare: grpc.handleUnaryCall; + vaultsUnshare: grpc.handleUnaryCall; vaultsVersion: grpc.handleUnaryCall; vaultsLog: grpc.handleServerStreamingCall; identitiesAuthenticate: grpc.handleServerStreamingCall; @@ -736,6 +714,8 @@ export interface IClientServiceClient { nodesFind(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; nodesFind(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; nodesFind(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; + nodesScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + nodesScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: 
(error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; @@ -782,16 +762,11 @@ export interface IClientServiceClient { vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -810,14 +785,12 @@ export interface IClientServiceClient { vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: 
grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; @@ -917,6 +890,8 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public nodesFind(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; public nodesFind(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; public nodesFind(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeAddress) => void): grpc.ClientUnaryCall; + public nodesScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + public nodesScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; public keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; public keysKeyPairRoot(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_keys_keys_pb.KeyPair) => void): grpc.ClientUnaryCall; @@ -963,16 +938,11 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): 
grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -991,14 +961,12 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: 
grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsShare(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsUnshare(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 8f52b73ebf..3f4ea760f3 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -365,17 +365,6 @@ function deserialize_polykey_v1_vaults_Mkdir(buffer_arg) { return polykey_v1_vaults_vaults_pb.Mkdir.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_PermGet(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermGet)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermGet'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermGet(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermGet.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_PermSet(arg) { if (!(arg instanceof 
polykey_v1_vaults_vaults_pb.PermSet)) { throw new Error('Expected argument of type polykey.v1.vaults.PermSet'); @@ -398,17 +387,6 @@ function deserialize_polykey_v1_vaults_PermUnset(buffer_arg) { return polykey_v1_vaults_vaults_pb.PermUnset.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Permission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Permission)) { - throw new Error('Expected argument of type polykey.v1.vaults.Permission'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Permission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Permission.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_Pull(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.Pull)) { throw new Error('Expected argument of type polykey.v1.vaults.Pull'); @@ -431,17 +409,6 @@ function deserialize_polykey_v1_vaults_Rename(buffer_arg) { return polykey_v1_vaults_vaults_pb.Rename.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Stat(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Stat)) { - throw new Error('Expected argument of type polykey.v1.vaults.Stat'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Stat(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Stat.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_Vault(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.Vault)) { throw new Error('Expected argument of type polykey.v1.vaults.Vault'); @@ -579,6 +546,17 @@ nodesAdd: { responseSerialize: serialize_polykey_v1_nodes_NodeAddress, responseDeserialize: deserialize_polykey_v1_nodes_NodeAddress, }, + nodesScan: { + path: '/polykey.v1.ClientService/NodesScan', + requestStream: false, + responseStream: true, + requestType: polykey_v1_nodes_nodes_pb.Node, + responseType: polykey_v1_vaults_vaults_pb.List, + requestSerialize: serialize_polykey_v1_nodes_Node, + requestDeserialize: deserialize_polykey_v1_nodes_Node, + responseSerialize: serialize_polykey_v1_vaults_List, + responseDeserialize: deserialize_polykey_v1_vaults_List, + }, // Keys keysKeyPairRoot: { path: '/polykey.v1.ClientService/KeysKeyPairRoot', @@ -757,17 +735,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsScan: { - path: '/polykey.v1.ClientService/VaultsScan', - requestStream: false, - responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.List, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_List, - responseDeserialize: deserialize_polykey_v1_vaults_List, - }, vaultsSecretsList: { path: '/polykey.v1.ClientService/VaultsSecretsList', requestStream: false, @@ -790,17 +757,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsSecretsStat: { - path: '/polykey.v1.ClientService/VaultsSecretsStat', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.Vault, - responseType: polykey_v1_vaults_vaults_pb.Stat, - requestSerialize: serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, - responseSerialize: serialize_polykey_v1_vaults_Stat, - 
responseDeserialize: deserialize_polykey_v1_vaults_Stat, - }, vaultsSecretsDelete: { path: '/polykey.v1.ClientService/VaultsSecretsDelete', requestStream: false, @@ -867,8 +823,8 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsPermissionsSet: { - path: '/polykey.v1.ClientService/VaultsPermissionsSet', + vaultsShare: { + path: '/polykey.v1.ClientService/VaultsShare', requestStream: false, responseStream: false, requestType: polykey_v1_vaults_vaults_pb.PermSet, @@ -878,8 +834,8 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsPermissionsUnset: { - path: '/polykey.v1.ClientService/VaultsPermissionsUnset', + vaultsUnshare: { + path: '/polykey.v1.ClientService/VaultsUnshare', requestStream: false, responseStream: false, requestType: polykey_v1_vaults_vaults_pb.PermUnset, @@ -889,17 +845,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsPermissions: { - path: '/polykey.v1.ClientService/VaultsPermissions', - requestStream: false, - responseStream: true, - requestType: polykey_v1_vaults_vaults_pb.PermGet, - responseType: polykey_v1_vaults_vaults_pb.Permission, - requestSerialize: serialize_polykey_v1_vaults_PermGet, - requestDeserialize: deserialize_polykey_v1_vaults_PermGet, - responseSerialize: serialize_polykey_v1_vaults_Permission, - responseDeserialize: deserialize_polykey_v1_vaults_Permission, - }, vaultsVersion: { path: '/polykey.v1.ClientService/VaultsVersion', requestStream: false, diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 072887bfef..497728120c 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -117,6 +117,11 @@ export class Pull extends jspb.Message { getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; setNode(value?: polykey_v1_nodes_nodes_pb.Node): Pull; + hasPullVault(): boolean; + clearPullVault(): void; + getPullVault(): Vault | undefined; + setPullVault(value?: Vault): Pull; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): Pull.AsObject; static toObject(includeInstance: boolean, msg: Pull): Pull.AsObject; @@ -131,6 +136,7 @@ export namespace Pull { export type AsObject = { vault?: Vault.AsObject, node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + pullVault?: Vault.AsObject, } } diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index 6fbd3c4ac7..c24615dc6f 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -1149,7 +1149,8 @@ proto.polykey.v1.vaults.Pull.prototype.toObject = function(opt_includeInstance) proto.polykey.v1.vaults.Pull.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + pullVault: (f = msg.getPullVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f) }; if (includeInstance) { @@ -1196,6 +1197,11 @@ proto.polykey.v1.vaults.Pull.deserializeBinaryFromReader = function(msg, reader) 
reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); msg.setNode(value); break; + case 3: + var value = new proto.polykey.v1.vaults.Vault; + reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); + msg.setPullVault(value); + break; default: reader.skipField(); break; @@ -1241,6 +1247,14 @@ proto.polykey.v1.vaults.Pull.serializeBinaryToWriter = function(message, writer) polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter ); } + f = message.getPullVault(); + if (f != null) { + writer.writeMessage( + 3, + f, + proto.polykey.v1.vaults.Vault.serializeBinaryToWriter + ); + } }; @@ -1318,6 +1332,43 @@ proto.polykey.v1.vaults.Pull.prototype.hasNode = function() { }; +/** + * optional Vault pull_vault = 3; + * @return {?proto.polykey.v1.vaults.Vault} + */ +proto.polykey.v1.vaults.Pull.prototype.getPullVault = function() { + return /** @type{?proto.polykey.v1.vaults.Vault} */ ( + jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 3)); +}; + + +/** + * @param {?proto.polykey.v1.vaults.Vault|undefined} value + * @return {!proto.polykey.v1.vaults.Pull} returns this +*/ +proto.polykey.v1.vaults.Pull.prototype.setPullVault = function(value) { + return jspb.Message.setWrapperField(this, 3, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.Pull} returns this + */ +proto.polykey.v1.vaults.Pull.prototype.clearPullVault = function() { + return this.setPullVault(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.Pull.prototype.hasPullVault = function() { + return jspb.Message.getField(this, 3) != null; +}; + + diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto index 5c27703ed1..955d9be59b 100644 --- a/src/proto/schemas/polykey/v1/agent_service.proto +++ b/src/proto/schemas/polykey/v1/agent_service.proto @@ -15,8 +15,6 @@ service AgentService { // Vaults rpc VaultsGitInfoGet (polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.PackChunk); rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk); - rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.Vault); - rpc VaultsPermisssionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed); // Nodes rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable); @@ -24,6 +22,7 @@ service AgentService { rpc NodesChainDataGet (polykey.v1.utils.EmptyMessage) returns (polykey.v1.nodes.ChainData); rpc NodesHolePunchMessageSend (polykey.v1.nodes.Relay) returns (polykey.v1.utils.EmptyMessage); rpc NodesCrossSignClaim (stream polykey.v1.nodes.CrossSign) returns (stream polykey.v1.nodes.CrossSign); + rpc NodesScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); // Notifications rpc NotificationsSend (polykey.v1.notifications.AgentNotification) returns (polykey.v1.utils.EmptyMessage); diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto index 17a730f7cc..9903c3ceb7 100644 --- a/src/proto/schemas/polykey/v1/client_service.proto +++ b/src/proto/schemas/polykey/v1/client_service.proto @@ -29,6 +29,7 @@ service ClientService { rpc NodesPing(polykey.v1.nodes.Node) returns (polykey.v1.utils.StatusMessage); rpc NodesClaim(polykey.v1.nodes.Claim) returns (polykey.v1.utils.StatusMessage); rpc 
NodesFind(polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeAddress); + rpc NodesScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); // Keys rpc KeysKeyPairRoot (polykey.v1.utils.EmptyMessage) returns (polykey.v1.keys.KeyPair); @@ -49,19 +50,16 @@ service ClientService { rpc VaultsDelete(polykey.v1.vaults.Vault) returns (polykey.v1.utils.StatusMessage); rpc VaultsPull(polykey.v1.vaults.Pull) returns (polykey.v1.utils.StatusMessage); rpc VaultsClone(polykey.v1.vaults.Clone) returns (polykey.v1.utils.StatusMessage); - rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List); rpc VaultsSecretsList(polykey.v1.vaults.Vault) returns (stream polykey.v1.secrets.Secret); rpc VaultsSecretsMkdir(polykey.v1.vaults.Mkdir) returns (polykey.v1.utils.StatusMessage); - rpc VaultsSecretsStat(polykey.v1.vaults.Vault) returns (polykey.v1.vaults.Stat); rpc VaultsSecretsDelete(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsEdit(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsGet(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Secret); rpc VaultsSecretsRename(polykey.v1.secrets.Rename) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsNew(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage); rpc VaultsSecretsNewDir(polykey.v1.secrets.Directory) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissionsSet(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissionsUnset(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage); - rpc VaultsPermissions(polykey.v1.vaults.PermGet) returns (stream polykey.v1.vaults.Permission); + rpc VaultsShare(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage); + rpc VaultsUnshare(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage); rpc VaultsVersion(polykey.v1.vaults.Version) returns (polykey.v1.vaults.VersionResult); rpc VaultsLog(polykey.v1.vaults.Log) returns (stream polykey.v1.vaults.LogEntry); diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto index efd2d45b0a..a013eb94c3 100644 --- a/src/proto/schemas/polykey/v1/vaults/vaults.proto +++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto @@ -32,6 +32,7 @@ message Mkdir { message Pull { Vault vault = 1; polykey.v1.nodes.Node node = 2; + Vault pull_vault = 3; } message Clone { diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index 7756fdc045..ff640e6131 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -4,19 +4,18 @@ import type { FileSystemWritable, CommitLog, } from './types'; +import type { KeyManager } from '../keys'; +import type { MutexInterface } from 'async-mutex'; +import type { EncryptedFS } from 'encryptedfs'; import path from 'path'; import git from 'isomorphic-git'; import { Mutex } from 'async-mutex'; -import { EncryptedFS } from 'encryptedfs'; import Logger from '@matrixai/logger'; -import type { MutexInterface } from 'async-mutex'; +import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; -import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; -import { makeVaultIdPretty } from './utils'; -import { KeyManager } from '../keys'; const lastTag = 'last'; @@ -27,12 +26,13 @@ class VaultInternal { public readonly gitDir: string; public readonly 
vaultId: VaultId; + protected keyManager: KeyManager; + protected efsRoot: EncryptedFS; protected efsVault: EncryptedFS; - protected logger: Logger; protected lock: MutexInterface; protected workingDir: string; - protected keyManager: KeyManager; + protected logger: Logger; public static async create({ vaultId, @@ -48,44 +48,45 @@ class VaultInternal { fresh?: boolean; }) { logger = logger ?? new Logger(this.constructor.name); + const dir = path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'); + const gitdir = path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'); if (fresh) { + logger.info( + `Initialising Vault '${vaultsUtils.makeVaultIdPretty(vaultId)}'`, + ); try { - await efs.rmdir(makeVaultIdPretty(vaultId), { recursive: true }); + await efs.rmdir(vaultsUtils.makeVaultIdPretty(vaultId), { + recursive: true, + }); } catch (err) { if (err.code !== 'ENOENT') { throw err; } } - await efs.mkdir(path.join(makeVaultIdPretty(vaultId), 'contents'), { + await efs.mkdir(dir, { recursive: true, }); - const efsVault = await efs.chroot( - path.join(makeVaultIdPretty(vaultId), 'contents'), - ); + const efsVault = await efs.chroot(dir); await efsVault.start(); - // Creating a new vault. await git.init({ fs: efs, - dir: path.join(makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(makeVaultIdPretty(vaultId), '.git'), + dir, + gitdir, }); const workingDir = await git.commit({ fs: efs, - dir: path.join(makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(makeVaultIdPretty(vaultId), '.git'), + dir, + gitdir, author: { - name: makeVaultIdPretty(vaultId), + name: keyManager.getNodeId(), }, message: 'Initial Commit', }); await efs.writeFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'packed-refs'), + path.join(gitdir, 'packed-refs'), '# pack-refs with: peeled fully-peeled sorted', ); - await efs.writeFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'), - workingDir, - ); + await efs.writeFile(path.join(gitdir, 'workingDir'), workingDir); const vault = new VaultInternal({ vaultId, keyManager, @@ -94,20 +95,14 @@ class VaultInternal { workingDir, logger, }); - logger.info(`Initialising vault at '${makeVaultIdPretty(vaultId)}'`); return vault; } else { - // Loading an existing vault. 
- const efsVault = await efs.chroot( - path.join(makeVaultIdPretty(vaultId), 'contents'), - ); + logger.info(`Starting Vault '${vaultsUtils.makeVaultIdPretty(vaultId)}'`); + const efsVault = await efs.chroot(dir); await efsVault.start(); - const workingDir = (await efs.readFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'), - { - encoding: 'utf8', - }, - )) as string; + const workingDir = (await efs.readFile(path.join(gitdir, 'workingDir'), { + encoding: 'utf8', + })) as string; const vault = new VaultInternal({ vaultId, keyManager, @@ -116,7 +111,6 @@ class VaultInternal { workingDir, logger, }); - logger.info(`Starting vault at '${makeVaultIdPretty(vaultId)}'`); return vault; } } @@ -136,8 +130,11 @@ class VaultInternal { workingDir: string; logger?: Logger; }) { - this.baseDir = path.join(makeVaultIdPretty(vaultId), 'contents'); - this.gitDir = path.join(makeVaultIdPretty(vaultId), '.git'); + this.baseDir = path.join( + vaultsUtils.makeVaultIdPretty(vaultId), + 'contents', + ); + this.gitDir = path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'); this.vaultId = vaultId; this.keyManager = keyManager; this.efsRoot = efs; @@ -149,17 +146,21 @@ class VaultInternal { public async destroy(): Promise { const release = await this.lock.acquire(); + this.logger.info( + `Destroying Vault '${vaultsUtils.makeVaultIdPretty(this.vaultId)}'`, + ); try { await this.efsRoot.writeFile( - path.join(makeVaultIdPretty(this.vaultId), '.git', 'workingDirectory'), + path.join( + vaultsUtils.makeVaultIdPretty(this.vaultId), + '.git', + 'workingDirectory', + ), this.workingDir, ); } finally { release(); } - this.logger.info( - `Destroying vault at '${makeVaultIdPretty(this.vaultId)}'`, - ); } @ready(new vaultsErrors.ErrorVaultDestroyed()) @@ -226,6 +227,11 @@ class VaultInternal { } } if (message.length !== 0) { + this.logger.info( + `Committing to Vault '${vaultsUtils.makeVaultIdPretty( + this.vaultId, + )}'`, + ); this.workingDir = await git.commit({ fs: this.efsRoot, dir: this.baseDir, @@ -295,7 +301,8 @@ class VaultInternal { return { oid: readCommit.oid, committer: readCommit.commit.committer.name, - timeStamp: readCommit.commit.committer.timestamp * 1000, // Needs to be in milliseconds for Date. + // Needs to be in milliseconds for Date + timeStamp: readCommit.commit.committer.timestamp * 1000, message: readCommit.commit.message, }; }); @@ -303,11 +310,14 @@ class VaultInternal { @ready(new vaultsErrors.ErrorVaultDestroyed()) public async version(commit: string): Promise { - // Checking for special tags. + // Checking for special tags const commit_ = commit.toLowerCase() === lastTag ? 'HEAD' : commit; - // TODO: add a tag for the start of the histoy so we can use that as the operator. 
+ // TODO: add a tag for the start of the histoy so we can use that as the operator try { + this.logger.info( + `Checking out Vault '${vaultsUtils.makeVaultIdPretty(this.vaultId)}'`, + ); await git.checkout({ fs: this.efsRoot, dir: this.baseDir, diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 03bf1a0ad5..22a0cecf7a 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -7,9 +7,14 @@ import type { VaultList, Vault, } from './types'; -import type { FileSystem } from '../types'; -import type { NodeId } from '../nodes/types'; +import type { FileSystem, POJO } from '../types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; +import type { NodeId } from '../nodes/types'; +import type { KeyManager } from '../keys'; +import type { NodeManager } from '../nodes'; +import type { GestaltGraph } from '../gestalts'; +import type { ACL } from '../acl'; +import type { NotificationsManager } from '../notifications'; import path from 'path'; import Logger from '@matrixai/logger'; @@ -18,53 +23,45 @@ import git from 'isomorphic-git'; import { PassThrough } from 'readable-stream'; import * as grpc from '@grpc/grpc-js'; -import { KeyManager } from '../keys'; -import { NodeManager } from '../nodes'; -import { GestaltGraph } from '../gestalts'; -import { ACL } from '../acl'; import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; +import { utils as idUtils } from '@matrixai/id'; +import { EncryptedFS } from 'encryptedfs'; +import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; +import VaultInternal from './VaultInternal'; import * as utils from '../utils'; import * as vaultsUtils from './utils'; import * as vaultsErrors from './errors'; import * as gitUtils from '../git/utils'; import * as gitErrors from '../git/errors'; -import * as gestaltErrors from '../gestalts/errors'; -import { EncryptedFS, POJO } from 'encryptedfs'; -import VaultInternal from './VaultInternal'; -import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; -import { utils as idUtils } from '@matrixai/id'; -import { makeVaultId } from './utils'; -import { NotificationsManager } from '../notifications'; interface VaultManager extends CreateDestroy {} @CreateDestroy() class VaultManager { public readonly vaultsPath: string; - protected fs: FileSystem; protected nodeManager: NodeManager; protected gestaltGraph: GestaltGraph; protected acl: ACL; protected notificationsManager: NotificationsManager; + protected keyManager: KeyManager; + + protected vaultsMap: VaultMap; protected efs: EncryptedFS; protected db: DB; - protected logger: Logger; - protected vaultsKey: VaultKey; - protected vaultsMap: VaultMap; - protected vaultsDbDomain: string; - protected vaultsNamesDbDomain: Array; protected vaultsDb: DBLevel; protected vaultsNamesDb: DBLevel; - protected keyManager: KeyManager; + protected vaultsNamesDbDomain: Array; + + protected logger: Logger; static async createVaultManager({ fresh = false, keyManager, vaultsPath, - vaultsKey, nodeManager, gestaltGraph, acl, + notificationsManager, db, fs, logger, @@ -72,10 +69,10 @@ class VaultManager { fresh?: boolean; keyManager: KeyManager; vaultsPath: string; - vaultsKey: VaultKey; nodeManager: NodeManager; gestaltGraph: GestaltGraph; acl: ACL; + notificationsManager: NotificationsManager; db: DB; fs?: FileSystem; logger?: Logger; @@ -83,10 +80,11 @@ class VaultManager { logger = logger ?? new Logger(this.constructor.name); const fileSystem = fs ?? 
require('fs'); logger.info('Creating Vault Manager'); - const vaultsDbDomain = 'VaultManager'; + const vaultsDbDomain = VaultManager.name; const vaultsDb = await db.level(vaultsDbDomain); const vaultsNamesDbDomain = [vaultsDbDomain, 'names']; const vaultsNamesDb = await db.level(vaultsNamesDbDomain[1], vaultsDb); + let vaultsKey; if (fresh) { await vaultsDb.clear(); await fileSystem.promises.rm(vaultsPath, { @@ -94,6 +92,19 @@ class VaultManager { recursive: true, }); logger.info(`Removing vaults directory at '${vaultsPath}'`); + vaultsKey = await vaultsUtils.generateVaultKey(); + await vaultsDb.put('vaultsKey', vaultsKey); + } else { + try { + vaultsKey = (await vaultsDb.get('vaultsKey')) as VaultKey; + } catch (e) { + throw new vaultsErrors.ErrorVaultKeyRead(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } } await utils.mkdirExists(fileSystem, vaultsPath, { recursive: true }); const efs = await EncryptedFS.createEncryptedFS({ @@ -108,14 +119,12 @@ class VaultManager { nodeManager, gestaltGraph, acl, + notificationsManager, + efs, db, - vaultsKey, - vaultsDbDomain, - vaultsNamesDbDomain, vaultsDb, vaultsNamesDb, - efs, - fs, + vaultsNamesDbDomain, logger, }); } @@ -125,43 +134,37 @@ class VaultManager { nodeManager, gestaltGraph, acl, + notificationsManager, + efs, db, - vaultsKey, - vaultsDbDomain, - vaultsNamesDbDomain, vaultsDb, vaultsNamesDb, - efs, - fs, + vaultsNamesDbDomain, logger, }: { keyManager: KeyManager; nodeManager: NodeManager; gestaltGraph: GestaltGraph; acl: ACL; + notificationsManager: NotificationsManager; + efs: EncryptedFS; db: DB; - vaultsKey: VaultKey; - vaultsDbDomain: string; - vaultsNamesDbDomain: Array; vaultsDb: DBLevel; vaultsNamesDb: DBLevel; - efs: EncryptedFS; - fs?: FileSystem; + vaultsNamesDbDomain: Array; logger?: Logger; }) { this.keyManager = keyManager; this.nodeManager = nodeManager; this.gestaltGraph = gestaltGraph; this.acl = acl; + this.notificationsManager = notificationsManager; this.db = db; - this.vaultsDbDomain = vaultsDbDomain; this.vaultsNamesDbDomain = vaultsNamesDbDomain; this.vaultsDb = vaultsDb; this.vaultsNamesDb = vaultsNamesDb; this.vaultsMap = new Map(); this.efs = efs; - this.fs = fs ?? require('fs'); - this.vaultsKey = vaultsKey; this.logger = logger ?? 
new Logger(this.constructor.name); } @@ -231,6 +234,9 @@ class VaultManager { const lock = new Mutex(); this.vaultsMap.set(idUtils.toString(vaultId), { lock }); return await this._transaction(async () => { + this.logger.info( + `Storing metadata for Vault ${vaultsUtils.makeVaultIdPretty(vaultId)}`, + ); await this.db.put(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId), { name: vaultName, }); @@ -248,6 +254,9 @@ class VaultManager { @ready(new vaultsErrors.ErrorVaultManagerDestroyed()) public async destroyVault(vaultId: VaultId) { + this.logger.info( + `Destroying Vault ${vaultsUtils.makeVaultIdPretty(vaultId)}`, + ); await this._transaction(async () => { const vaultName = await this.getVaultName(vaultId); if (!vaultName) return; @@ -282,7 +291,7 @@ class VaultManager { const dbMeta = (o as any).value; const dbId = (o as any).key; const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); - vaults.set(vaultMeta.name, makeVaultId(dbId)); + vaults.set(vaultMeta.name, vaultsUtils.makeVaultId(dbId)); } return vaults; } @@ -292,6 +301,9 @@ class VaultManager { vaultId: VaultId, newVaultName: VaultName, ): Promise { + this.logger.info( + `Renaming Vault ${vaultsUtils.makeVaultIdPretty(vaultId)}`, + ); await this._transaction(async () => { const meta = await this.db.get( this.vaultsNamesDbDomain, @@ -314,7 +326,7 @@ class VaultManager { const dbId = (o as any).key; const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); if (vaultName === vaultMeta.name) { - return makeVaultId(dbId); + return vaultsUtils.makeVaultId(dbId); } } } @@ -325,16 +337,9 @@ class VaultManager { if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); return await this.gestaltGraph._transaction(async () => { return await this.acl._transaction(async () => { - const gestalt = await this.gestaltGraph.getGestaltByNode(nodeId); - if (gestalt == null) { - throw new gestaltErrors.ErrorGestaltsGraphNodeIdMissing(); - } - const nodes = gestalt.nodes; - for (const node in nodes) { - await this.acl.setNodeAction(nodeId, 'scan'); - await this.acl.setVaultAction(vaultId, nodes[node].id, 'pull'); - await this.acl.setVaultAction(vaultId, nodes[node].id, 'clone'); - } + await this.gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + await this.acl.setVaultAction(vaultId, nodeId, 'pull'); + await this.acl.setVaultAction(vaultId, nodeId, 'clone'); await this.notificationsManager.sendNotification(nodeId, { type: 'VaultShare', vaultId: idUtils.toString(vaultId), @@ -348,17 +353,38 @@ class VaultManager { }); } + @ready(new vaultsErrors.ErrorVaultManagerDestroyed()) + public async unshareVault(vaultId: VaultId, nodeId: NodeId): Promise { + const vaultName = await this.getVaultName(vaultId); + if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); + return await this.gestaltGraph._transaction(async () => { + return await this.acl._transaction(async () => { + await this.gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'pull'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'clone'); + }); + }); + } + @ready(new vaultsErrors.ErrorVaultManagerDestroyed()) public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, ): Promise { + // This error flag will contain the error returned by the cloning grpc stream + let error; let vaultName, remoteVaultId; + const thisNodeId = this.nodeManager.getNodeId(); const nodeConnection = await this.nodeManager.getConnectionToNode(nodeId); const client = nodeConnection.getClient(); const vaultId = await 
this.generateVaultId(); const lock = new Mutex(); this.vaultsMap.set(idUtils.toString(vaultId), { lock }); + this.logger.info( + `Cloning Vault ${vaultsUtils.makeVaultIdPretty( + vaultId, + )} on Node ${nodeId}`, + ); return await this._transaction(async () => { await this.efs.mkdir( path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), @@ -379,20 +405,44 @@ class VaultManager { const infoResponse = { async *[Symbol.iterator]() { const request = new vaultsPB.Vault(); + const meta = new grpc.Metadata(); + meta.set('nodeId', thisNodeId); + meta.set('action', 'clone'); if (typeof vaultNameOrId === 'string') { request.setNameOrId(vaultNameOrId); } else { request.setNameOrId(idUtils.toString(vaultNameOrId)); } - const response = client.vaultsGitInfoGet(request); - response.stream.on('metadata', async (meta) => { - vaultName = meta.get('vaultName').pop()!.toString(); - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); + const response = client.vaultsGitInfoGet(request, meta); + response.stream.on('metadata', (meta) => { + if (!error) { + vaultName = meta.get('vaultName').pop(); + if (vaultName) { + vaultName = vaultName.toString(); + const vId = meta.get('vaultId').pop(); + if (vId) { + remoteVaultId = vaultsUtils.makeVaultId(vId.toString()); + } + } + } }); - for await (const resp of response) { - yield resp.getChunk_asU8(); + // The polykey error is caught and the error flag set + // If there was an error, we return an empty generator so + // isomorphic git does not complain about an undefined res body + try { + for await (const resp of response) { + yield resp.getChunk_asU8(); + } + } catch (err) { + error = err; + return { + url: url, + method: method, + body: [], + headers: headers, + statusCode: 400, + statusMessage: 'Server Error', + }; } }, }; @@ -445,14 +495,23 @@ class VaultManager { throw new Error('Method not supported'); } }; - await git.clone({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - url: 'http://', - singleBranch: true, - }); + try { + await git.clone({ + fs: this.efs, + http: { request }, + dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), + url: 'http://', + singleBranch: true, + }); + } catch (err) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } + throw err; + } await this.efs.writeFile( path.join( vaultsUtils.makeVaultIdPretty(vaultId), @@ -483,8 +542,13 @@ class VaultManager { await this.db.put(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId), { name: vaultName, defaultPullNode: nodeId, - defaultPullVault: idUtils.toBuffer(remoteVaultId), + defaultPullVault: idUtils.toString(remoteVaultId), }); + this.logger.info( + `Cloned Vault ${vaultsUtils.makeVaultIdPretty( + vaultId, + )} on Node ${nodeId}`, + ); return vault; }, [vaultId]); } @@ -498,34 +562,42 @@ class VaultManager { pullNodeId?: NodeId; pullVaultNameOrId?: VaultId | VaultName; }): Promise { - let metaChange = 0; - let vaultMeta, remoteVaultId; return await this._transaction(async () => { - if (pullNodeId == null || pullVaultNameOrId == null) { - vaultMeta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), + // This error flag will contain the error returned by the 
cloning grpc stream + let error; + // Keeps track of whether the metadata needs changing to avoid unnecessary db ops + // 0 = no change, 1 = change with vault Id, 2 = change with vault name + let metaChange = 0; + let remoteVaultId; + const thisNodeId = this.nodeManager.getNodeId(); + const vaultMeta = await this.db.get( + this.vaultsNamesDbDomain, + idUtils.toBuffer(vaultId), + ); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultUnlinked(); + if (pullNodeId == null) { + pullNodeId = vaultMeta.defaultPullNode; + } else { + metaChange = 1; + vaultMeta.defaultPullNode = pullNodeId; + } + if (pullVaultNameOrId == null) { + pullVaultNameOrId = vaultsUtils.makeVaultId( + idUtils.fromString(vaultMeta.defaultPullVault), ); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultUnlinked(); - if (pullNodeId == null) { - pullNodeId = vaultMeta.defaultPullNode; + } else { + metaChange = 1; + if (typeof pullVaultNameOrId === 'string') { + metaChange = 2; } else { - metaChange = 1; - vaultMeta.defaultPullNode = pullNodeId; - } - if (pullVaultNameOrId == null) { - pullVaultNameOrId = makeVaultId( - idUtils.fromBuffer(Buffer.from(vaultMeta.defaultPullVault.data)), - ); - } else { - metaChange = 1; - if (typeof pullVaultNameOrId === 'string') { - metaChange = 2; - } else { - vaultMeta.defaultPullVault = idUtils.toBuffer(pullVaultNameOrId); - } + vaultMeta.defaultPullVault = idUtils.toString(pullVaultNameOrId); } } + this.logger.info( + `Pulling Vault ${vaultsUtils.makeVaultIdPretty( + vaultId, + )} from Node ${pullNodeId}`, + ); const nodeConnection = await this.nodeManager.getConnectionToNode( pullNodeId!, ); @@ -545,19 +617,40 @@ class VaultManager { const infoResponse = { async *[Symbol.iterator]() { const request = new vaultsPB.Vault(); + const meta = new grpc.Metadata(); + meta.set('nodeId', thisNodeId); + meta.set('action', 'clone'); if (typeof pullVaultNameOrId === 'string') { request.setNameOrId(pullVaultNameOrId); } else { request.setNameOrId(idUtils.toString(pullVaultNameOrId!)); } - const response = client.vaultsGitInfoGet(request); + const response = client.vaultsGitInfoGet(request, meta); response.stream.on('metadata', async (meta) => { - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); + if (!error) { + const vId = meta.get('vaultId').pop(); + if (vId) { + remoteVaultId = vaultsUtils.makeVaultId(vId.toString()); + } + } }); - for await (const resp of response) { - yield resp.getChunk_asU8(); + // The polykey error is caught and the error flag set + // If there was an error, we return an empty generator so + // isomorphic git does not complain about an undefined res body + try { + for await (const resp of response) { + yield resp.getChunk_asU8(); + } + } catch (err) { + error = err; + return { + url: url, + method: method, + body: [], + headers: headers, + statusCode: 400, + statusMessage: 'Server Error', + }; } }, }; @@ -624,7 +717,11 @@ class VaultManager { }, }); } catch (err) { - if (err instanceof git.Errors.MergeNotSupportedError) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } else if (err instanceof git.Errors.MergeNotSupportedError) { throw new vaultsErrors.ErrorVaultMergeConflict( 'Merge Conflicts are not supported yet', ); @@ -632,7 +729,8 @@ class VaultManager { throw err; } if (metaChange !== 0) { - if (metaChange === 2) vaultMeta.defaultPullVault = remoteVaultId; + if (metaChange === 2) + 
vaultMeta.defaultPullVault = idUtils.toString(remoteVaultId); await this.db.put( this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId), @@ -641,6 +739,11 @@ class VaultManager { } const vault = await this.getVault(vaultId); await vault.readWorkingDirectory(); + this.logger.info( + `Pulled Vault ${vaultsUtils.makeVaultIdPretty( + vaultId, + )} from Node ${pullNodeId}`, + ); return vault; }, [vaultId]); } diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts index ab4faf10ff..2718339f9c 100644 --- a/src/vaults/errors.ts +++ b/src/vaults/errors.ts @@ -6,6 +6,8 @@ class ErrorSecrets extends ErrorPolykey {} class ErrorVaultManagerDestroyed extends ErrorVaults {} +class ErrorVaultKeyRead extends ErrorVaults {} + class ErrorVaultUndefined extends ErrorVaults { description: string = 'Vault does not exist'; exitCode: number = 10; @@ -40,6 +42,8 @@ class ErrorVaultCommitUndefined extends ErrorVaults { exitCode: number = 10; } +class ErrorVaultPermissionDenied extends ErrorVaults {} + class ErrorSecretUndefined extends ErrorSecrets {} class ErrorSecretDefined extends ErrorSecrets {} @@ -51,6 +55,7 @@ class ErrorGitFile extends ErrorSecrets {} export { ErrorVaults, ErrorVaultManagerDestroyed, + ErrorVaultKeyRead, ErrorVaultUndefined, ErrorVaultDefined, ErrorRemoteVaultUndefined, @@ -65,6 +70,7 @@ export { ErrorInvalidVaultId, ErrorVaultMergeConflict, ErrorVaultCommitUndefined, + ErrorVaultPermissionDenied, ErrorSecretUndefined, ErrorSecretDefined, ErrorReadingSecret, diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index 00358cd498..6c73bfbdfe 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -1,30 +1,16 @@ -import type { EncryptedFS } from 'encryptedfs'; import type { VaultId, VaultKey, - VaultList, - VaultName, FileSystemReadable, VaultIdPretty, } from './types'; import type { FileSystem } from '../types'; -import type { NodeId } from '../nodes/types'; import fs from 'fs'; import path from 'path'; import { IdRandom } from '@matrixai/id'; -import { GitRequest } from '../git'; -import * as grpc from '@grpc/grpc-js'; - -import { promisify } from '../utils'; - -import { GRPCClientAgent } from '../agent'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; - import * as keysUtils from '../keys/utils'; -import { errors as vaultErrors } from './'; import { isIdString, isId, makeIdString, makeId } from '../GenericIdTypes'; async function generateVaultKey(bits: number = 256): Promise { @@ -91,7 +77,7 @@ async function* readdirRecursivelyEFS( const dirents = await efs.readdir(dir); let secretPath: string; for (const dirent of dirents) { - const res = dirent.toString(); // Makes string | buffer a string. 
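The clone and pull paths above both feed isomorphic-git through a hand-rolled http.request shim backed by the vaultsGitInfoGet gRPC stream, so any Polykey error raised on that stream would otherwise surface from git.clone (and the corresponding pull) only as a generic SmartHttpError. The error flag is what carries the real error across that boundary. A reduced sketch of the pattern follows; it handles only the info request for brevity and uses a hypothetical getInfoStream argument in place of the real gRPC call, so it is not the actual implementation:

// Reduced sketch of the error-substitution pattern used by cloneVault/pullVault.
// `infoStream` stands in for the vaultsGitInfoGet gRPC response stream and is
// assumed to be an async iterable of messages exposing getChunk_asU8().
import git from 'isomorphic-git';
import type { EncryptedFS } from 'encryptedfs';

async function cloneThroughGrpc(
  efs: EncryptedFS,
  dir: string,
  gitdir: string,
  infoStream: AsyncIterable<{ getChunk_asU8(): Uint8Array }>,
): Promise<void> {
  // Holds the Polykey error raised by the gRPC stream, if any
  let error: Error | undefined;
  async function* body(): AsyncGenerator<Uint8Array> {
    try {
      for await (const resp of infoStream) {
        yield resp.getChunk_asU8();
      }
    } catch (err) {
      // Capture the real error and hand isomorphic-git an empty body so that
      // it fails later with its generic SmartHttpError
      error = err;
    }
  }
  const request = async ({
    url,
    method,
    headers,
  }: {
    url: string;
    method: string;
    headers: Record<string, string>;
  }) => ({
    url,
    method,
    headers,
    body: body(),
    statusCode: 200,
    statusMessage: 'OK',
  });
  try {
    await git.clone({
      fs: efs,
      http: { request },
      dir,
      gitdir,
      url: 'http://',
      singleBranch: true,
    });
  } catch (err) {
    // Swap the generic git error back for the captured Polykey error
    if (err instanceof git.Errors.SmartHttpError && error != null) throw error;
    throw err;
  }
}

The real code also services the git-upload-pack request and fills in the proper response metadata; the sketch only shows how the captured error is swapped back in.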
+ const res = dirent.toString(); secretPath = path.join(dir, res); if ((await efs.stat(secretPath)).isDirectory() && dirent !== '.git') { if (dirs === true) { @@ -104,150 +90,6 @@ async function* readdirRecursivelyEFS( } } -async function* readdirRecursivelyEFS2( - fs: EncryptedFS, - dir: string, - dirs?: boolean, -): AsyncGenerator { - const dirents = await fs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); - secretPath = path.join(dir, res); - if (dirent !== '.git') { - try { - await fs.readdir(secretPath); - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursivelyEFS2(fs, secretPath, dirs); - } catch (err) { - if (err.code === 'ENOTDIR') { - yield secretPath; - } - } - } - } -} - -/** - * Searches a list of vaults for the given vault Id and associated name - * @throws If the vault Id does not exist - */ -function searchVaultName(vaultList: VaultList, vaultId: VaultId): VaultName { - let vaultName: VaultName | undefined; - - // Search each element in the list of vaults - for (const elem in vaultList) { - // List is of form \t - const value = vaultList[elem].split('\t'); - if (value[1] === vaultId) { - vaultName = value[0]; - break; - } - } - if (vaultName == null) { - throw new vaultErrors.ErrorRemoteVaultUndefined( - `${vaultId} does not exist on connected node`, - ); - } - return vaultName; -} - -/** - * Creates a GitRequest object from the desired node connection. - * @param client GRPC connection to desired node - * @param nodeId - */ -async function constructGitHandler( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const gitRequest = new GitRequest( - ((vaultNameOrId: string) => requestInfo(vaultNameOrId, client)).bind(this), - ((vaultNameOrId: string, body: Buffer) => - requestPack(vaultNameOrId, body, client)).bind(this), - (() => requestVaultNames(client, nodeId)).bind(this), - ); - return gitRequest; -} - -/** - * Requests remote info from the connected node for the named vault. - * @param vaultId ID of the desired vault - * @param client A connection object to the node - * @returns Async Generator of Uint8Arrays representing the Info Response - */ -async function* requestInfo( - vaultNameOrId: string, - client: GRPCClientAgent, -): AsyncGenerator { - const request = new vaultsPB.Vault(); - request.setNameOrId(vaultNameOrId); - const response = client.vaultsGitInfoGet(request); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } -} - -/** - * Requests a pack from the connected node for the named vault - * @param vaultId ID of vault - * @param body contains the pack request - * @param client A connection object to the node - * @returns AsyncGenerator of Uint8Arrays representing the Pack Response - */ -async function* requestPack( - vaultNameOrId: string, - body: Buffer, - client: GRPCClientAgent, -): AsyncGenerator { - const responseBuffers: Array = []; - - const meta = new grpc.Metadata(); - // FIXME make it a VaultIdReadable - meta.set('vaultNameOrId', vaultNameOrId); - - const stream = client.vaultsGitPackGet(meta); - const write = promisify(stream.write).bind(stream); - - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body); - write(chunk); - stream.end(); - - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); -} - -/** - * Requests the vault names from the connected node. 
- * @param client A connection object to the node - * @param nodeId - */ -async function requestVaultNames( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const request = new nodesPB.Node(); - request.setNodeId(nodeId); - const vaultList = client.vaultsScan(request); - const data: string[] = []; - for await (const vault of vaultList) { - const vaultMessage = vault.getNameOrId(); - data.push(vaultMessage); - } - - return data; -} - export { isVaultId, isVaultIdPretty, @@ -258,7 +100,4 @@ export { fileExists, readdirRecursively, readdirRecursivelyEFS, - readdirRecursivelyEFS2, - constructGitHandler, - searchVaultName, }; diff --git a/tests/PolykeyAgent.test.ts b/tests/PolykeyAgent.test.ts index 3e9f65f556..eafc935fe5 100644 --- a/tests/PolykeyAgent.test.ts +++ b/tests/PolykeyAgent.test.ts @@ -39,6 +39,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); expect(pk).toBeInstanceOf(PolykeyAgent); }, @@ -54,6 +55,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await pk.start({}); const nodePathContents = await fs.promises.readdir(nodePath); @@ -74,6 +76,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await pk.start({}); await pk.stop(); @@ -93,6 +96,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); const providers = pk.identities.getProviders(); // Exists @@ -122,6 +126,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); }).rejects.toThrow(ErrorStateVersionMismatch); }, @@ -141,6 +146,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await pk.start({}); await pk.stop(); @@ -160,6 +166,7 @@ describe('Polykey', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await pk.start({}); expect(await checkAgentRunning(nodePath)).toBeTruthy(); diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index 615d5fe725..cac4ac9bd0 100644 --- a/tests/agent/GRPCClientAgent.test.ts +++ b/tests/agent/GRPCClientAgent.test.ts @@ -1,7 +1,6 @@ -import type { NodeAddress, NodeInfo } from '@/nodes/types'; +import type { NodeAddress } from '@/nodes/types'; import type { ClaimIdString, ClaimIntermediary } from '@/claims/types'; import type { Host, Port, TLSConfig } from '@/network/types'; -import type { VaultName } from '@/vaults/types'; import fs from 'fs'; import os from 'os'; @@ -18,7 +17,6 @@ import { ACL } from '@/acl'; import { GestaltGraph } from '@/gestalts'; import { GRPCClientAgent } from '@/agent'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; import { ForwardProxy, ReverseProxy } from '@/network'; import { DB } from '@matrixai/db'; @@ -35,10 +33,6 @@ describe('GRPC agent', () => { const logger = new Logger('AgentServerTest', LogLevel.WARN, [ new StreamHandler(), ]); - const node1: NodeInfo = { - id: makeNodeId('v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug'), - chain: {}, - }; let client: GRPCClientAgent; let server: grpc.Server; @@ -133,12 +127,13 @@ describe('GRPC agent', () => { keyManager: keyManager, vaultsPath: vaultsPath, nodeManager: nodeManager, - vaultsKey: keyManager.vaultKey, db: db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager: notificationsManager, fs: fs, logger: logger, + fresh: true, }); await db.start(); @@ -154,6 
+149,8 @@ describe('GRPC agent', () => { nodeManager, sigchain, notificationsManager, + acl, + gestaltGraph, }); client = await testUtils.openTestAgentClient(port); }, global.polykeyStartupTimeout); @@ -183,46 +180,6 @@ describe('GRPC agent', () => { const response = await client.echo(echoMessage); expect(response.getChallenge()).toBe('yes'); }); - test.skip('can check permissions', async () => { - // FIXME: permissions not implemented on vaults. - const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - // await vaultManager.unsetVaultPermissions('12345' as NodeId, vault.vaultId); - const vaultPermMessage = new vaultsPB.NodePermission(); - vaultPermMessage.setNodeId(node1.id); - // VaultPermMessage.setVaultId(vault.vaultId); - const response = await client.vaultsPermisssionsCheck(vaultPermMessage); - expect(response.getPermission()).toBeFalsy(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - const response2 = await client.vaultsPermisssionsCheck(vaultPermMessage); - expect(response2.getPermission()).toBeTruthy(); - // Await vaultManager.deleteVault(vault.vaultId); - }); - test.skip('can scan vaults', async () => { - //FIXME, permissions not implemented on vaults - const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - const nodeIdMessage = new nodesPB.Node(); - nodeIdMessage.setNodeId(node1.id); - const response = client.vaultsScan(nodeIdMessage); - const data: string[] = []; - for await (const resp of response) { - const chunk = resp.getNameOrId(); - data.push(Buffer.from(chunk).toString()); - } - expect(data).toStrictEqual([]); - fail(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - const response2 = client.vaultsScan(nodeIdMessage); - const data2: string[] = []; - for await (const resp of response2) { - const chunk = resp.getNameOrId(); - // Data2.push(Buffer.from(chunk).toString()); - } - // Expect(data2).toStrictEqual([`${vault.vaultName}\t${vault.vaultId}`]); - // await vaultManager.deleteVault(vault.vaultId); - }); test('Can connect over insecure connection.', async () => { const echoMessage = new utilsPB.EchoMessage(); echoMessage.setChallenge('yes'); diff --git a/tests/agent/utils.test.ts b/tests/agent/utils.test.ts index 4404014775..fe713b2656 100644 --- a/tests/agent/utils.test.ts +++ b/tests/agent/utils.test.ts @@ -41,6 +41,7 @@ describe('agent utils', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await agent.start({}); await expect( @@ -70,7 +71,11 @@ describe('agent utils', () => { await expect( agentUtils.checkAgentRunning(nodePath), ).resolves.toBeFalsy(); - const pid = await agentUtils.spawnBackgroundAgent(nodePath, password); + const pid = await agentUtils.spawnBackgroundAgent( + nodePath, + password, + true, + ); expect(typeof pid).toBe('number'); //Returns a number. 
expect(pid > 0).toBeTruthy(); // Non-zero await poll(global.polykeyStartupTimeout * 1.5, async () => { @@ -100,6 +105,7 @@ describe('agent utils', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await agent.start({}); await expect( diff --git a/tests/agent/utils.ts b/tests/agent/utils.ts index 2c152f0617..200897af71 100644 --- a/tests/agent/utils.ts +++ b/tests/agent/utils.ts @@ -15,6 +15,8 @@ import { NodeManager } from '@/nodes'; import { promisify } from '@/utils'; import { Sigchain } from '@/sigchain'; import { NotificationsManager } from '@/notifications'; +import { ACL } from '@/acl'; +import { GestaltGraph } from '@/gestalts'; async function openTestAgentServer({ keyManager, @@ -22,12 +24,16 @@ async function openTestAgentServer({ nodeManager, sigchain, notificationsManager, + acl, + gestaltGraph, }: { keyManager: KeyManager; vaultManager: VaultManager; nodeManager: NodeManager; sigchain: Sigchain; notificationsManager: NotificationsManager; + acl: ACL; + gestaltGraph: GestaltGraph; }) { const agentService: IAgentServiceServer = createAgentService({ keyManager, @@ -35,6 +41,8 @@ async function openTestAgentServer({ nodeManager, sigchain: sigchain, notificationsManager: notificationsManager, + acl: acl, + gestaltGraph: gestaltGraph, }); const server = new grpc.Server(); diff --git a/tests/bin/agent.test.ts b/tests/bin/agent.test.ts index 6549f99ab0..75ca2d1a1b 100644 --- a/tests/bin/agent.test.ts +++ b/tests/bin/agent.test.ts @@ -54,6 +54,7 @@ describe('CLI agent', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await activeNode.start({}); }); @@ -77,6 +78,7 @@ describe('CLI agent', () => { foregroundNodePath, '--password-file', passwordFile, + '-fr', ], '.', ); @@ -127,7 +129,6 @@ describe('CLI agent', () => { }); }); describe('Starting the agent in the background', () => { - // FIXME, failing on it's own. test( 'should start the agent and clean up the lockfile when a kill signal is received', async () => { @@ -139,6 +140,7 @@ describe('CLI agent', () => { backgroundNodePath, '--password-file', passwordFile, + '-fr', ]; // We can await this since it should finish after spawning the background agent. 
@@ -277,6 +279,7 @@ describe('CLI agent', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await activeAgent.start({}); }, global.polykeyStartupTimeout); @@ -431,6 +434,7 @@ describe('CLI agent', () => { 'node ping nodeId', 'node find nodeId', 'node add nodeId 0.0.0.0 55555', + 'node scan nodeId', ]; const notificationCommands = [ 'notifications clear', @@ -452,13 +456,10 @@ describe('CLI agent', () => { 'vaults create -vn vaultName', 'vaults rename -vn vaultName -nn vaultName', 'vaults delete -vn vaultName', - 'vaults stat -vn vaultName', 'vaults share vaultName nodeId', 'vaults unshare vaultName nodeId', - 'vaults perms vaultName', 'vaults clone -ni nodeId -vi vaultId', - 'vaults pull -vn vaultName -ni nodeId', - 'vaults scan -ni nodeId', + 'vaults pull -pv vaultName -ni nodeId -vi vaultId', 'vaults version vaultName nodeId', 'vaults log vaultName', ]; diff --git a/tests/bin/echo.test.ts b/tests/bin/echo.test.ts index de74061de3..d50f9b8738 100644 --- a/tests/bin/echo.test.ts +++ b/tests/bin/echo.test.ts @@ -43,6 +43,7 @@ describe('CLI Echo', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); diff --git a/tests/bin/identities.test.ts b/tests/bin/identities.test.ts index 3cb9fb7d8e..37aed1b17c 100644 --- a/tests/bin/identities.test.ts +++ b/tests/bin/identities.test.ts @@ -121,6 +121,7 @@ describe('CLI Identities', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); keynode.id = polykeyAgent.nodes.getNodeId(); diff --git a/tests/bin/keys.test.ts b/tests/bin/keys.test.ts index 64d571591e..3c223af1cd 100644 --- a/tests/bin/keys.test.ts +++ b/tests/bin/keys.test.ts @@ -51,6 +51,7 @@ describe('CLI keys', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); newPolykeyAgent1 = await PolykeyAgent.createPolykey({ password, @@ -58,6 +59,7 @@ describe('CLI keys', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); newPolykeyAgent2 = await PolykeyAgent.createPolykey({ password, @@ -65,6 +67,7 @@ describe('CLI keys', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); await newPolykeyAgent1.start({}); diff --git a/tests/bin/nodes.test.ts b/tests/bin/nodes.test.ts index 75bc690681..5636271fa9 100644 --- a/tests/bin/nodes.test.ts +++ b/tests/bin/nodes.test.ts @@ -1,5 +1,6 @@ import type { NodeId, NodeAddress } from '@/nodes/types'; import type { Host, Port } from '@/network/types'; +import type { VaultName } from '@/vaults/types'; import os from 'os'; import path from 'path'; @@ -9,6 +10,7 @@ import { PolykeyAgent } from '@'; import * as testUtils from './utils'; import * as testKeynodeUtils from '../utils'; import { makeNodeId } from '@/nodes/utils'; +import { makeVaultIdPretty } from '@/vaults/utils'; /** * This test file has been optimised to use only one instance of PolykeyAgent where posible. 
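For reference, the reworked command tables above imply invocations along these lines; the id and name values are placeholders, and pkWithStdio is the test helper these suites already use:

// Illustrative invocations mirroring the command tables above; values are
// placeholders, not real node or vault identifiers.
import * as testUtils from './utils';

async function exampleInvocations(
  nodeId: string,
  vaultId: string,
  vaultName: string,
) {
  // Vault scanning now lives under the node commands
  await testUtils.pkWithStdio(['node', 'scan', nodeId]);
  // Pull addresses the local vault by id (-vi) and the remote vault by name (-pv)
  await testUtils.pkWithStdio([
    'vaults', 'pull', '-pv', vaultName, '-ni', nodeId, '-vi', vaultId,
  ]);
}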
@@ -72,6 +74,7 @@ describe('CLI Nodes', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); keynodeId = polykeyAgent.nodes.getNodeId(); @@ -246,6 +249,41 @@ describe('CLI Nodes', () => { expect(result2.stdout).toContain('Node is Active'); }); }); + describe('commandScanNode', () => { + test('should return the vaults names and ids of the remote vault', async () => { + await remoteOnline.gestalts.setNode({ + id: polykeyAgent.nodes.getNodeId(), + chain: {}, + }); + + await remoteOnline.gestalts.setGestaltActionByNode( + polykeyAgent.nodes.getNodeId(), + 'scan', + ); + + const vault1 = await remoteOnline.vaults.createVault( + 'Vault1' as VaultName, + ); + const vault2 = await remoteOnline.vaults.createVault( + 'Vault2' as VaultName, + ); + const vault3 = await remoteOnline.vaults.createVault( + 'Vault3' as VaultName, + ); + const commands = genCommands(['scan', remoteOnlineNodeId]); + const result = await testUtils.pkWithStdio(commands); + expect(result.code).toBe(0); + expect(result.stdout).toContain( + `Vault1\t\t${makeVaultIdPretty(vault1.vaultId)}`, + ); + expect(result.stdout).toContain( + `Vault2\t\t${makeVaultIdPretty(vault2.vaultId)}`, + ); + expect(result.stdout).toContain( + `Vault3\t\t${makeVaultIdPretty(vault3.vaultId)}`, + ); + }); + }); describe('commandFindNode', () => { test('Should find an online node', async () => { const commands = genCommands(['find', remoteOnlineNodeId]); diff --git a/tests/bin/notifications.test.ts b/tests/bin/notifications.test.ts index ff57d8aed2..dd83adc43f 100644 --- a/tests/bin/notifications.test.ts +++ b/tests/bin/notifications.test.ts @@ -65,6 +65,7 @@ describe('CLI Notifications', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); receiverPolykeyAgent = await PolykeyAgent.createPolykey({ password, @@ -72,6 +73,7 @@ describe('CLI Notifications', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await senderPolykeyAgent.start({}); await receiverPolykeyAgent.start({}); diff --git a/tests/bin/secret.test.ts b/tests/bin/secret.test.ts index 02c0e9c680..0f6871f10f 100644 --- a/tests/bin/secret.test.ts +++ b/tests/bin/secret.test.ts @@ -40,6 +40,7 @@ describe('CLI secrets', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); }, global.polykeyStartupTimeout); diff --git a/tests/bin/sessions.test.ts b/tests/bin/sessions.test.ts index 9a8dd532aa..64363c80e5 100644 --- a/tests/bin/sessions.test.ts +++ b/tests/bin/sessions.test.ts @@ -57,6 +57,7 @@ describe('Session Token Refreshing', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); diff --git a/tests/bin/vaults.test.ts b/tests/bin/vaults.test.ts index ac6d3e9c2b..c115f8ee67 100644 --- a/tests/bin/vaults.test.ts +++ b/tests/bin/vaults.test.ts @@ -75,6 +75,7 @@ describe('CLI vaults', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); await polykeyAgent.gestalts.setNode(node1); @@ -213,84 +214,7 @@ describe('CLI vaults', () => { expect(namesList).not.toContain(vaultName); }); }); - describe.skip('commandVaultStats', () => { - test('should return the stats of a vault', async () => { - command = ['vaults', 'stat', '-np', dataDir, '-vn', vaultName]; - await polykeyAgent.vaults.createVault(vaultName); - const id = polykeyAgent.vaults.getVaultId(vaultName); - expect(id).toBeTruthy(); - - const result = await utils.pkWithStdio([...command]); - 
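The scan test above only passes after the remote node has recorded the scanner in its gestalt graph and granted it the 'scan' action. The serving side is not shown in this hunk, but based on the new ErrorVaultPermissionDenied and the NodeManager tests further down, the gate is presumably something like the following sketch; the getGestaltActionsByNode call and the exact wording are assumptions, not the actual service code:

// Hypothetical permission gate for vault scanning; getGestaltActionsByNode is
// assumed from its pairing with setGestaltActionByNode in the tests.
import type { NodeId } from '@/nodes/types';
import type { GestaltGraph } from '@/gestalts';
import * as vaultsErrors from '@/vaults/errors';

async function checkScanPermission(
  gestaltGraph: GestaltGraph,
  requesterNodeId: NodeId,
): Promise<void> {
  const actions = await gestaltGraph.getGestaltActionsByNode(requesterNodeId);
  if (actions == null || !('scan' in actions)) {
    throw new vaultsErrors.ErrorVaultPermissionDenied(
      `${requesterNodeId} is not permitted to scan vaults`,
    );
  }
}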
expect(result.code).toBe(0); - }); - }); - describe.skip('commandSetPermsVault', () => { - test('should share a vault', async () => { - command = ['vaults', 'share', '-np', dataDir, vaultName, node1.id]; - await polykeyAgent.vaults.createVault(vaultName); - const id = await polykeyAgent.vaults.getVaultId(vaultName); - expect(id).toBeTruthy(); - - const result = await utils.pkWithStdio([...command]); - expect(result.code).toBe(0); - fail(); - // FIXME methods not implemented. - // const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // const sharedNodesString = JSON.stringify(sharedNodes); - // expect(sharedNodesString).toContain(node1.id); - // expect(sharedNodesString).not.toContain(node2.id); - }); - }); - describe.skip('commandUnsetPermsVault', () => { - test('should un-share a vault', async () => { - command = ['vaults', 'unshare', '-np', dataDir, vaultName, node1.id]; - //Creating vault. - await polykeyAgent.vaults.createVault(vaultName); - const id = await polykeyAgent.vaults.getVaultId(vaultName); - expect(id).toBeTruthy(); - - //Init sharing. - fail(); - // FIXME methods not implemented. - // await polykeyAgent.vaults.setVaultPermissions(node1.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, id!); - - const result = await utils.pkWithStdio([...command]); - expect(result.code).toBe(0); - // Const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // expect(sharedNodes[node1.id]['pull']).toBeUndefined(); - // expect(sharedNodes[node2.id]['pull']).toBeNull(); - // expect(sharedNodes[node3.id]['pull']).toBeNull(); - }); - }); - describe.skip('commandVaultPermissions', () => { - test('should get permissions of a vault', async () => { - command = ['vaults', 'perms', '-np', dataDir, vaultName]; - - await polykeyAgent.vaults.createVault(vaultName); - const id = await polykeyAgent.vaults.getVaultId(vaultName); - expect(id).toBeTruthy(); - - fail(); - // FIXME methods not implemented. - // await polykeyAgent.vaults.setVaultPermissions(node1.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, vault.vaultId); - - // await polykeyAgent.vaults.unsetVaultPermissions(node2.id, vault.vaultId); - - const result = await utils.pkWithStdio([...command]); - expect(result.code).toBe(0); - }); - }); - describe.skip('commandPullVault', () => { + describe('commandPullVault', () => { test( 'should clone a vault', async () => { @@ -303,9 +227,13 @@ describe('CLI vaults', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await targetPolykeyAgent.start({}); const vault = await targetPolykeyAgent.vaults.createVault(vaultName); + await vault.commit(async (efs) => { + await efs.writeFile('secret 1', 'secret'); + }); const id = await targetPolykeyAgent.vaults.getVaultId(vaultName); expect(id).toBeTruthy(); @@ -313,12 +241,6 @@ describe('CLI vaults', () => { id: polykeyAgent.nodes.getNodeId(), chain: {}, }); - fail(); - // FIXME methods not implemented. 
- // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); const targetNodeId = targetPolykeyAgent.nodes.getNodeId(); const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); @@ -327,15 +249,21 @@ describe('CLI vaults', () => { ip: targetHost, port: targetPort, }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodes.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodes.openConnection( - clientEgressHost, - clientEgressPort, + + await targetPolykeyAgent.nodes.setNode(polykeyAgent.nodes.getNodeId(), { + ip: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }); + await polykeyAgent.acl.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + await targetPolykeyAgent.vaults.shareVault( + vault.vaultId, + polykeyAgent.nodes.getNodeId(), ); command = [ @@ -349,16 +277,41 @@ describe('CLI vaults', () => { makeVaultIdPretty(vault.vaultId), ]; - // Vault does not exist on the source PolykeyAgent so the pull command throws an error which - // caught, the error is checked and if it is ErrorVaultUndefined, then the Agent attempts a - // clone instead - const result = await utils.pkWithStdio([...command]); + let result = await utils.pkWithStdio([...command]); expect(result.code).toBe(0); - // Const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // expect(JSON.stringify(list)).toContain(vaultName); + const clonedVaultId = await polykeyAgent.vaults.getVaultId(vaultName); + const clonedVault = await polykeyAgent.vaults.openVault(clonedVaultId!); + let file = await clonedVault.access(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret'); + + await polykeyAgent.vaults.destroyVault(clonedVaultId!); + + command = [ + 'vaults', + 'clone', + '-np', + dataDir, + '-ni', + targetNodeId as string, + '-vi', + vaultName, + ]; + result = await utils.pkWithStdio([...command]); + expect(result.code).toBe(0); + + const secondClonedVaultId = await polykeyAgent.vaults.getVaultId( + vaultName, + ); + const secondClonedVault = await polykeyAgent.vaults.openVault( + secondClonedVaultId!, + ); + file = await secondClonedVault.access(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret'); await targetPolykeyAgent.stop(); await targetPolykeyAgent.destroy(); @@ -381,10 +334,13 @@ describe('CLI vaults', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await targetPolykeyAgent.start({}); - await targetPolykeyAgent.vaults.createVault(vaultName); - + const vault = await targetPolykeyAgent.vaults.createVault(vaultName); + await vault.commit(async (efs) => { + await efs.writeFile('secret 1', 'secret'); + }); const id = await targetPolykeyAgent.vaults.getVaultId(vaultName); expect(id).toBeTruthy(); @@ -392,12 +348,6 @@ describe('CLI vaults', () => { id: polykeyAgent.nodes.getNodeId(), chain: {}, }); - fail(); - // FIXME methods not implemented. 
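The clone test above exercises both address forms: once the vault is shared, it can be cloned by id or by name, since the request's name-or-id field carries either. A programmatic equivalent, with placeholder values and a deliberately loose type standing in for the vault manager:

// Programmatic clone mirroring the CLI test above; assumes the remote node has
// already shared the vault with this node.
import type { NodeId } from '@/nodes/types';
import type { VaultId, VaultName } from '@/vaults/types';

async function cloneByIdThenByName(
  vaults: {
    cloneVault(nodeId: NodeId, nameOrId: VaultId | VaultName): Promise<unknown>;
  },
  targetNodeId: NodeId,
  remoteVaultId: VaultId,
  remoteVaultName: VaultName,
) {
  // Clone by vault id
  const byId = await vaults.cloneVault(targetNodeId, remoteVaultId);
  // Clone by vault name; the same name-or-id request field carries either form
  const byName = await vaults.cloneVault(targetNodeId, remoteVaultName);
  return [byId, byName];
}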
- // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); const targetNodeId = targetPolykeyAgent.nodes.getNodeId(); const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); @@ -406,52 +356,65 @@ describe('CLI vaults', () => { ip: targetHost, port: targetPort, }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodes.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodes.openConnection( - clientEgressHost, - clientEgressPort, + + await targetPolykeyAgent.nodes.setNode(polykeyAgent.nodes.getNodeId(), { + ip: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }); + await polykeyAgent.acl.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + await targetPolykeyAgent.vaults.shareVault( + vault.vaultId, + polykeyAgent.nodes.getNodeId(), + ); + + const clonedVault = await polykeyAgent.vaults.cloneVault( + targetNodeId, + vault.vaultId, ); - // Await polykeyAgent.vaults.cloneVault(vault.vaultId, targetNodeId); - - // await vault.addSecret('MySecret', 'This secret will be pulled'); - - // const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // const filteredList = list.filter((value) => { - // return value.name === vaultName; - // }); - // expect(filteredList.length).toBe(1); - // const clonedVault = await polykeyAgent.vaults.getVault( - // filteredList[0].id, - // ); - // await expect(clonedVault.listSecrets()).resolves.toStrictEqual([]); + + await vault.commit(async (efs) => { + await efs.writeFile('secret 2', 'secret the second'); + }); + + command = ['vaults', 'pull', '-np', dataDir, '-vi', vaultName]; + let result = await utils.pkWithStdio([...command]); + expect(result.code).toBe(0); + + let file = await clonedVault.access(async (efs) => { + return await efs.readFile('secret 2', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the second'); + + await vault.commit(async (efs) => { + await efs.writeFile('secret 3', 'secret the third'); + }); command = [ 'vaults', 'pull', '-np', dataDir, - '-vn', + '-vi', + makeVaultIdPretty(clonedVault.vaultId), + '-pv', vaultName, '-ni', targetNodeId, ]; - const result = await utils.pkWithStdio([...command]); + result = await utils.pkWithStdio([...command]); expect(result.code).toBe(0); - // Await expect(clonedVault.listSecrets()).resolves.toStrictEqual([ - // 'MySecret', - // ]); - // await expect(clonedVault.getSecret('MySecret')).resolves.toStrictEqual( - // 'This secret will be pulled', - // ); + file = await clonedVault.access(async (efs) => { + return await efs.readFile('secret 2', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the second'); await targetPolykeyAgent.stop(); await targetPolykeyAgent.destroy(); @@ -459,73 +422,98 @@ describe('CLI vaults', () => { }, global.defaultTimeout * 2, ); - }); - describe('commandScanVault', () => { - test('should scan a node for vaults', async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const targetPolykeyAgent = await PolykeyAgent.createPolykey({ - password, - nodePath: dataDir2, - logger: logger, - cores: 1, - workerManager: null, - }); - await 
targetPolykeyAgent.start({}); - - const targetNodeId = targetPolykeyAgent.nodes.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodes.setNode(targetNodeId, { - ip: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodes.getConnectionToNode(targetNodeId); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodes.openConnection( - clientEgressHost, - clientEgressPort, - ); - - await targetPolykeyAgent.vaults.createVault( - `${vaultName}-Vault1` as VaultName, - ); - await targetPolykeyAgent.vaults.createVault( - `${vaultName}-Vault2` as VaultName, - ); - await targetPolykeyAgent.vaults.createVault( - `${vaultName}-Vault3` as VaultName, - ); - - const targetVaults = ( - await targetPolykeyAgent.vaults.listVaults() - ).keys(); - const namesList: string[] = []; - for await (const name of targetVaults) { - namesList.push(name); - } - expect(namesList.length).toBe(3); + test( + 'share and unshare vaults', + async () => { + const dataDir2 = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + const targetPolykeyAgent = await PolykeyAgent.createPolykey({ + password, + nodePath: dataDir2, + logger: logger, + cores: 1, + workerManager: null, + fresh: true, + }); + await targetPolykeyAgent.start({}); + const vault = await polykeyAgent.vaults.createVault(vaultName); + await vault.commit(async (efs) => { + await efs.writeFile('secret 1', 'secret'); + }); - command = [ - 'vaults', - 'scan', - '-np', - dataDir, - '-ni', - targetNodeId as string, - ]; - const result = await utils.pkWithStdio([...command]); - expect(result.code).toBe(0); + await polykeyAgent.gestalts.setNode({ + id: targetPolykeyAgent.nodes.getNodeId(), + chain: {}, + }); - await targetPolykeyAgent.stop(); - await targetPolykeyAgent.destroy(); - await fs.promises.rmdir(dataDir2, { recursive: true }); - }); + const targetNodeId = targetPolykeyAgent.nodes.getNodeId(); + const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); + const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); + await polykeyAgent.nodes.setNode(targetNodeId, { + ip: targetHost, + port: targetPort, + }); + + await targetPolykeyAgent.nodes.setNode(polykeyAgent.nodes.getNodeId(), { + ip: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }); + await targetPolykeyAgent.acl.setNodePerm( + polykeyAgent.nodes.getNodeId(), + { + gestalt: { + notify: null, + }, + vaults: {}, + }, + ); + + await expect(() => + targetPolykeyAgent.vaults.cloneVault( + polykeyAgent.nodes.getNodeId(), + vault.vaultId, + ), + ).rejects.toThrow(); + + command = ['vaults', 'share', '-np', dataDir, vaultName, targetNodeId]; + let result = await utils.pkWithStdio([...command]); + expect(result.code).toBe(0); + + const clonedVault = await targetPolykeyAgent.vaults.cloneVault( + polykeyAgent.nodes.getNodeId(), + vault.vaultId, + ); + const file = await clonedVault.access(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret'); + + command = [ + 'vaults', + 'unshare', + '-np', + dataDir, + makeVaultIdPretty(vault.vaultId), + targetNodeId, + ]; + + 
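The share and unshare flow exercised in this test treats sharing as the switch that lets a remote cloneVault succeed. Condensed into jest-style code, and assuming two started agents whose node records and notify permission are already set up as in the test, the round trip looks roughly like this:

// Jest-style condensation of the share-then-clone flow; placeholder helper,
// not the test itself.
import type { VaultId } from '@/vaults/types';
import type { PolykeyAgent } from '@';

async function shareThenClone(
  owner: PolykeyAgent,
  cloner: PolykeyAgent,
  vaultId: VaultId,
) {
  // Before sharing, the cloner's attempt is expected to be rejected
  await expect(
    cloner.vaults.cloneVault(owner.nodes.getNodeId(), vaultId),
  ).rejects.toThrow();
  // Sharing grants the cloner's node access to this vault
  await owner.vaults.shareVault(vaultId, cloner.nodes.getNodeId());
  // Now the same clone succeeds
  return await cloner.vaults.cloneVault(owner.nodes.getNodeId(), vaultId);
}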
result = await utils.pkWithStdio([...command]); + expect(result.code).toBe(0); + + await expect(() => + targetPolykeyAgent.vaults.cloneVault( + polykeyAgent.nodes.getNodeId(), + vault.vaultId, + ), + ).rejects.toThrow(); + + await targetPolykeyAgent.stop(); + await targetPolykeyAgent.destroy(); + await fs.promises.rm(dataDir2, { recursive: true }); + }, + global.defaultTimeout * 2, + ); }); describe('commandVaultVersion', () => { test('should switch the version of a vault', async () => { diff --git a/tests/bootstrap/bootstrap.test.ts b/tests/bootstrap/bootstrap.test.ts index cec1fd6147..3259fa8c82 100644 --- a/tests/bootstrap/bootstrap.test.ts +++ b/tests/bootstrap/bootstrap.test.ts @@ -70,6 +70,7 @@ describe('Bootstrap', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await pk.start({}); await pk.stop(); diff --git a/tests/client/GRPCClientClient.test.ts b/tests/client/GRPCClientClient.test.ts index bfb192f8ff..4687e4c822 100644 --- a/tests/client/GRPCClientClient.test.ts +++ b/tests/client/GRPCClientClient.test.ts @@ -41,6 +41,7 @@ describe('GRPCClientClient', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); diff --git a/tests/client/PolykeyClient.test.ts b/tests/client/PolykeyClient.test.ts index 18817662ed..18cbb05d19 100644 --- a/tests/client/PolykeyClient.test.ts +++ b/tests/client/PolykeyClient.test.ts @@ -37,6 +37,7 @@ describe('GRPCClientClient', () => { logger: logger, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); @@ -105,6 +106,7 @@ describe('TLS tests', () => { clientGrpcPort: 55555, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); diff --git a/tests/client/clientService.test.ts b/tests/client/clientService.test.ts index 62867ce1f4..ea70540efa 100644 --- a/tests/client/clientService.test.ts +++ b/tests/client/clientService.test.ts @@ -152,6 +152,7 @@ describe('Client service', () => { fwdProxy, cores: 1, workerManager: null, + fresh: true, }); await polykeyAgent.start({}); @@ -299,6 +300,7 @@ describe('Client service', () => { logger, cores: 1, workerManager: null, + fresh: true, }); await agent.start({}); @@ -422,37 +424,6 @@ describe('Client service', () => { const name = (await vaultManager.listVaults()).entries().next().value[0]; expect(name).toBe(vaultRename); }); - //FIXME, fix this when vault secrets stat is re-implemented - test.skip('should get stats for vaults', async () => { - fail('not implemented'); - const statsVault = grpcUtils.promisifyUnaryCall( - client, - client.vaultsSecretsStat, - ); - - const vault = await vaultManager.createVault('MyFirstVault' as VaultName); - const vault2 = await vaultManager.createVault( - 'MySecondVault' as VaultName, - ); - - const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(makeVaultIdPretty(vault.vaultId)); - - const res = await statsVault(vaultMessage, callCredentials); - const stats1 = res.getStats(); - - vaultMessage.setNameOrId(makeVaultIdPretty(vault.vaultId)); - const res2 = await statsVault(vaultMessage, callCredentials); - const stats2 = res2.getStats(); - - // FIXME - // expect(stats1).toBe( - // JSON.stringify(await vaultManager.vaultStats(vault.vaultId)), - // ); - // expect(stats2).toBe( - // JSON.stringify(await vaultManager.vaultStats(vault2.vaultId)), - // ); - }); test('should make a directory in a vault', async () => { const vaultName = 'MySecondVault' as VaultName; @@ -550,7 +521,7 @@ describe('Client service', () => { const secrets = await 
vault.access(async (efs) => { return await efs.readdir('.'); }); - expect(secrets).toEqual(secretList2); // FIXME, this will likely fail. + expect(secrets).toEqual(secretList2); }); test('should edit secrets in a vault', async () => { const vaultName = 'MyFirstVault' as VaultName; @@ -705,7 +676,6 @@ describe('Client service', () => { return await efs.readdir('.'); }); expect(secrets).toEqual(['Secret1']); - // Expect((await vault.listSecrets()).sort()).toStrictEqual(['Secret1']); expect(response.getSuccess()).toBeTruthy(); const secret = await vault.access(async (efs) => { return (await efs.readFile('Secret1')).toString(); @@ -750,109 +720,6 @@ describe('Client service', () => { // Remove temp directory await fs.promises.rmdir(tmpDir, { recursive: true }); }); - // TODO: Permissions not supported yet. - test.skip('should add permissions to a vault', async () => { - fail('Functionality not fully implemented'); - const vaultName = 'vault1' as VaultName; - const vaultsSetPerms = - grpcUtils.promisifyUnaryCall( - client, - client.vaultsPermissionsSet, - ); - - // Creating a vault - await vaultManager.createVault(vaultName); - - // Creating a gestalts state - await createGestaltState(); - - const setVaultPermMessage = new vaultsPB.PermSet(); - const nodeMessage = new nodesPB.Node(); - const vaultMessage = new vaultsPB.Vault(); - nodeMessage.setNodeId(node2.id); - vaultMessage.setNameOrId(vaultName); - setVaultPermMessage.setVault(vaultMessage); - setVaultPermMessage.setNode(nodeMessage); - await vaultsSetPerms(setVaultPermMessage, callCredentials); - - // FIXME: this is not implemented yet. - const result = 'Not implemented'; //Await vaultManager.getVaultPermissions(vaultId); - const stringResult = JSON.stringify(result); - expect(stringResult).toContain(node2.id); - expect(stringResult).toContain('pull'); - }); - test.skip('should remove permissions to a vault', async () => { - const vaultName = 'vault1' as VaultName; - const vaultsUnsetPerms = - grpcUtils.promisifyUnaryCall( - client, - client.vaultsPermissionsUnset, - ); - - // Creating a vault. 
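Secrets in these tests are always written through vault.commit and read back through vault.access, each of which hands the callback the vault's encrypted filesystem. A minimal example of that pattern, with the vault handle typed loosely rather than against the real Vault interface:

// Minimal example of the commit/access pattern used throughout these tests.
// `vault` is an open vault handle (e.g. from createVault or openVault); the
// callback parameter is the vault's filesystem, typed loosely here.
async function writeAndReadSecret(vault: {
  commit(fn: (efs: any) => Promise<void>): Promise<unknown>;
  access<T>(fn: (efs: any) => Promise<T>): Promise<T>;
}): Promise<string> {
  // Write a secret inside a commit
  await vault.commit(async (efs) => {
    await efs.writeFile('secret 1', 'secret');
  });
  // Read it back; access is used for read-only work in these tests
  return await vault.access(async (efs) => {
    return await efs.readFile('secret 1', { encoding: 'utf8' });
  });
}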
- const vault = await vaultManager.createVault(vaultName); - const vaults = await vaultManager.listVaults(); - const vaultId = vault.vaultId; - - // Creating a gestalts state - await createGestaltState(); - fail('Functionality not fully implemented'); - // FIXME: not implemented yet - // await vaultManager.setVaultPermissions(node2.id, vaultId); - - const unsetVaultPermMessage = new vaultsPB.PermUnset(); - const nodeMessage = new nodesPB.Node(); - const vaultMessage = new vaultsPB.Vault(); - nodeMessage.setNodeId(node2.id); - vaultMessage.setNameOrId(vaults[0].name); - unsetVaultPermMessage.setVault(vaultMessage); - unsetVaultPermMessage.setNode(nodeMessage); - await vaultsUnsetPerms(unsetVaultPermMessage, callCredentials); - - // FIXME: not implemented yet - // const result = await vaultManager.getVaultPermissions(vaultId); - // const stringResult = JSON.stringify(result); - // expect(stringResult).toContain(node2.id); - // expect(stringResult.includes('pull')).toBeFalsy(); - }); - test.skip('should get permissions to a vault', async () => { - const vaultName = 'vault1' as VaultName; - const vaultsPermissions = - grpcUtils.promisifyReadableStreamCall( - client, - client.vaultsPermissions, - ); - - // Creating a vault - const vault = await vaultManager.createVault(vaultName); - const vaults = await vaultManager.listVaults(); - const vaultId = vault.vaultId; - - // Creating a gestalts state - await createGestaltState(); - - fail('Functionality not fully implemented'); - // FIXME: not implemented yet - // await vaultManager.setVaultPermissions(node2.id, vaultId); - - const getVaultPermMessage = new vaultsPB.PermGet(); - const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - vaultMessage.setNameOrId(vaults[0].name); - nodeMessage.setNodeId(node2.id); - getVaultPermMessage.setVault(vaultMessage); - getVaultPermMessage.setNode(nodeMessage); - const resGen = vaultsPermissions(getVaultPermMessage, callCredentials); - - const results: Array = []; - // FIXME - // for await (const res of resGen) { - // results.push(res.toObject()); - // } - // const resultsString = JSON.stringify(results); - // expect(resultsString).toContain(node2.id); - // expect(resultsString).toContain('pull'); - }); describe('vault versions', () => { const vaultName = 'Vault1' as VaultName; const secretName = 'Secret-1'; diff --git a/tests/discovery/Discovery.test.ts b/tests/discovery/Discovery.test.ts index 35a3a688de..6418597676 100644 --- a/tests/discovery/Discovery.test.ts +++ b/tests/discovery/Discovery.test.ts @@ -40,6 +40,7 @@ describe('Discovery', () => { logger, cores: 1, workerManager: null, + fresh: true, }); discovery = polykeyAgent.discovery; }); diff --git a/tests/index.test.ts b/tests/index.test.ts index 125272564f..897a5493d8 100644 --- a/tests/index.test.ts +++ b/tests/index.test.ts @@ -20,6 +20,7 @@ describe('index', () => { logger, cores: 1, workerManager: null, + fresh: true, }); expect(pk).toBeInstanceOf(PolykeyAgent); await pk.stop(); diff --git a/tests/keys/KeyManager.test.ts b/tests/keys/KeyManager.test.ts index 11e421a928..98b774889d 100644 --- a/tests/keys/KeyManager.test.ts +++ b/tests/keys/KeyManager.test.ts @@ -437,86 +437,6 @@ describe('KeyManager', () => { await keyManager.destroy(); }); }); - describe('vaultKey', () => { - test('Creates a key when started.', async () => { - const keysPath = `${dataDir}/keys`; - const keyManager = await KeyManager.createKeyManager({ - password, - keysPath, - logger, - }); - expect(await 
fs.promises.readdir(keysPath)).toContain('vault.key'); - expect(keyManager.vaultKey.toString()).toBeTruthy(); - }); - test('Throws an exception when it fails to parse the key.', async () => { - const keysPath = `${dataDir}/keys`; - const keyManager = await KeyManager.createKeyManager({ - password: 'Password', - keysPath, - logger, - }); - expect(await fs.promises.readdir(keysPath)).toContain('vault.key'); - expect(keyManager.vaultKey.toString()).toBeTruthy(); - await keyManager.destroy(); - - // Use a different key. - await expect( - KeyManager.createKeyManager({ - password: 'OtherPassword', - keysPath, - logger, - }), - ).rejects.toThrow(); - }); - test('key remains unchanged when resetting keys.', async () => { - const keysPath = `${dataDir}/keys`; - let keyManager = await KeyManager.createKeyManager({ - password: 'Password', - keysPath, - logger, - }); - expect(await fs.promises.readdir(keysPath)).toContain('vault.key'); - expect(keyManager.vaultKey.toString()).toBeTruthy(); - const vaultKey = keyManager.vaultKey; - - await keyManager.resetRootKeyPair('NewPassword'); - expect(keyManager.vaultKey).toEqual(vaultKey); - await keyManager.destroy(); - - // Use a different key. - keyManager = await KeyManager.createKeyManager({ - password: 'NewPassword', - keysPath, - logger, - }); - expect(keyManager.vaultKey).toEqual(vaultKey); - await keyManager.destroy(); - }); - test('key remains unchanged when renewing keys.', async () => { - const keysPath = `${dataDir}/keys`; - let keyManager = await KeyManager.createKeyManager({ - password: 'Password', - keysPath, - logger, - }); - expect(await fs.promises.readdir(keysPath)).toContain('vault.key'); - expect(keyManager.vaultKey.toString()).toBeTruthy(); - const vaultKey = keyManager.vaultKey; - - await keyManager.renewRootKeyPair('NewPassword'); - expect(keyManager.vaultKey).toEqual(vaultKey); - await keyManager.destroy(); - - // Use a different key. 
- keyManager = await KeyManager.createKeyManager({ - password: 'NewPassword', - keysPath, - logger, - }); - expect(keyManager.vaultKey).toEqual(vaultKey); - await keyManager.destroy(); - }); - }); test('KeyManager generates a valid NodeId', async () => { const keysPath = `${dataDir}/keys`; const keyManager = await KeyManager.createKeyManager({ diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 04b1e5ad6d..3afe88e810 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -1,5 +1,6 @@ import type { Host, Port, TLSConfig } from '@/network/types'; import type { NodeId, NodeInfo, NodeData } from '@/nodes/types'; +import type { VaultName, VaultId } from '@/vaults/types'; import os from 'os'; import path from 'path'; @@ -163,25 +164,26 @@ describe('NodeConnection', () => { fs: fs, logger: logger, }); + serverNotificationsManager = + await NotificationsManager.createNotificationsManager({ + acl: serverACL, + db: serverDb, + nodeManager: serverNodeManager, + keyManager: serverKeyManager, + logger: logger, + }); serverVaultManager = await VaultManager.createVaultManager({ keyManager: serverKeyManager, vaultsPath: serverVaultsPath, nodeManager: serverNodeManager, - vaultsKey: serverKeyManager.vaultKey, db: serverDb, acl: serverACL, gestaltGraph: serverGestaltGraph, + notificationsManager: serverNotificationsManager, fs: fs, logger: logger, + fresh: true, }); - serverNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: serverACL, - db: serverDb, - nodeManager: serverNodeManager, - keyManager: serverKeyManager, - logger: logger, - }); await serverDb.start(); await serverGestaltGraph.setNode(node); await serverNodeManager.start(); @@ -191,6 +193,8 @@ describe('NodeConnection', () => { nodeManager: serverNodeManager, sigchain: serverSigchain, notificationsManager: serverNotificationsManager, + acl: serverACL, + gestaltGraph: serverGestaltGraph, }); server = await GRPCServer.createGRPCServer({ logger: logger, @@ -393,18 +397,14 @@ describe('NodeConnection', () => { await conn.destroy(); }); - test.skip('scans the servers vaults', async () => { - // Const vault1 = await serverVaultManager.createVault('Vault1' as VaultName); - // const vault2 = await serverVaultManager.createVault('Vault2' as VaultName); - // const vault3 = await serverVaultManager.createVault('Vault3' as VaultName); - // const vault4 = await serverVaultManager.createVault('Vault4' as VaultName); - // const vault5 = await serverVaultManager.createVault('Vault5' as VaultName); - + test('scans the servers vaults', async () => { await serverGestaltGraph.setNode({ id: sourceNodeId, chain: {}, }); + await serverGestaltGraph.setGestaltActionByNode(sourceNodeId, 'scan'); + const conn = await NodeConnection.createNodeConnection({ targetNodeId: targetNodeId, targetHost: targetHost, @@ -415,34 +415,20 @@ describe('NodeConnection', () => { }); await conn.start({}); await revProxy.openConnection(sourceHost, sourcePort); + const vaultName1 = 'vn1' as VaultName; + const vaultName2 = 'vn2' as VaultName; + const vaultName3 = 'vn3' as VaultName; + const v1 = await serverVaultManager.createVault(vaultName1); + const v2 = await serverVaultManager.createVault(vaultName2); + const v3 = await serverVaultManager.createVault(vaultName3); - const vaultList: string[] = []; - - let vaults = await conn.scanVaults(); - - expect(vaults.sort()).toStrictEqual(vaultList.sort()); - - fail('Not Implemented'); - // FIXME - // await 
serverVaultManager.setVaultPermissions(sourceNodeId, vault1.vaultId); - // await serverVaultManager.setVaultPermissions(sourceNodeId, vault2.vaultId); - // await serverVaultManager.setVaultPermissions(sourceNodeId, vault3.vaultId); - - vaults = await conn.scanVaults(); - - // VaultList.push(`${vault1.vaultName}\t${vault1.vaultId}`); - // vaultList.push(`${vault2.vaultName}\t${vault2.vaultId}`); - // vaultList.push(`${vault3.vaultName}\t${vault3.vaultId}`); - - expect(vaults.sort()).toStrictEqual(vaultList.sort()); - - // Await serverVaultManager.setVaultPermissions(sourceNodeId, vault4.vaultId); - // await serverVaultManager.setVaultPermissions(sourceNodeId, vault5.vaultId); + const vaultList: Array<[VaultName, VaultId]> = []; - vaults = await conn.scanVaults(); + vaultList.push([vaultName1, v1.vaultId]); + vaultList.push([vaultName2, v2.vaultId]); + vaultList.push([vaultName3, v3.vaultId]); - // VaultList.push(`${vault4.vaultName}\t${vault4.vaultId}`); - // vaultList.push(`${vault5.vaultName}\t${vault5.vaultId}`); + const vaults = await conn.scanVaults(sourceNodeId); expect(vaults.sort()).toStrictEqual(vaultList.sort()); diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index e4d68e4cd2..b5ad22b8ae 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ -3,6 +3,7 @@ import type { NodeId, NodeAddress } from '@/nodes/types'; import type { Host, Port } from '@/network/types'; import type { CertificatePem, KeyPairPem, PublicKeyPem } from '@/keys/types'; import type { ClaimIdString } from '@/claims/types'; +import type { VaultName, VaultId } from '@/vaults/types'; import os from 'os'; import path from 'path'; @@ -16,6 +17,7 @@ import { ForwardProxy, ReverseProxy } from '@/network'; import { Sigchain } from '@/sigchain'; import { sleep } from '@/utils'; import * as testUtils from '../utils'; +import * as vaultsErrors from '@/vaults/errors'; import * as nodesErrors from '@/nodes/errors'; import * as claimsUtils from '@/claims/utils'; import { makeCrypto } from '../utils'; @@ -142,7 +144,7 @@ describe('NodeManager', () => { beforeEach(async () => { await target.start({}); - targetNodeId = target.keys.getNodeId(); + targetNodeId = target.nodes.getNodeId(); targetNodeAddress = { ip: target.revProxy.getIngressHost(), port: target.revProxy.getIngressPort(), @@ -207,6 +209,85 @@ describe('NodeManager', () => { }); }); + describe('Scanning nodes', () => { + let server: PolykeyAgent; + let serverNodeId: NodeId; + let serverNodeAddress: NodeAddress; + + beforeAll(async () => { + server = await testUtils.setupRemoteKeynode({ + logger: logger, + }); + serverNodeId = server.nodes.getNodeId(); + serverNodeAddress = { + ip: server.revProxy.getIngressHost(), + port: server.revProxy.getIngressPort(), + }; + }, global.polykeyStartupTimeout * 2); + + afterAll(async () => { + await testUtils.cleanupRemoteKeynode(server); + }); + + test('scans the targets vaults', async () => { + await nodeManager.setNode(serverNodeId, serverNodeAddress); + await server.gestalts.setNode({ + id: nodeManager.getNodeId(), + chain: {}, + }); + await server.gestalts.setGestaltActionByNode( + nodeManager.getNodeId(), + 'scan', + ); + + const vaultName1 = 'vn1' as VaultName; + const vaultName2 = 'vn2' as VaultName; + const vaultName3 = 'vn3' as VaultName; + const v1 = await server.vaults.createVault(vaultName1); + const v2 = await server.vaults.createVault(vaultName2); + const v3 = await server.vaults.createVault(vaultName3); + + const vaultList: Array<[VaultName, VaultId]> = []; 
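Scanning is now exercised end to end: the remote grants the 'scan' gestalt action and scanNodeVaults returns name and id pairs, while the following test checks that a missing grant rejects with ErrorVaultPermissionDenied. A condensed usage sketch, with the NodeManager typed structurally rather than against the real class:

// Condensed scanNodeVaults usage based on the NodeManager tests here; the
// remote node must have granted this node's gestalt the 'scan' action.
import type { NodeId } from '@/nodes/types';
import type { VaultName, VaultId } from '@/vaults/types';

async function listRemoteVaults(
  nodeManager: {
    scanNodeVaults(nodeId: NodeId): Promise<Array<[VaultName, VaultId]>>;
  },
  remoteNodeId: NodeId,
): Promise<Array<[VaultName, VaultId]>> {
  // Rejects with vaultsErrors.ErrorVaultPermissionDenied when the 'scan'
  // action has not been granted to this node
  return await nodeManager.scanNodeVaults(remoteNodeId);
}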
+ + vaultList.push([vaultName1, v1.vaultId]); + vaultList.push([vaultName2, v2.vaultId]); + vaultList.push([vaultName3, v3.vaultId]); + + const vaults = await nodeManager.scanNodeVaults(serverNodeId); + expect(vaults.sort()).toStrictEqual(vaultList.sort()); + + await server.gestalts.unsetGestaltActionByNode( + nodeManager.getNodeId(), + 'scan', + ); + }); + + test('fails to scan the targets vaults without permission', async () => { + await nodeManager.setNode(serverNodeId, serverNodeAddress); + await server.gestalts.setNode({ + id: nodeManager.getNodeId(), + chain: {}, + }); + + const vaultName1 = 'vn1' as VaultName; + const vaultName2 = 'vn2' as VaultName; + const vaultName3 = 'vn3' as VaultName; + const v1 = await server.vaults.createVault(vaultName1); + const v2 = await server.vaults.createVault(vaultName2); + const v3 = await server.vaults.createVault(vaultName3); + + const vaultList: Array<[VaultName, VaultId]> = []; + + vaultList.push([vaultName1, v1.vaultId]); + vaultList.push([vaultName2, v2.vaultId]); + vaultList.push([vaultName3, v3.vaultId]); + + await expect(() => + nodeManager.scanNodeVaults(serverNodeId), + ).rejects.toThrow(vaultsErrors.ErrorVaultPermissionDenied); + }); + }); + test( 'pings node', async () => { diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index 273954a3de..01ae24971f 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -127,26 +127,27 @@ describe('NotificationsManager', () => { fs: fs, logger: logger, }); + receiverNotificationsManager = + await NotificationsManager.createNotificationsManager({ + acl: receiverACL, + db: receiverDb, + nodeManager: receiverNodeManager, + keyManager: receiverKeyManager, + messageCap: 5, + logger: logger, + }); receiverVaultManager = await VaultManager.createVaultManager({ keyManager: receiverKeyManager, vaultsPath: receiverVaultsPath, nodeManager: receiverNodeManager, - vaultsKey: receiverKeyManager.vaultKey, db: receiverDb, acl: receiverACL, gestaltGraph: receiverGestaltGraph, + notificationsManager: receiverNotificationsManager, fs: fs, logger: logger, + fresh: true, }); - receiverNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: receiverACL, - db: receiverDb, - nodeManager: receiverNodeManager, - keyManager: receiverKeyManager, - messageCap: 5, - logger: logger, - }); receiverKeyPairPem = receiverKeyManager.getRootKeyPairPem(); receiverCertPem = receiverKeyManager.getRootCertPem(); receiverNodeId = networkUtils.certNodeId(receiverKeyManager.getRootCert()); @@ -164,6 +165,8 @@ describe('NotificationsManager', () => { nodeManager: receiverNodeManager, sigchain: receiverSigchain, notificationsManager: receiverNotificationsManager, + acl: receiverACL, + gestaltGraph: receiverGestaltGraph, }); server = await GRPCServer.createGRPCServer({ logger: logger, diff --git a/tests/utils.ts b/tests/utils.ts index a798d2a94e..ac3a39906d 100644 --- a/tests/utils.ts +++ b/tests/utils.ts @@ -37,6 +37,7 @@ async function setupRemoteKeynode({ logger: logger, cores: 1, workerManager: null, + fresh: true, }); await remote.start({}); return remote; diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index bbc74dde90..a29d8e70ec 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -41,7 +41,7 @@ describe('VaultInternal', () => { keyManager = await KeyManager.createKeyManager({ keysPath, password: 'password', 
- logger: logger, + logger, }); vault = await VaultInternal.create({ vaultId, diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 69c5d6b0a9..bc9e6e6d3c 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -1,12 +1,13 @@ import type { NodeId, NodeAddress, NodeInfo } from '@/nodes/types'; -import type { Host, Port, TLSConfig } from '@/network/types'; -import type { VaultId, VaultKey, VaultName } from '@/vaults/types'; +import type { TLSConfig } from '@/network/types'; +import type { VaultId, VaultName } from '@/vaults/types'; import type { ChainData } from '@/sigchain/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { utils as idUtils } from '@matrixai/id'; import { KeyManager } from '@/keys'; import { NodeManager } from '@/nodes'; @@ -22,10 +23,8 @@ import { NotificationsManager } from '@/notifications'; import { IAgentServiceServer } from '@/proto/js/polykey/v1/agent_service_grpc_pb'; import { errors as vaultErrors } from '@/vaults'; -import { utils as vaultUtils } from '@/vaults'; import { makeCrypto } from '../utils'; import { makeVaultId } from '@/vaults/utils'; -import { utils as idUtils } from '@matrixai/id'; describe('VaultManager', () => { const password = 'password'; @@ -35,7 +34,6 @@ describe('VaultManager', () => { const nonExistantVaultId = makeVaultId(idUtils.fromString('DoesNotExist')); let dataDir: string; let vaultsPath: string; - let vaultsKey: VaultKey; let keyManager: KeyManager; let db: DB; let acl: ACL; @@ -43,21 +41,10 @@ describe('VaultManager', () => { let nodeManager: NodeManager; let vaultManager: VaultManager; let sigchain: Sigchain; - - // FIXME, try not to do this, they can all have the localhost, - // but use the generated port when the server is started. 
- const sourceHost = '127.0.0.1' as Host; - const sourcePort = 11112 as Port; - const targetHost = '127.0.0.2' as Host; - const targetPort = 11113 as Port; - const altHost = '127.0.0.3' as Host; - const altPort = 11114 as Port; - const altHostIn = '127.0.0.4' as Host; - const altPortIn = 11115 as Port; + let notificationsManager: NotificationsManager; let fwdProxy: ForwardProxy; let revProxy: ReverseProxy; - let altRevProxy: ReverseProxy; const vaultName = 'TestVault' as VaultName; const secondVaultName = 'SecondTestVault' as VaultName; @@ -71,9 +58,6 @@ describe('VaultManager', () => { revProxy = await ReverseProxy.createReverseProxy({ logger: logger, }); - altRevProxy = await ReverseProxy.createReverseProxy({ - logger: logger, - }); }); beforeEach(async () => { dataDir = await fs.promises.mkdtemp( @@ -82,22 +66,12 @@ describe('VaultManager', () => { const keysPath = path.join(dataDir, 'keys'); const dbPath = path.join(dataDir, 'db'); vaultsPath = path.join(dataDir, 'vaults'); - vaultsKey = await vaultUtils.generateVaultKey(); keyManager = await KeyManager.createKeyManager({ password, keysPath: keysPath, logger: logger, }); - await fwdProxy.start({ - tlsConfig: { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }, - egressHost: sourceHost, - egressPort: sourcePort, - }); - db = await DB.createDB({ dbPath: dbPath, logger: logger, @@ -127,6 +101,16 @@ describe('VaultManager', () => { logger: logger, }); + notificationsManager = + await NotificationsManager.createNotificationsManager({ + acl: acl, + db: db, + nodeManager: nodeManager, + keyManager: keyManager, + messageCap: 5, + logger: logger, + }); + gestaltGraph = await GestaltGraph.createGestaltGraph({ db: db, acl: acl, @@ -136,11 +120,11 @@ describe('VaultManager', () => { vaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath, - vaultsKey, nodeManager, db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager: notificationsManager, fs, logger: logger, fresh: true, @@ -153,15 +137,13 @@ describe('VaultManager', () => { await db.stop(); await nodeManager.stop(); await keyManager.destroy(); + await fwdProxy.stop(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - afterAll(async () => { - await fwdProxy.stop(); - }); test('is type correct', () => { expect(vaultManager).toBeInstanceOf(VaultManager); }); @@ -285,9 +267,9 @@ describe('VaultManager', () => { vaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath, - vaultsKey, nodeManager, gestaltGraph, + notificationsManager, acl, db, logger, @@ -301,7 +283,7 @@ describe('VaultManager', () => { }, global.defaultTimeout * 2, ); - test.skip('cannot concurrently create the same vault', async () => { + test.skip('cannot concurrently create vaults with the same name', async () => { const vaults = Promise.all([ vaultManager.createVault(vaultName), vaultManager.createVault(vaultName), @@ -338,11 +320,11 @@ describe('VaultManager', () => { vaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath, - vaultsKey, nodeManager, db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager, fs, logger, }); @@ -391,7 +373,9 @@ describe('VaultManager', () => { const v9 = await vaultManager.getVaultId('Vault9' as VaultName); expect(v9).toBeTruthy(); await vaultManager.renameVault(v9!, 'Vault10' as VaultName); - await vaultManager.createVault('ThirdImpact' as VaultName); + const beforeVault = await 
vaultManager.createVault( + 'ThirdImpact' as VaultName, + ); await vaultManager.createVault('Cake' as VaultName); const vn: Array = []; (await vaultManager.listVaults()).forEach((_, vaultName) => @@ -399,17 +383,14 @@ describe('VaultManager', () => { ); expect(vn.sort()).toEqual(alteredVaultNames.sort()); await vaultManager.destroy(); - await db.stop(); - - await db.start(); const vaultManagerReloaded = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath, - vaultsKey, nodeManager, db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager, fs, logger, }); @@ -427,97 +408,24 @@ describe('VaultManager', () => { vnAltered.push(vaultName), ); expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); + const reloadedVault = await vaultManagerReloaded.openVault( + beforeVault.vaultId, + ); + await reloadedVault.commit(async (efs) => { + await efs.writeFile('reloaded', 'reload'); + }); + const file = await reloadedVault.access(async (efs) => { + return await efs.readFile('reloaded', { encoding: 'utf8' }); + }); + expect(file).toBe('reload'); await vaultManagerReloaded.destroy(); }, global.defaultTimeout * 2, ); - // Test('able to update the default node repo to pull from', async () => { - // await vaultManager.start({}); - // const vault1 = await vaultManager.createVault('MyTestVault'); - // const vault2 = await vaultManager.createVault('MyOtherTestVault'); - // const noNode = await vaultManager.getDefaultNode(vault1.vaultId); - // expect(noNode).toBeUndefined(); - // await vaultManager.setDefaultNode(vault1.vaultId, 'abc' as NodeId); - // const node = await vaultManager.getDefaultNode(vault1.vaultId); - // const noNode2 = await vaultManager.getDefaultNode(vault2.vaultId); - // expect(node).toBe('abc'); - // expect(noNode2).toBeUndefined(); - // await vaultManager.stop(); - // }); - // test('checking gestalt permissions for vaults', async () => { - // const node1: NodeInfo = { - // id: '123' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node2: NodeInfo = { - // id: '345' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node3: NodeInfo = { - // id: '678' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node4: NodeInfo = { - // id: '890' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const id1: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'abc' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - // const id2: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'def' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - - // await gestaltGraph.setNode(node1); - // await gestaltGraph.setNode(node2); - // await gestaltGraph.setNode(node3); - // await gestaltGraph.setNode(node4); - // await gestaltGraph.setIdentity(id1); - // await gestaltGraph.setIdentity(id2); - // await gestaltGraph.linkNodeAndNode(node1, node2); - // await gestaltGraph.linkNodeAndIdentity(node1, id1); - // await gestaltGraph.linkNodeAndIdentity(node4, id2); - - // await vaultManager.start({}); - // const vault = await vaultManager.createVault('Test'); - // await vaultManager.setVaultPermissions('123' as NodeId, vault.vaultId); - // let record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeNull(); - // expect(record['345']['pull']).toBeNull(); - // 
expect(record['678']).toBeUndefined(); - // expect(record['890']).toBeUndefined(); - - // await vaultManager.unsetVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeUndefined(); - - // await gestaltGraph.unlinkNodeAndNode(node1.id, node2.id); - // await vaultManager.setVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeNull(); - - // await vaultManager.stop(); - // }); - // /* TESTING TODO: - // * Changing the default node to pull from - // */ describe('interacting with another node to', () => { let targetDataDir: string, altDataDir: string; let targetKeyManager: KeyManager, altKeyManager: KeyManager; - let targetFwdProxy: ForwardProxy; + let targetFwdProxy: ForwardProxy, altFwdProxy: ForwardProxy; let targetDb: DB, altDb: DB; let targetACL: ACL, altACL: ACL; let targetGestaltGraph: GestaltGraph, altGestaltGraph: GestaltGraph; @@ -528,24 +436,45 @@ describe('VaultManager', () => { altNotificationsManager: NotificationsManager; let targetNodeId: NodeId, altNodeId: NodeId; - let revTLSConfig: TLSConfig, altRevTLSConfig: TLSConfig; + let revTLSConfig: TLSConfig, + targetRevTLSConfig: TLSConfig, + altRevTLSConfig: TLSConfig; - let targetAgentService: IAgentServiceServer, + let agentService: IAgentServiceServer, + targetAgentService: IAgentServiceServer, altAgentService: IAgentServiceServer; - let targetServer: GRPCServer, altServer: GRPCServer; + let agentServer: GRPCServer, + targetServer: GRPCServer, + altServer: GRPCServer; - let node: NodeInfo; + let targetRevProxy: ReverseProxy, altRevProxy: ReverseProxy; - let altFwdProxy: ForwardProxy; + let node: NodeInfo; beforeAll(async () => { altFwdProxy = await ForwardProxy.createForwardProxy({ authToken: 'abc', logger: logger, }); + targetFwdProxy = await ForwardProxy.createForwardProxy({ + authToken: 'def', + logger: logger, + }); + altRevProxy = await ReverseProxy.createReverseProxy({ + logger: logger, + }); + targetRevProxy = await ReverseProxy.createReverseProxy({ + logger: logger, + }); }); beforeEach(async () => { + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }, + }); node = { id: nodeManager.getNodeId(), chain: { nodes: {}, identities: {} } as ChainData, @@ -561,12 +490,18 @@ describe('VaultManager', () => { }); targetNodeId = targetKeyManager.getNodeId(); revTLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + targetRevTLSConfig = { keyPrivatePem: targetKeyManager.getRootKeyPairPem().privateKey, certChainPem: await targetKeyManager.getRootCertChainPem(), }; - targetFwdProxy = await ForwardProxy.createForwardProxy({ - authToken: '', - logger: logger, + await targetFwdProxy.start({ + tlsConfig: { + keyPrivatePem: targetKeyManager.getRootKeyPairPem().privateKey, + certChainPem: await targetKeyManager.getRootCertChainPem(), + }, }); targetDb = await DB.createDB({ dbPath: path.join(targetDataDir, 'db'), @@ -584,7 +519,7 @@ describe('VaultManager', () => { sigchain: targetSigchain, keyManager: targetKeyManager, fwdProxy: targetFwdProxy, - revProxy: revProxy, + 
revProxy: targetRevProxy, fs: fs, logger: logger, }); @@ -608,15 +543,14 @@ describe('VaultManager', () => { logger: logger, }); await targetGestaltGraph.setNode(node); - const targetVaultKey = await vaultUtils.generateVaultKey(); targetVaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath: path.join(targetDataDir, 'vaults'), - vaultsKey: targetVaultKey, nodeManager: targetNodeManager, db: targetDb, acl: targetACL, gestaltGraph: targetGestaltGraph, + notificationsManager: targetNotificationsManager, logger: logger, fresh: true, }); @@ -626,13 +560,14 @@ describe('VaultManager', () => { nodeManager: targetNodeManager, sigchain: targetSigchain, notificationsManager: targetNotificationsManager, + acl: targetACL, + gestaltGraph: targetGestaltGraph, }); targetServer = await GRPCServer.createGRPCServer({ logger: logger, }); await targetServer.start({ services: [[AgentServiceService, targetAgentService]], - host: targetHost, }); altDataDir = await fs.promises.mkdtemp( @@ -658,8 +593,6 @@ describe('VaultManager', () => { keyPrivatePem: altKeyManager.getRootKeyPairPem().privateKey, certChainPem: await altKeyManager.getRootCertChainPem(), }, - egressHost: altHost, - egressPort: altPort, }); altDb = await DB.createDB({ dbPath: path.join(altDataDir, 'db'), @@ -701,16 +634,16 @@ describe('VaultManager', () => { logger: logger, }); await altGestaltGraph.setNode(node); - const altVaultKey = await vaultUtils.generateVaultKey(); altVaultManager = await VaultManager.createVaultManager({ keyManager: keyManager, vaultsPath: path.join(altDataDir, 'vaults'), - vaultsKey: altVaultKey, nodeManager: altNodeManager, db: altDb, acl: altACL, + notificationsManager: altNotificationsManager, gestaltGraph: altGestaltGraph, logger: logger, + fresh: true, }); altAgentService = createAgentService({ keyManager: altKeyManager, @@ -718,46 +651,80 @@ describe('VaultManager', () => { nodeManager: altNodeManager, sigchain: altSigchain, notificationsManager: altNotificationsManager, + acl: altACL, + gestaltGraph: altGestaltGraph, }); altServer = await GRPCServer.createGRPCServer({ logger: logger, }); + await altServer.start({ services: [[AgentServiceService, altAgentService]], - host: altHostIn, + }); + + agentService = createAgentService({ + keyManager: keyManager, + vaultManager: vaultManager, + nodeManager: nodeManager, + sigchain: sigchain, + notificationsManager: notificationsManager, + acl: acl, + gestaltGraph: gestaltGraph, + }); + + agentServer = await GRPCServer.createGRPCServer({ + logger: logger, + }); + + await agentServer.start({ + services: [[AgentServiceService, agentService]], }); await revProxy.start({ - ingressHost: targetHost, - ingressPort: targetPort, - serverHost: targetHost, - serverPort: targetServer.getPort(), + serverHost: agentServer.getHost(), + serverPort: agentServer.getPort(), tlsConfig: revTLSConfig, }); + await targetRevProxy.start({ + serverHost: targetServer.getHost(), + serverPort: targetServer.getPort(), + tlsConfig: targetRevTLSConfig, + }); + await altRevProxy.start({ - ingressHost: altHostIn, - ingressPort: altPortIn, - serverHost: altHostIn, + serverHost: altServer.getHost(), serverPort: altServer.getPort(), tlsConfig: altRevTLSConfig, }); + + await acl.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + await acl.setNodePerm(altNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + await altACL.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); }, global.polykeyStartupTimeout 
* 2); afterEach(async () => { - await revProxy.closeConnection(altHost, altPort); - await revProxy.closeConnection(sourceHost, sourcePort); - await altRevProxy.closeConnection(sourceHost, sourcePort); - await fwdProxy.closeConnection( - fwdProxy.getEgressHost(), - fwdProxy.getEgressPort(), - ); - await altFwdProxy.closeConnection( - altFwdProxy.getEgressHost(), - altFwdProxy.getEgressPort(), - ); await revProxy.stop(); + await targetRevProxy.stop(); await altRevProxy.stop(); + await agentServer.stop(); await targetServer.stop(); await targetVaultManager.destroy(); await targetGestaltGraph.destroy(); @@ -778,275 +745,340 @@ describe('VaultManager', () => { await altDb.stop(); await altNodeManager.stop(); await altKeyManager.destroy(); + await targetFwdProxy.stop(); + await altFwdProxy.stop(); await fs.promises.rm(altDataDir, { force: true, recursive: true, }); }); - afterAll(async () => { - await altFwdProxy.stop(); - }); - test( - 'clone and pull vaults', + 'clone vaults using a vault name', async () => { - // Await vaultManager.createVault(vaultName); - // await vaultManager.createVault('MyFirstVault copy'); - const vault = await targetVaultManager.createVault(vaultName); - // Await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); + const firstVault = await targetVaultManager.createVault(vaultName); const names: string[] = []; - for (let i = 0; i < 1; i++) { + for (let i = 0; i < 5; i++) { const name = 'secret ' + i.toString(); names.push(name); const content = 'Success?'; - await vaultOps.addSecret(vault, name, content); + await vaultOps.addSecret(firstVault, name, content); } await nodeManager.setNode(targetNodeId, { - ip: targetHost, - port: targetPort, + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await targetVaultManager.shareVault( + firstVault.vaultId, + nodeManager.getNodeId(), + ); + await expect(() => + vaultManager.cloneVault(targetNodeId, 'not-existing' as VaultName), + ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + await vaultManager.cloneVault(targetNodeId, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + const vaultClone = await vaultManager.openVault(vaultId!); + const file = await vaultClone.access(async (efs) => { + return await efs.readFile('secret 0', { encoding: 'utf8' }); + }); + expect(file).toBe('Success?'); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + }, + global.defaultTimeout * 2, + ); + test( + 'clone and pull vaults using a vault id', + async () => { + const firstVault = await targetVaultManager.createVault(vaultName); + await nodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), } as NodeAddress); - await nodeManager.getConnectionToNode(targetNodeId); - await revProxy.openConnection(sourceHost, sourcePort); - await vaultManager.cloneVault(targetNodeId, vault.vaultId); + await targetVaultManager.shareVault( + firstVault.vaultId, + nodeManager.getNodeId(), + ); + const names: string[] = []; + for (let i = 0; i < 5; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await 
vaultOps.addSecret(firstVault, name, content); + } + await vaultManager.cloneVault(targetNodeId, firstVault.vaultId); const vaultId = await vaultManager.getVaultId(vaultName); const vaultClone = await vaultManager.openVault(vaultId!); let file = await vaultClone.access(async (efs) => { return await efs.readFile('secret 0', { encoding: 'utf8' }); }); expect(file).toBe('Success?'); - // Expect(vaultsList[2].name).toStrictEqual('MyFirstVault copy copy'); - // await expect( - // vaultManager.getDefaultNode(vaultsList[2].id), - // ).resolves.toBe(targetNodeId); - // const clonedVault = await vaultManager.getVault(vaultsList[2].id); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'Success?', - // ); - // expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - for (let i = 1; i < 2; i++) { + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + for (let i = 5; i < 10; i++) { const name = 'secret ' + i.toString(); names.push(name); const content = 'Second Success?'; - await vaultOps.addSecret(vault, name, content); + await vaultOps.addSecret(firstVault, name, content); } await vaultManager.pullVault({ vaultId: vaultClone.vaultId }); file = await vaultClone.access(async (efs) => { - return await efs.readFile('secret 1', { encoding: 'utf8' }); + return await efs.readFile('secret 5', { encoding: 'utf8' }); }); expect(file).toBe('Second Success?'); - // Expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - // expect(await clonedVault.getSecret('secret 19')).toStrictEqual( - // 'Second Success?', - // ); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + }, + global.defaultTimeout * 4, + ); + test( + 'reject cloning and pulling when permissions are not set', + async () => { + const vault = await targetVaultManager.createVault(vaultName); + await nodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await vaultOps.addSecret(vault, 'MyFirstSecret', 'Success?'); + await expect(() => + vaultManager.cloneVault(targetNodeId, vault.vaultId), + ).rejects.toThrow(vaultErrors.ErrorVaultPermissionDenied); + expect((await vaultManager.listVaults()).size).toBe(0); + await targetVaultManager.shareVault( + vault.vaultId, + nodeManager.getNodeId(), + ); + const clonedVault = await vaultManager.cloneVault( + targetNodeId, + vault.vaultId, + ); + const file = await clonedVault.access(async (efs) => { + return await efs.readFile('MyFirstSecret', { encoding: 'utf8' }); + }); + expect(file).toBe('Success?'); + await targetVaultManager.unshareVault( + vault.vaultId, + nodeManager.getNodeId(), + ); + vaultOps.addSecret(vault, 'MySecondSecret', 'SecondSuccess?'); + await expect(() => + vaultManager.pullVault({ vaultId: clonedVault.vaultId }), + ).rejects.toThrow(vaultErrors.ErrorVaultPermissionDenied); + await expect(vaultOps.listSecrets(clonedVault)).resolves.toStrictEqual([ + 'MyFirstSecret', + ]); + }, + global.defaultTimeout * 2, + ); + test( + 'throw when encountering merge conflicts', + async () => { + const vault = await targetVaultManager.createVault(vaultName); + await nodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); 
+ await targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await targetVaultManager.shareVault( + vault.vaultId, + nodeManager.getNodeId(), + ); + const names: string[] = []; + for (let i = 0; i < 5; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await vaultOps.addSecret(vault, name, content); + } + await vaultOps.mkdir(vault, 'dir', { recursive: true }); + const cloneVault = await vaultManager.cloneVault( + targetNodeId, + vault.vaultId, + ); + await vaultOps.renameSecret(cloneVault, 'secret 4', 'secret 5'); + await vaultOps.renameSecret( + vault, + 'secret 4', + 'causing merge conflict', + ); + await expect(() => + vaultManager.pullVault({ vaultId: cloneVault.vaultId }), + ).rejects.toThrow(vaultErrors.ErrorVaultMergeConflict); + }, + global.defaultTimeout * 2, + ); + test( + 'clone and pull from other cloned vaults', + async () => { + const vault = await targetVaultManager.createVault(vaultName); + await nodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await nodeManager.setNode(altNodeId, { + ip: altRevProxy.getIngressHost(), + port: altRevProxy.getIngressPort(), + } as NodeAddress); + await altNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await altNodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(altNodeId, { + ip: altRevProxy.getIngressHost(), + port: altRevProxy.getIngressPort(), + } as NodeAddress); + await targetVaultManager.shareVault( + vault.vaultId, + altNodeManager.getNodeId(), + ); + await targetVaultManager.shareVault( + vault.vaultId, + nodeManager.getNodeId(), + ); + const names: string[] = []; + for (let i = 0; i < 5; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await vaultOps.addSecret(vault, name, content); + } + const clonedVaultAlt = await altVaultManager.cloneVault( + targetNodeId, + vault.vaultId, + ); + await altVaultManager.shareVault( + clonedVaultAlt.vaultId, + nodeManager.getNodeId(), + ); + await vaultManager.cloneVault(altNodeId, clonedVaultAlt.vaultId); + const vaultIdClone = await vaultManager.getVaultId(vaultName); + const vaultClone = await vaultManager.openVault(vaultIdClone!); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + for (let i = 5; i < 10; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await vaultOps.addSecret(vault, name, content); + } + await vaultManager.pullVault({ + vaultId: vaultClone.vaultId, + pullNodeId: targetNodeId, + pullVaultNameOrId: vault.vaultId, + }); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + }, + global.defaultTimeout * 5, + ); + test( + 'manage pulling from different remotes', + async () => { + const vault = await targetVaultManager.createVault(vaultName); + await nodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await 
targetNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await nodeManager.setNode(altNodeId, { + ip: altRevProxy.getIngressHost(), + port: altRevProxy.getIngressPort(), + } as NodeAddress); + await altNodeManager.setNode(nodeManager.getNodeId(), { + ip: revProxy.getIngressHost(), + port: revProxy.getIngressPort(), + } as NodeAddress); + await altNodeManager.setNode(targetNodeId, { + ip: targetRevProxy.getIngressHost(), + port: targetRevProxy.getIngressPort(), + } as NodeAddress); + await targetNodeManager.setNode(altNodeId, { + ip: altRevProxy.getIngressHost(), + port: altRevProxy.getIngressPort(), + } as NodeAddress); + await targetVaultManager.shareVault( + vault.vaultId, + altNodeManager.getNodeId(), + ); + await targetVaultManager.shareVault( + vault.vaultId, + nodeManager.getNodeId(), + ); + const names: string[] = []; + for (let i = 0; i < 2; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await vaultOps.addSecret(vault, name, content); + } + const clonedVaultAlt = await altVaultManager.cloneVault( + targetNodeId, + vault.vaultId, + ); + await altVaultManager.shareVault( + clonedVaultAlt.vaultId, + nodeManager.getNodeId(), + ); + const vaultClone = await vaultManager.cloneVault( + altNodeId, + clonedVaultAlt.vaultId, + ); + for (let i = 2; i < 4; i++) { + const name = 'secret ' + i.toString(); + names.push(name); + const content = 'Success?'; + await vaultOps.addSecret(vault, name, content); + } + await vaultManager.pullVault({ + vaultId: vaultClone.vaultId, + pullNodeId: targetNodeId, + pullVaultNameOrId: vaultName, + }); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); + for (let i = 4; i < 6; i++) { + const name = 'secret ' + i.toString(); + const content = 'Success?'; + await vaultOps.addSecret(clonedVaultAlt, name, content); + } + await vaultManager.pullVault({ vaultId: vaultClone.vaultId }); + expect((await vaultOps.listSecrets(vaultClone)).sort()).toStrictEqual( + names.sort(), + ); }, global.defaultTimeout * 2, ); - // Test( - // 'reject clone and pull ops when permissions are not set', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await vault.addSecret('MyFirstSecret', 'Success?'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await expect(() => - // vaultManager.cloneVault(vault.vaultId, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList).toStrictEqual([]); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // await targetVaultManager.unsetVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // vault.addSecret('MySecondSecret', 'SecondSuccess?'); - // await expect(() => - // vaultManager.pullVault(vaultList[0].id, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const list = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(list[0].id); - // 
expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // ['MyFirstSecret'].sort(), - // ); - // await vaultManager.stop(); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'handle vault conflicts', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await vault.mkdir('dir', { recursive: true }); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(vaultList[0].id); - // await clonedVault.renameSecret('secret 9', 'secret 10'); - // await vault.renameSecret('secret 9', 'causing merge conflict'); - // await expect(() => - // vaultManager.pullVault(clonedVault.vaultId), - // ).rejects.toThrow(vaultErrors.ErrorVaultMergeConflict); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull from a default node', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await 
vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // await altClonedVault.addSecret('secret 10', 'default pull?'); - // await vaultManager.pullVault(clonedVault.vaultId); - // expect(await clonedVault.getSecret('secret 10')).toStrictEqual( - // 'default pull?', - // ); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull within a system of 3 nodes', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // }, - // global.defaultTimeout * 2, - // ); }); }); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index ba06f58c62..24d7b3a2b3 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -15,9 +15,6 @@ import { NodeId } from '@/nodes/types'; describe('VaultOps', () => { const password = 'password'; const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]); - const probeLogger = new Logger('vaultOpsProbe', LogLevel.INFO, [ - new StreamHandler(), - ]); let dataDir: string; @@ -209,14 +206,9 @@ describe('VaultOps', () => { path.join('dir-1', 'dir-2', 'secret-1'), 'secret-content', ); - await vaultOps.deleteSecret( - vault, - path.join('dir-1', 'dir-2'), - { - recursive: true, - }, - probeLogger, - ); + await vaultOps.deleteSecret(vault, path.join('dir-1', 'dir-2'), { + recursive: true, + }); await expect( vault.access((efs) => 
efs.readdir('dir-1')), ).resolves.not.toContain('dir-2'); diff --git a/tests/vaults/old/Vault.test.ts.old b/tests/vaults/old/Vault.test.ts.old deleted file mode 100644 index 96202daa5e..0000000000 --- a/tests/vaults/old/Vault.test.ts.old +++ /dev/null @@ -1,565 +0,0 @@ -import type { NodeId } from '@/nodes/types'; -import type { VaultId, VaultIdRaw, VaultKey, VaultName } from "@/vaults/types"; - -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import git from 'isomorphic-git'; -import Vault from '@/vaults/old/Vault'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; - -import { generateVaultId, generateVaultKey } from '@/vaults/utils'; -import { getRandomBytes } from '@/keys/utils'; -import { EncryptedFS } from 'encryptedfs'; -import * as errors from '@/vaults/errors'; -import * as utils from '@/utils'; - -describe.skip('Vault is', () => { - let dataDir: string; - let vault: Vault; - let key: VaultKey; - let vaultId: VaultId; - let efsDir: string; - const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); - const name = 'vault-1' as VaultName; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - key = await generateVaultKey(); - vaultId = generateVaultId(); - efsDir = path.join(dataDir, vaultId); - await fs.promises.mkdir(efsDir); - vault = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - }); - - afterEach(async () => { - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - - test('type correct', async () => { - expect(vault).toBeInstanceOf(Vault); - }); - test('creating the vault directory', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - }); - test('able to destroy an empty vault', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - await vault.stop(); - await expect(fs.promises.readdir(dataDir)).resolves.not.toContain(vaultId); - }); - test('adding a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding a secret and getting it', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - const secret = await vault.getSecret('secret-1'); - expect(secret).toBe('secret-content'); - await expect(() => vault.getSecret('doesnotexist')).rejects.toThrow( - errors.ErrorSecretUndefined, - ); - await vault.stop(); - }); - test('able to make directories', async () => { - await vault.start({ key }); - await vault.mkdir('dir-1', { recursive: true }); - await vault.mkdir('dir-2', { recursive: true }); - await vault.mkdir(path.join('dir-3', 'dir-4'), { recursive: true }); - await vault.addSecret( - path.join('dir-3', 'dir-4', 'secret-1'), - 'secret-content', - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-2.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-3.data')), - ).resolves.toContain('dir-4.data'); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-3.data', 'dir-4.data'), - ), - 
).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding and committing a secret 10 times', async () => { - await vault.start({ key }); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('updating secret content', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.updateSecret('secret-1', 'secret-content-change'); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual( - 'secret-content-change', - ); - await vault.stop(); - }); - test('updating secret content within a directory', async () => { - await vault.start({ key }); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.updateSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content-change', - ); - await expect( - vault.getSecret(path.join('dir-1', 'dir-2', 'secret-1')), - ).resolves.toStrictEqual('secret-content-change'); - await vault.stop(); - }); - test('updating a secret 10 times', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - for (let i = 0; i < 10; i++) { - const content = 'secret-content'; - await vault.updateSecret('secret-1', content); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual(content); - } - await vault.stop(); - }); - test('deleting a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.mkdir('dir-1'); - await vault.deleteSecret('secret-1'); - await expect(() => vault.deleteSecret('dir-1')).rejects.toThrow( - errors.ErrorRecursive, - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await vault.stop(); - }); - test('deleting a secret within a directory', async () => { - await vault.start({ key }); - await expect(() => vault.mkdir(path.join('dir-1', 'dir-2'))).rejects.toThrow( - errors.ErrorRecursive, - ); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.deleteSecret(path.join('dir-1', 'dir-2'), { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-1.data')), - ).resolves.not.toContain('dir2-1.data'); - await vault.stop(); - }); - test('deleting a secret 10 times', async () => { - await vault.start({ key }); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await vault.deleteSecret(name, { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('renaming a vault', async () => { - await vault.start({ key }); - await vault.renameVault('vault-change' as VaultName); - expect(vault.vaultName).toEqual('vault-change'); - await vault.stop(); - }); - test('renaming a secret', async () => { - await vault.start({ key }); - await 
vault.addSecret('secret-1', 'secret-content'); - await vault.renameSecret('secret-1', 'secret-change'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-change.data'); - await vault.stop(); - }); - test('renaming a secret within a directory', async () => { - await vault.start({ key }); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.renameSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - path.join('dir-1', 'dir-2', 'secret-change'), - ); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-1.data', 'dir-2.data'), - ), - ).resolves.toContain(`secret-change.data`); - await vault.stop(); - }); - test('listing secrets', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.addSecret('secret-2', 'secret-content'); - await vault.mkdir(path.join('dir1', 'dir2'), { recursive: true }); - await vault.addSecret( - path.join('dir1', 'dir2', 'secret-3'), - 'secret-content', - ); - expect((await vault.listSecrets()).sort()).toStrictEqual( - ['secret-1', 'secret-2', 'dir1/dir2/secret-3'].sort(), - ); - await vault.stop(); - }); - test('listing secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - } - await vault.start({ key }); - await vault.addSecretDirectory(secretDir); - expect(await vault.listSecrets()).toStrictEqual([ - path.join(secretDirName, `secret 0`), - path.join(secretDirName, `secret 1`), - path.join(secretDirName, `secret 2`), - path.join(secretDirName, `secret 3`), - path.join(secretDirName, `secret 4`), - path.join(secretDirName, `secret 5`), - path.join(secretDirName, `secret 6`), - path.join(secretDirName, `secret 7`), - path.join(secretDirName, `secret 8`), - path.join(secretDirName, `secret 9`), - ]); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hiddenSecret', '.hiddenDir/.hiddenInSecret'].sort(), - ); - await vault.stop(); - }); - test('updating and deleting hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - await vault.updateSecret('.hiddenSecret', 'change_contents'); - await vault.updateSecret('.hiddenDir/.hiddenInSecret', 'change_inside'); - await vault.renameSecret('.hiddenSecret', '.hidingSecret'); - await vault.renameSecret('.hiddenDir', '.hidingDir'); - let list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hidingSecret', 
'.hidingDir/.hiddenInSecret'].sort(), - ); - await expect(vault.getSecret('.hidingSecret')).resolves.toStrictEqual( - 'change_contents', - ); - await expect( - vault.getSecret('.hidingDir/.hiddenInSecret'), - ).resolves.toStrictEqual('change_inside'); - await vault.deleteSecret('.hidingSecret', { recursive: true }); - await vault.deleteSecret('.hidingDir', { recursive: true }); - list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual([].sort()); - await vault.stop(); - }); - test( - 'adding and committing a secret 100 times on efs', - async () => { - const efs = await EncryptedFS.createEncryptedFS({ - dbKey: await getRandomBytes(32), - dbPath: dataDir, - }); - const exists = utils.promisify(efs.exists).bind(efs); - const mkdir = utils.promisify(efs.mkdir).bind(efs); - const writeFile = utils.promisify(efs.writeFile).bind(efs); - const vaultId = vault.vaultId; - await mkdir(path.join(dataDir, vaultId), { - recursive: true, - }); - await git.init({ - fs: efs, - dir: path.join(dataDir, vaultId), - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: 'Initial Commit', - }); - await writeFile( - path.join(path.join(dataDir, vaultId), '.git', 'packed-refs'), - '# pack-refs with: peeled fully-peeled sorted', - ); - for (let i = 0; i < 100; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - const writePath = path.join(dataDir, vaultId, name); - await writeFile(writePath, content, {}); - await git.add({ - fs: efs, - dir: path.join(dataDir, vaultId), - filepath: name, - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: `Add secret: ${name}`, - }); - - await expect(exists(path.join(dataDir, vaultId, name))).resolves.toBe( - true, - ); - } - }, - global.defaultTimeout * 2, - ); - test('adding a directory of 1 secret', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - const name = 'secret'; - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, `${secretDirName}.data`)), - ).resolves.toContain('secret.data'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('getting the stats of a vault', async () => { - await vault.start({ key }); - const stats = await vault.stats(); - expect(stats).toBeInstanceOf(fs.Stats); - await vault.stop(); - }); - test('adding a directory with subdirectories and files', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 
'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('testing the errors handling of adding secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: true }); - await vault.addSecret( - path.join(secretDirName, 'secret1'), - 'blocking-secret', - ); - await vault.addSecretDirectory(secretDir); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.start({ key }); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding a directory of 100 secrets with some secrets already existing', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 50; i++) { - const name = 'secret ' + i.toString(); - const content = 'this is secret ' + i.toString(); - await fs.promises.writeFile( - path.join(secretDir, name), - Buffer.from(content), - ); - } - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: false }); - await vault.addSecret( - path.join(secretDirName, 'secret 8'), - 'secret-content', - ); - await vault.addSecret( - path.join(secretDirName, 'secret 9'), - 'secret-content', - ); - await vault.addSecretDirectory(secretDir); - - for (let j = 0; j < 8; j++) { - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, `${secretDirName}.data`), - ), - ).resolves.toContain('secret ' + j.toString() + '.data'); - } - await expect( - vault.getSecret(path.join(secretDirName, 'secret 8')), - ).resolves.toStrictEqual('this is secret 8'); - await expect( - vault.getSecret(path.join(secretDirName, 'secret 9')), - ).resolves.toStrictEqual('this is secret 9'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('able to persist data across multiple vault objects', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 
'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - const vault2 = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - await vault2.start({ key }); - const content = await vault2.getSecret('secret-1'); - expect(content).toBe('secret-content'); - await vault2.stop(); - }); - // Test('able to erase dirty commits on start up', async () => { - // await vault.start({ key }); - // await vault.addSecret('secret-1', 'secret-content'); - // await vault.mkdir('dir-1', { recursive: true }); - // await vault.addSecret('dir-1/secret-1', 'secret-content'); - // await vault.start({ key }); - // await fs.promises.writeFile(path.join(dataDir, `${vault.vaultId}:nodeID`), 'dirty-commit'); - // const vault2 = new Vault({ - // vaultId: vaultId, - // vaultName: name, - // baseDir: efsDir, - // fs: fs, - // logger: logger, - // }); - // await vault2.start({ key }); - // await vault2.stop(); - // }); -}); diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 495509d43f..38b3e2a533 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -28,18 +28,9 @@ describe('Vaults utils', () => { }); test('VaultId type guard works', async () => { - // Const nodeId = makeNodeId('A'.repeat(44)); const vaultId = vaultsUtils.generateVaultId(); expect(isVaultId(vaultId)).toBeTruthy(); }); - // TODO: this may be fully removed later. check if splitting is needed for vaultIds - // test('vaultIds can be split', async () => { - // const nodeId = 'alkjsddfjknacqqquiry32741834id'; - // const id = vaultsUtils.generateVaultId(); - // expect(id).toContain(nodeId); - // const vaultId = vaultsUtils.splitVaultId(id); - // expect(vaultId).not.toContain(nodeId); - // }); test.skip('EFS can be read recursively', async () => { const key = await vaultsUtils.generateVaultKey(); const efs = await EncryptedFS.createEncryptedFS({ @@ -69,74 +60,8 @@ describe('Vaults utils', () => { ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(), ); }); - // Test('a persisted EFS object can be read recursively', async () => { - // const key = await vaultsUtils.generateVaultKey(); - // const efs = new EncryptedFS(key, fs, dataDir); - // const mkdir = utils.promisify(efs.mkdir).bind(efs); - // const writeFile = utils.promisify(efs.writeFile).bind(efs); - // await mkdir('dir', { recursive: true }); - // await mkdir('dir/dir2/dir3', { recursive: true }); - // await writeFile('dir/file', 'content'); - // const efs2 = new EncryptedFS(key, fs, dataDir); - // let files: string[] = []; - // for await (const file of vaultsUtils.readdirRecursivelyEFS( - // efs2, - // '', - // false, - // )) { - // files.push(file); - // } - // expect(files.sort()).toStrictEqual(['dir/file'].sort()); - // files = []; - // for await (const file of vaultsUtils.readdirRecursivelyEFS( - // efs2, - // '', - // true, - // )) { - // files.push(file); - // } - // expect(files.sort()).toStrictEqual( - // ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(), - // ); - // }); - test.skip('can search for a vault name', async () => { - // Const vaultList = ['a\tb', 'b\ta', '', 'c\tc', 'e\tf']; - - fail(); - // FIXME secret methods not implemented. 
- // expect(vaultsUtils.searchVaultName(vaultList, 'b' as VaultId)).toEqual('a'); - // expect(vaultsUtils.searchVaultName(vaultList, 'a' as VaultId)).toEqual('b'); - // expect(vaultsUtils.searchVaultName(vaultList, 'c' as VaultId)).toEqual('c'); - // expect(vaultsUtils.searchVaultName(vaultList, 'f' as VaultId)).toEqual('e'); - // expect(() => - // vaultsUtils.searchVaultName(vaultList, 'd' as VaultId), - // ).toThrow(vaultsErrors.ErrorRemoteVaultUndefined); - }); test('makeVaultId converts a buffer', async () => { const randomIdGen = new IdRandom(); Buffer.from(randomIdGen.get()); }); }); - -// Test('vaultIds are alphanumeric', async () => { -// const id1 = utils.generateVaultId('abc'); -// -// expect(isAlphaNumeric(id1)).toBe(true); -// }); -// -// function isAlphaNumeric(str) { -// let code, i, len; -// -// for (i = 0, len = str.length; i < len; i++) { -// code = str.charCodeAt(i); -// if ( -// !(code > 47 && code < 58) && // numeric (0-9) -// !(code > 64 && code < 91) && // upper alpha (A-Z) -// !(code > 96 && code < 123) -// ) { -// // lower alpha (a-z) -// return false; -// } -// } -// return true; -// }