Import from Bitwarden
You can import Bitwarden JSON files into Passwork via the API: run the import.js script and follow its instructions. TOTP codes must be valid, otherwise the script will terminate with an error. A trimmed sample of the export structure the script expects is shown at the end of this article.

Configure and run (Debian-based systems)

Get root privileges and update the local package database:

```
sudo -i
apt-get update
```

Install Node.js and npm:

```
apt install nodejs npm -y
```

The Node.js version must be 17 or higher. Check the installed version:

```
node -v
```

Install the modules required for the import:

```
npm install dotenv readline fs util passwork-js
```

Create the import script — import.js.

Source code of import.js:

```js
require("util").inspect.defaultOptions.depth = null;
const env = require('dotenv').config().parsed;
const readline = require('readline');
const fs = require('fs');
const Passwork = require('./node_modules/passwork-js/src/passwork-api');
/** @type PassworkAPI */
const passwork = new Passwork(env.HOST);

function throwFatalError(message, error) {
    console.error(message);
    console.error(error);
    process.exit(0);
}

(async () => {
    try {
        const [argFileName, argCollections, argPath] = process.argv.slice(2);
        let jsonFileName;
        let jsonData;
        let collectionsToImport = [];
        let importVault;

        // Authorize
        try {
            await passwork.login(env.API_KEY, env.USER_MASTER_PASS);
        } catch (e) {
            throwFatalError('Failed to authorise', e);
        }

        const rl = readline.createInterface({input: process.stdin, output: process.stdout});

        // Read JSON from Bitwarden
        const answerFileName = await new Promise(resolve => {
            rl.question('\nSpecify the file to export\n', resolve)
        });
        jsonFileName = answerFileName ? answerFileName : argFileName;
        try {
            jsonData = JSON.parse(fs.readFileSync(jsonFileName));
            if (!jsonData || !jsonData.hasOwnProperty('items')) {
                throw 'Invalid JSON file format';
            }
        } catch (e) {
            throwFatalError('Failed to read JSON file', e);
        }

        // Specify collections to import
        const answerCollections = await new Promise(resolve => {
            rl.question('\nSpecify comma-separated ID or name of collections to be exported (optional)\n', resolve)
        });
        let collections = answerCollections ? answerCollections : argCollections;
        if (collections) {
            collections = collections.split(',').map(c => c.trim()).filter((c) => c);
        } else {
            collections = [];
        }
        if (jsonData.collections && jsonData.collections.length) {
            if (collections.length === 0) {
                collectionsToImport = jsonData.collections;
            } else {
                jsonData.collections.forEach(c => {
                    if (collections.includes(c.name) || collections.includes(c.id)) {
                        collectionsToImport.push(c);
                    }
                });
            }
        } else {
            collectionsToImport = [];
        }
        collectionsToImport = [...new Set(collectionsToImport)];

        // Specify vault ID for import
        const answerPath = await new Promise(resolve => {
            rl.question('\nSpecify the ID of the vault to import (optional)\n', resolve)
        });
        let path = answerPath ? answerPath : argPath;
        if (path) {
            importVault = await passwork.getVault(path);
            if (!importVault) {
                throwFatalError('The vault specified for import was not found');
            }
        }

        // Confirm import
        let confirmMessage = '\nThe following collections will be exported:\n';
        if (jsonData.collections) {
            collectionsToImport.forEach(c => {
                confirmMessage += `${c.name} (${c.id})\n`;
            });
        } else {
            confirmMessage += 'Private vault\n';
        }
        if (importVault) {
            confirmMessage += `\nExports will be made to "${importVault.name}"\n`;
        }
        confirmMessage += 'To be continued? Y/N\n';
        const answerConfirm = await new Promise(resolve => {
            rl.question(confirmMessage, resolve)
        });
        if (answerConfirm.toLowerCase() === 'y') {
            rl.close();
            importPasswords().then(() => process.exit(0)).catch((e) => {
                throwFatalError('Error', e);
            });
        } else {
            console.log('The operation has been cancelled');
            process.exit(0);
        }

        async function importPasswords() {
            const logFileName = 'import_' + new Date().getTime() + '.log';

            function logMessage(message) {
                let msg = new Date().toISOString() + ' ' + message + '\n';
                fs.appendFileSync(logFileName, msg);
                console.log(msg);
            }

            function preparePasswordFields(data, directories) {
                const vaultsNames = getDirectoriesNames(directories);
                if (data.type !== 1 && data.type !== 2) {
                    logMessage(`Object type ${data.type}, ${data.name}` +
                        ` from collections ${vaultsNames} has not been imported`);
                    return;
                }
                const fields = {
                    password: '',
                    name: data.name,
                    description: data.notes,
                    custom: [],
                };
                if (directories.length > 1) {
                    fields.description = fields.description ? (fields.description + '\n') : '';
                    fields.description += `A copy of the password can be found in ${vaultsNames}`;
                }
                if (data.login) {
                    if (data.login.username) {
                        fields.login = data.login.username;
                    }
                    if (data.login.password) {
                        fields.password = data.login.password;
                    }
                    if (data.login.totp) {
                        fields.custom.push({name: 'TOTP', value: data.login.totp, type: 'totp'});
                    }
                    if (data.login.uris) {
                        fields.url = data.login.uris.length === 1
                            ? data.login.uris[0].uri
                            : data.login.uris.reduce((a, b) => (a.uri || a) + ", " + b.uri, '')
                    }
                }
                if (data.fields) {
                    data.fields.forEach((field) => {
                        if (field.type === 0 || field.type === 2) {
                            fields.custom.push({name: String(field.name), value: String(field.value), type: 'text'});
                        } else if (field.type === 1) {
                            fields.custom.push({name: String(field.name), value: String(field.value), type: 'password'});
                        } else {
                            logMessage(`Field of type "link" of the object ${data.name}` +
                                ` from collections ${vaultsNames} has not been imported`);
                        }
                    });
                }
                return fields;
            }

            function getDirectories(passwordCollectionIds, collections) {
                const directories = [];
                for (const collectionId of passwordCollectionIds) {
                    if (collections.hasOwnProperty(collectionId)) {
                        directories.push(collections[collectionId]);
                    }
                }
                return directories;
            }

            function getDirectoriesNames(directories) {
                return directories.length > 1
                    ? directories.reduce((a, b) => (a.name || a) + ", " + b.name)
                    : directories[0].name;
            }

            logMessage('Import from file ' + jsonFileName);
            if (collectionsToImport.length) {
                if (importVault) {
                    // Collections as folders
                    const folders = {};
                    for (let c = 0; c < collectionsToImport.length; c++) {
                        const item = collectionsToImport[c];
                        folders[item.id] = await passwork.addFolder(importVault.id, item.name);
                        logMessage(`A folder has been created ${folders[item.id].name} based on the collection ${item.id}`)
                    }
                    for (let p = 0; p < jsonData.items.length; p++) {
                        const passwordData = jsonData.items[p];
                        const foldersList = getDirectories(passwordData.collectionIds, folders);
                        if (foldersList.length === 0) {
                            continue;
                        }
                        logMessage(`Imports started ${passwordData.name}`);
                        let fields = preparePasswordFields(passwordData, foldersList);
                        if (!fields) {
                            continue;
                        }
                        fields.vaultId = importVault.id;
                        for (const folder of foldersList) {
                            fields.folderId = folder.id;
                            await passwork.addPassword(Object.assign({}, fields));
                            logMessage(`Importation completed ${passwordData.name}`);
                        }
                    }
                } else {
                    // Collections as vaults
                    const vaults = [];
                    for (let c = 0; c < collectionsToImport.length; c++) {
                        const item = collectionsToImport[c];
                        const vaultId = await passwork.addVault(item.name);
                        vaults[item.id] = await passwork.getVault(vaultId);
                        logMessage(`The vault has been created ${vaults[item.id].name} based on the collection ${item.id}`)
                    }
                    for (let p = 0; p < jsonData.items.length; p++) {
                        const passwordData = jsonData.items[p];
                        const vaultsList = getDirectories(passwordData.collectionIds, vaults);
                        if (vaultsList.length === 0) {
                            continue;
                        }
                        logMessage(`Imports started ${passwordData.name}`);
                        let fields = preparePasswordFields(passwordData, vaultsList);
                        if (!fields) {
                            continue;
                        }
                        for (const vault of vaultsList) {
                            fields.vaultId = vault.id;
                            await passwork.addPassword(Object.assign({}, fields));
                            logMessage(`Importation completed ${passwordData.name}`);
                        }
                    }
                }
                logMessage(`Import completed`);
                process.exit(0);
                return;
            }
            if (collectionsToImport.length === 0 && jsonData.items[0].organizationId === null) {
                // Private vault import
                if (!importVault) {
                    const vaultId = await passwork.addVault('Private safe', true);
                    importVault = await passwork.getVault(vaultId);
                    logMessage(`Vault ${importVault.name} was created`);
                }
                const folders = {};
                if (jsonData.folders) {
                    for (const folder of jsonData.folders) {
                        folders[folder.id] = await passwork.addFolder(importVault.id, folder.name);
                    }
                }
                for (let p = 0; p < jsonData.items.length; p++) {
                    const passwordData = jsonData.items[p];
                    logMessage(`Imports started ${passwordData.name}`);
                    let fields = preparePasswordFields(passwordData, [importVault]);
                    if (!fields) {
                        continue;
                    }
                    fields.vaultId = importVault.id;
                    if (passwordData.folderId) {
                        fields.folderId = folders[passwordData.folderId].id;
                    }
                    await passwork.addPassword(Object.assign({}, fields));
                    logMessage(`Import completed ${passwordData.name}`);
                }
                logMessage(`Import completed`);
                process.exit(0);
                return;
            }
            logMessage(`Import format could not be determined`);
            process.exit(0);
        }
    } catch (e) {
        throwFatalError('Error', e);
    }
})();
```

Create an .env file and specify the Passwork host, the user's API key and their master password:

```
HOST='https://your-host/api/v4'
API_KEY=
USER_MASTER_PASS=
```

Upload the Bitwarden JSON file and run the script. The script will ask for the name of the file:

```
node import.js
```

You can also pass these parameters as arguments to the script:

```
node import.js bitwarden_export_org.json "collection 1"
```
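The script reads up to three positional arguments — the export file name, the comma-separated collection list, and the ID of the target vault — and falls back to them whenever the corresponding interactive prompt is left empty. A full invocation might look like the sketch below; `<vaultId>` is a placeholder for a real Passwork vault ID:

```
node import.js bitwarden_export_org.json "collection 1" <vaultId>
```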
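For reference, the script expects a standard unencrypted Bitwarden JSON export. Below is a trimmed, illustrative sketch limited to the fields the script actually reads; every ID and value is made up. Item type 1 is a login and type 2 a secure note — the only item types the script imports:

```json
{
  "folders": [{ "id": "f1", "name": "Personal" }],
  "collections": [{ "id": "c1", "organizationId": "o1", "name": "collection 1" }],
  "items": [
    {
      "id": "i1",
      "organizationId": "o1",
      "folderId": "f1",
      "collectionIds": ["c1"],
      "type": 1,
      "name": "Example login",
      "notes": "Optional note",
      "login": {
        "username": "user@example.com",
        "password": "secret",
        "totp": "JBSWY3DPEHPK3PXP",
        "uris": [{ "uri": "https://example.com" }]
      },
      "fields": [{ "name": "extra", "value": "some text", "type": 0 }]
    }
  ]
}
```

A personal (individual) export contains no "collections" block and the items' organizationId is null; that is the case the script treats as a private vault import.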
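To make the mapping concrete, this is roughly the object that preparePasswordFields builds for the sample login item above before it is passed to passwork.addPassword — a sketch derived from the script's logic, reusing the made-up values from the sample:

```js
// Illustrative result of preparePasswordFields(sampleLoginItem, [importVault])
const fields = {
    name: 'Example login',          // Bitwarden item name
    description: 'Optional note',   // Bitwarden notes
    login: 'user@example.com',      // login.username
    password: 'secret',             // login.password
    url: 'https://example.com',     // login.uris
    custom: [
        {name: 'TOTP', value: 'JBSWY3DPEHPK3PXP', type: 'totp'},  // login.totp
        {name: 'extra', value: 'some text', type: 'text'},        // custom field, type 0
    ],
};
// The caller then sets the destination before saving:
// fields.vaultId = importVault.id;  and, when importing into folders, fields.folderId = folder.id;
```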