Electron app Database with Dexie.js ( indexedDB ) and web worker

Database for an Electron app made with Dexie.js, which is an IndexedDB wrapper.

Overview

We will cover:

  • Running database in a worker
  • Import and export database
  • Make it persistent

Electron app and Dependencies

  1. take your electron app or clone this quick starter app
git clone https://github.com/electron/electron-quick-start
cd electron-quick-start
npm install
npm install dexie --save
npm install dexie-export-import --save
npm install stream-to-blob --save
npm install n-readlines --save

Enable node.js in electron app

  1. open main.js file and add below code in webPreferences
webPreferences: {   preload: path.join(__dirname, 'preload.js'),
// Expose Node.js APIs (require, fs, ...) to renderer scripts.
nodeIntegration: true,
// Expose Node.js APIs inside Web Workers too — required so the
// database worker can require() dexie and n-readlines.
nodeIntegrationInWorker: true
}
  • nodeIntegrationInWorker enables node.js in web workers.

Database initialization

script to initialize dexie database

// databaseInit.js — create and open the Dexie (IndexedDB) database and
// export a single shared handle for the rest of the app.
var Dexie = require('dexie');

var database = new Dexie("dexieDB");

// Schema v1: `name` is the primary key; `quality` and `issue` are indexed.
database.version(1).stores({ Friends: 'name,quality,issue' });

// Open eagerly so schema/version problems surface immediately.
database.open().catch(function (error) {
  console.error("ERROR: " + error);
});

// Share the database instance with other scripts.
module.exports = { database };
  • Catch any error(if any) while opening database.
  • Export database reference so that other script can access database.
Sample data.txt (one record per line, fields separated by the <|> delimiter used by the worker script):

ash<|>purely lovable<|>anger immature
ray<|>trust worthy<|>anger
kevin<|>understanding<|>have limits
rayon<|>innocence<|>idiot
saki<|>nature and understanding<|>too much of open

Database web worker script

Web Workers are a simple means for web content to run scripts in background threads.

const {database} = require('./databaseInit.js');const lineByLine = require('n-readlines');var duplicates=0, addedRecords=0, splitter;const liner = new lineByLine('./data.txt');let line, lineNumber = 0;database.transaction('rw', database.Friends, async () => {  while (line = liner.next()) {      splitter = line.toString().split('<|>');      await database.Friends.add({      name: splitter[0],      quality: splitter[1],      issue: splitter[2]    }).then(function(){      addedRecords++      lineNumber++;    }).catch(function (e) {      console.log(e.message);      duplicates++;      lineNumber++;    });  }}).then(function(){  postMessage("Duplicates " + duplicates.toString() + " Added " + addedRecords.toString());//use when you are using a web worker}).catch(error => {  console.error(error);});
  • Import n-readlines package to read file line by line, then initialize data file with line variable.
  • Start a database transaction(this will rollback in case of failure).
  • Start a loop for each line and split line using delimiter then add it in database.
  • If a line is added successfully, increment the addedRecords variable; if adding fails (e.g. a duplicate key), increment duplicates instead.
  • Once transaction is done send success and failure cases to renderer process (if this script is running as web worker ).

Call web worker

To communicate between DOM and web worker we need worker client(in this case we are using renderer.js).

// Worker client (renderer.js): spawn the database worker and listen for
// its completion message.
const worker = new Worker('./databaseWorker.js');

// Fired when the worker posts its summary string.
worker.onmessage = function (event) {
  console.log("Database worker process is ", event.data);
  // The worker's job is done once it reports back; free its thread.
  worker.terminate();

  // Show the summary in the page; textContent avoids interpreting the
  // message as HTML.
  document.querySelector("h1").textContent = event.data;
};

// Surface any uncaught error thrown inside the worker.
worker.onerror = function (event) {
  console.error(event.message, event);
};
  • onmessage is used to catch any message sent from web worker.
  • onerror is used to catch any error occurred in web worker.

Execute app to check run database

Now we can run the app and see our database working.

npm start

Import and Export database and Persist database

  1. Create a importExportDatabase.js file and add below code in it.
const {database} = require('./databaseInit.js');require('dexie-export-import');const toBlob = require('stream-to-blob');const fileSystem = require('fs');var export_database = async function export_database(){  console.log("Exporting Database");  const blob = await database.export({prettyJson: true});  const text = await new Response(blob).text();   try{    fileSystem.writeFile("ExportedDatabase.json", text, function(error){     if(error){        console.log(error);      }    });  }catch(error){    console.error(''+error);  }  console.log("Exported");};var import_database = async function import_database(){  console.log("Importing Database");  const stream = fileSystem.createReadStream("ExportedDatabase.json");  const blob = await toBlob(stream);  try{    await database.import(blob);  }catch(error){    console.log('IMPORT ERROR: '+ error );  }  console.log("Imported");};
async function try_persist_without_promting_user() { if (!navigator.storage || !navigator.storage.persisted) { return "never"; } let persisted = await navigator.storage.persisted(); if (persisted) { return "persisted"; } if (!navigator.permissions || !navigator.permissions.query) { return "prompt"; // It MAY be successful to prompt. Don't know. } const permission = await navigator.permissions.query({ name: "persistent-storage"});if (permission.status === "granted") { persisted = await navigator.storage.persist(); if (persisted) { return "persisted"; } else { throw new Error("Failed to persist"); } } if (permission.status === "prompt") { return "prompt"; } return "never";}var init_storage_persistence = async function init_storage_persistence() { console.log("persisting data"); const persist = await try_persist_without_promting_user(); switch (persist) { case "never": return "Not possible to persist storage"; case "persisted": return "Successfully persisted storage silently"; case "prompt": return "Not persisted, but we may prompt user when we want to."; }}module.exports = { export_database, import_database, init_storage_persistence}
  • Import the dexie-export-import, stream-to-blob, and fs packages.
  • Export database: here we first create a blob from dexie database, then we convert blob into readable text(json), lastly write that text in a file.
  • Import database: read database file then convert it in a blob, lastly import blob into database
  • Both import export work in dexie database format of json.
  • Persist database: Electron may delete the IndexedDB database once it exceeds a certain quota; to avoid that, we persist the database.
  • init_storage_persistence tries to persist the database and returns the result, which we print on the console.
  • Lastly export all functions to be accessed by other scripts.

Export and Persist database created in previous steps

  1. Since we have written code to import, export and persist the database, we can use those functions in renderer.js.
// Renderer-side driver: export the database, then attempt persistence
// and report the outcome.
var importExportDB = require("./importExportDatabase");

// Write the current database contents to ExportedDatabase.json.
importExportDB.export_database();

// init_storage_persistence resolves to a human-readable status string.
importExportDB.init_storage_persistence().then(function (status) {
  console.log(status);
});
  • Export database
  • Try to persist database and return result on console
npm start

Project Code

You can access complete code of this project here.

Support my work

Buy Me A Coffee

Additional Possibilities

  • We can call import, export function in another web worker if our database is too big.
  • dexie can be used to design more complex database so please checkout their documentation.
  • You can ask any question in comments or find me on twitter.

Full Stack Developer and Concept Designer (https://atiqgauri.github.io/)

Get the Medium app

A button that says 'Download on the App Store', and if clicked it will lead you to the iOS App store
A button that says 'Get it on, Google Play', and if clicked it will lead you to the Google Play store