diff --git a/.gitignore b/.gitignore
index 039b106..dfc897d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,9 @@
 # Settings file (contains API token)
 shared.js
 
+# Exported models folder
+/backup/
+
 # Dependency directories
 node_modules
 
diff --git a/README.md b/README.md
index daa7c6e..baf8fe2 100644
--- a/README.md
+++ b/README.md
@@ -78,6 +78,26 @@ node change-connection.js
 1. Updates the table's `id` property according to new CSV file name
 1. Builds the datamodel
 
+### Demo #3: Exporting Datamodel Schemas
+
+**Available on: Sisense Linux L8.0.3.150 or later**
+
+This demo script downloads all available Datamodel schemas, without data, as `.smodel` files into a folder called `backup`. These files can then be imported back into Sisense via the UI or API.
+
+**To Run:**
+
+```
+node export-all.js
+```
+
+**What it does**
+
+1. Creates a folder called `backup` if one doesn't already exist
+1. Gets the `oid` and `title` of all available Datamodel entities
+1. For each Datamodel found:
+    1. Exports the Datamodel
+    1. Stores the response JSON as a `.smodel` file
+
 ## Extending this demo
 
 This demo uses Node.js with a minimal set of dependencies, listed below.
@@ -118,6 +138,7 @@ Additionally, this project lists `eslint` and several plugins for it as a DevDep
 ├── upload.js            --> Shared library for uploading CSV/XLSX files to Sisense
 ├── demo.js              --> Demo #1 - Create datamodel from scratch
 ├── change-connection.js --> Demo #2 - Change connection of a dataset
+├── export-all.js        --> Demo #3 - Export all Datamodel schemas
 └── assets/
     ├── demo.csv         --> Sample data
     ├── demo2.csv        --> Sample data
diff --git a/export-all.js b/export-all.js
new file mode 100644
index 0000000..746401c
--- /dev/null
+++ b/export-all.js
@@ -0,0 +1,82 @@
+/**
+ * Sisense Datamodels API Demo - #3 Export All Schemas
+ * Written by Moti Granovsky, Sisense DevX, March 2020
+ *
+ * Exports all existing Datamodel schemas as .smodel files
+ *
+ * **Available on: Sisense Linux L8.0.3.150 or later**
+ */
+
+const fs = require('fs');
+const { token, baseUrl } = require('./shared.js');
+const client = new (require('./client.js'))(token, baseUrl);
+
+const TARGET_FOLDER = './backup';
+
+/**
+ * Main script flow
+ */
+async function main() {
+
+    /**
+     * Step 1: Create a folder to export to
+     */
+
+    console.log(`01. Create folder ${TARGET_FOLDER} if it didn't exist: starting`);
+
+    if (!fs.existsSync(TARGET_FOLDER)) {
+        fs.mkdirSync(TARGET_FOLDER);
+    }
+
+    console.log(`01. Create folder ${TARGET_FOLDER} if it didn't exist: done`);
+
+    /**
+     * Step 2: Get a list of all datamodel OIDs and titles
+     */
+
+    console.log('02. Getting all datamodels: starting');
+
+    const datamodels = await client.get('datamodels/schema', { fields: 'oid, title' });
+
+    console.log(`02. Getting all datamodels: done | found ${datamodels.length} models`);
+
+    /**
+     * Step 3: Iterate over all models and export each one
+     */
+
+    console.log('03. Exporting all datamodels: starting');
+
+    for (const item of datamodels) {
+        await exportAndSave(item);
+    }
+
+    console.log('03. Exporting all datamodels: done');
+
+    return true;
+}
+
+/**
+ * Exports a datamodel using the API and saves it to the target folder
+ * @param {object} datamodel Object containing a datamodel `oid` and `title`
+ */
+async function exportAndSave(datamodel) {
+
+    console.log(`--> Exporting datamodel "${datamodel.title}"`);
+
+    const schema = await client.get(`datamodel-exports/schema`, { datamodelId: datamodel.oid, type: 'schema-latest' });
+
+    console.log(` > Saving datamodel "${datamodel.title}" as "${TARGET_FOLDER}/${datamodel.title}.smodel"`);
+
+    fs.writeFileSync(`${TARGET_FOLDER}/${datamodel.title}.smodel`, JSON.stringify(schema), { encoding: 'utf8' });
+
+    console.log(` > Datamodel "${datamodel.title}" downloaded`);
+
+    return true;
+}
+
+process.on('unhandledRejection', (err) => {
+    console.error(err);
+    process.exit(1);
+});
+
+main().then(console.log).catch(console.error);
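
Note for readers following along: the script relies on `shared.js` and `client.js`, which already exist in the repo and are not part of this diff. The sketch below is a hypothetical guess at the kind of GET helper `client.get(path, query)` could wrap, assuming the Sisense v2 REST API is reachable under `{baseUrl}/api/v2/` with bearer-token authentication and that a global `fetch` is available (Node 18+); the real `client.js` may be implemented differently.

```
// Hypothetical sketch of a GET-only API client; the repo's real client.js
// is not shown in this diff and may differ.
class Client {
    constructor(token, baseUrl) {
        this.token = token;     // API token from shared.js
        this.baseUrl = baseUrl; // e.g. 'https://mysisense.example.com' (assumed shape)
    }

    // Performs GET {baseUrl}/api/v2/{path}?{query} and returns the parsed JSON body
    async get(path, query = {}) {
        const qs = new URLSearchParams(query).toString();
        const url = `${this.baseUrl}/api/v2/${path}${qs ? `?${qs}` : ''}`;
        const res = await fetch(url, {
            headers: { Authorization: `Bearer ${this.token}` }
        });
        if (!res.ok) {
            throw new Error(`GET ${url} failed: ${res.status} ${res.statusText}`);
        }
        return res.json();
    }
}

module.exports = Client;
```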
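
One caveat about the save step in `exportAndSave`: the Datamodel `title` is used verbatim as the output file name, so a title containing characters that are invalid in file names (for example `/`) would make `fs.writeFileSync` fail. Below is a minimal, optional hardening sketch; it mirrors the script's `fs` and `TARGET_FOLDER`, and the `safeFileName` helper is illustrative, not part of the demo.

```
// Hypothetical hardening of the save step (not part of the original demo):
// strip characters that are unsafe in file names before building the path.
const fs = require('fs');

const TARGET_FOLDER = './backup';

// Keep letters, digits, underscores, dots, dashes and spaces; replace the rest
function safeFileName(title) {
    return title.replace(/[^\w .-]/g, '_');
}

function saveSchema(schema, datamodel) {
    const filePath = `${TARGET_FOLDER}/${safeFileName(datamodel.title)}.smodel`;
    fs.writeFileSync(filePath, JSON.stringify(schema), { encoding: 'utf8' });
    return filePath;
}
```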