This repository was archived by the owner on Mar 13, 2025. It is now read-only.

Updates to the winning submission for enrich processor #54

Merged 3 commits on Oct 21, 2020
Changes from 1 commit
Working db dump script and misc
callmekatootie committed Oct 21, 2020
commit 7f7a6cb007221349fcd110de57e9f0fc35481240
3 changes: 3 additions & 0 deletions README.md
@@ -37,6 +37,9 @@ Configuration for the application is at config/default.js and config/production.
- UBAHN_AGGREGATE_TOPIC: Kafka topic that is used to combine all create, update and delete message(s)
- ES.HOST: Elasticsearch host
- ES.DOCUMENTS: Elasticsearch index, type and id mapping for resources.
- ATTRIBUTE_GROUP_PIPELINE_ID: The id of the ingest pipeline used for enrichment with the attribute group. Default is `attributegroup-pipeline`
- SKILL_PROVIDER_PIPELINE_ID: The id of the ingest pipeline used for enrichment with the skill provider. Default is `skillprovider-pipeline`
- USER_PIPELINE_ID: The id of the ingest pipeline used for enrichment of user details. Default is `user-pipeline`

Under the `ES.DOCUMENTS` configuration you will find several nested configurations, one per resource. Each has default values that you can override using environment variables.
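
As a quick illustration (a sketch, not code from this repository), this is how one of the pipeline id overrides resolves at runtime with the `config` package the service uses:

```js
// Minimal sketch, assuming the `config` npm package used by this service.
// With SKILL_PROVIDER_PIPELINE_ID=my-skillprovider-pipeline exported in the
// environment, the lookup below resolves to that override; otherwise it falls
// back to the default defined in config/default.js.
const config = require('config')

const skillProviderPipelineId = config.get('ES.DOCUMENTS.skillprovider.pipelineId')
console.log(`Skill provider enrichment pipeline: ${skillProviderPipelineId}`)
```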

10 changes: 6 additions & 4 deletions config/default.js
@@ -51,7 +51,6 @@ module.exports = {
// ElasticSearch
ES: {
HOST: process.env.ES_HOST || 'http://localhost:9200',
ENRICH_USER_PIPELINE_NAME: process.env.ENRICH_USER_PIPELINE_NAME || 'enrich_user',
// es mapping: _index, _type, _id
DOCUMENTS: {
achievementprovider: {
@@ -64,7 +63,8 @@
},
attributegroup: {
index: process.env.ATTRIBUTE_GROUP_INDEX || 'attribute_group',
type: '_doc'
type: '_doc',
pipelineId: process.env.ATTRIBUTE_GROUP_PIPELINE_ID || 'attributegroup-pipeline'
},
organization: {
index: process.env.ORGANIZATION_INDEX || 'organization',
@@ -80,11 +80,13 @@
},
skillprovider: {
index: process.env.SKILL_PROVIDER_INDEX || 'skill_provider',
type: '_doc'
type: '_doc',
pipelineId: process.env.SKILL_PROVIDER_PIPELINE_ID || 'skillprovider-pipeline'
},
user: {
index: process.env.USER_INDEX || 'user',
type: '_doc'
type: '_doc',
pipelineId: process.env.USER_PIPELINE_ID || 'user-pipeline'
},
// sub resources under user
achievement: {
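
For context, a hedged sketch of how a per-resource `pipelineId` might be passed to the Elasticsearch client at indexing time; the `@elastic/elasticsearch` import and the `indexUser` helper are illustrative assumptions, not the service's actual code:

```js
// Illustrative sketch only; the client setup and helper below are assumed.
const config = require('config')
const { Client } = require('@elastic/elasticsearch')

const client = new Client({ node: config.get('ES.HOST') })
const userDoc = config.get('ES.DOCUMENTS.user')

async function indexUser (user) {
  // Passing `pipeline` makes Elasticsearch run the configured ingest pipeline,
  // so the document is enriched on write instead of at query time.
  await client.index({
    index: userDoc.index,
    type: userDoc.type, // '_doc', kept for parity with the config; deprecated in newer ES versions
    id: user.id,
    body: user,
    pipeline: userDoc.pipelineId
  })
}
```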
12 changes: 6 additions & 6 deletions scripts/constants.js
@@ -15,7 +15,7 @@ const topResources = {
enrichFields: ['id', 'name', 'created', 'updated', 'createdBy', 'updatedBy']
},
pipeline: {
id: 'skillprovider-pipeline',
id: config.get('ES.DOCUMENTS.skillprovider.pipelineId'),
field: 'skillProviderId',
targetField: 'skillprovider',
maxMatches: '1'
@@ -51,7 +51,7 @@ const topResources = {
enrichFields: ['id', 'name', 'organizationId', 'created', 'updated', 'createdBy', 'updatedBy']
},
pipeline: {
id: 'attributegroup-pipeline',
id: config.get('ES.DOCUMENTS.attributegroup.pipelineId'),
field: 'attributeGroupId',
targetField: 'attributegroup',
maxMatches: '1'
@@ -68,7 +68,7 @@
},
ingest: {
pipeline: {
id: 'skillprovider-pipeline'
id: config.get('ES.DOCUMENTS.skillprovider.pipelineId')
}
}
},
@@ -83,21 +83,21 @@
},
ingest: {
pipeline: {
id: 'attributegroup-pipeline'
id: config.get('ES.DOCUMENTS.attributegroup.pipelineId')
}
}
},

organization: {
index: config.get('ES.DOCUMENTS.organization.index'),
type: config.get('ES.DOCUMENTS.organization.type'),
type: config.get('ES.DOCUMENTS.organization.type')
},

user: {
index: config.get('ES.DOCUMENTS.user.index'),
type: config.get('ES.DOCUMENTS.user.type'),
pipeline: {
id: 'user-pipeline',
id: config.get('ES.DOCUMENTS.user.pipelineId'),
processors: [
{
referenceField: config.get('ES.DOCUMENTS.achievement.userField'),
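
As a rough guide to how these constants are consumed, the sketch below maps the skill provider entries onto enrich policy and ingest pipeline calls; the require paths and the `id` match field are assumptions for illustration, not taken from the repository's scripts:

```js
// Sketch only: maps the skillprovider constants onto Elasticsearch calls.
const { getESClient } = require('../src/common/es-client') // path assumed
const { topResources } = require('./constants') // path assumed

async function setupSkillProviderEnrichment () {
  const client = getESClient()
  const resource = topResources.skillprovider

  // Enrich policy built from the resource's own index and enrich fields
  await client.enrich.putPolicy({
    name: resource.enrich.policyName,
    body: {
      match: {
        indices: resource.index,
        match_field: 'id', // assumed key used to look up the skill provider
        enrich_fields: resource.enrich.enrichFields
      }
    }
  })
  await client.enrich.executePolicy({ name: resource.enrich.policyName })

  // Ingest pipeline with a single enrich processor driven by the constants
  await client.ingest.putPipeline({
    id: resource.pipeline.id,
    body: {
      description: 'Enrich documents with skill provider details',
      processors: [{
        enrich: {
          policy_name: resource.enrich.policyName,
          field: resource.pipeline.field,
          target_field: resource.pipeline.targetField,
          max_matches: resource.pipeline.maxMatches
        }
      }]
    }
  })
}
```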
41 changes: 40 additions & 1 deletion scripts/db/dropAll.js
@@ -4,24 +4,63 @@
const _ = require('lodash')
const models = require('../../src/models')
const logger = require('../../src/common/logger')
const { topResources, modelToESIndexMapping } = require('../constants')
const {
topResources,
organizationResources,
modelToESIndexMapping
} = require('../constants')
const { getESClient } = require('../../src/common/es-client')

async function main () {
const client = getESClient()

try {
logger.info('Deleting all pipelines...')
await client.ingest.deletePipeline({
id: topResources.user.pipeline.id
})
await client.ingest.deletePipeline({
id: topResources.skillprovider.pipeline.id
})
await client.ingest.deletePipeline({
id: topResources.attributegroup.pipeline.id
})
logger.info('Successfully deleted all pipelines')
} catch (e) {
console.error(e)
logger.warn('Delete all ingest pipelines failed')
}

const keys = Object.keys(models)
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
if (models[key].tableName) {
const esResourceName = modelToESIndexMapping[key]
try {
if (_.includes(_.keys(topResources), esResourceName)) {
if (topResources[esResourceName].enrich) {
logger.info(`Deleting enrich policy for ${esResourceName}`)
await client.enrich.deletePolicy({
name: topResources[esResourceName].enrich.policyName
})
logger.info(`Successfully deleted enrich policy for ${esResourceName}`)
}
logger.info(`Deleting index for ${esResourceName}`)
await client.indices.delete({
index: topResources[esResourceName].index
})
logger.info(`Successfully deleted index for ${esResourceName}`)
} else if (_.includes(_.keys(organizationResources), esResourceName)) {
logger.info('Deleting enrich policy for organization')
await client.enrich.deletePolicy({
name: organizationResources[esResourceName].enrich.policyName
})
logger.info('Successfully deleted enrich policy for organization')
}

logger.info(`Deleting data in QLDB for ${esResourceName}`)
await models.DBHelper.clear(models[key])
logger.info(`Successfully deleted data in QLDB for ${esResourceName}`)
} catch (e) {
console.error(e)
logger.warn(`drop table ${key} failed`)
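
The script removes the ingest pipelines first because Elasticsearch will not delete an enrich policy that is still referenced by a pipeline. For completeness, a hedged sketch of how the cleanup could be verified afterwards (not part of this PR; names and paths are illustrative):

```js
// Optional verification sketch; not part of the PR, paths assumed.
const config = require('config')
const { getESClient } = require('../../src/common/es-client')

async function verifyCleanup () {
  const client = getESClient()

  // Each getPipeline call rejects with a not-found error once the pipeline is gone
  for (const id of [
    config.get('ES.DOCUMENTS.user.pipelineId'),
    config.get('ES.DOCUMENTS.skillprovider.pipelineId'),
    config.get('ES.DOCUMENTS.attributegroup.pipelineId')
  ]) {
    try {
      await client.ingest.getPipeline({ id })
      console.warn(`Pipeline ${id} still exists`)
    } catch (e) {
      console.log(`Pipeline ${id} deleted`)
    }
  }

  // Any indices left behind show up here
  const { body: indices } = await client.cat.indices({ format: 'json' })
  console.log(`Indices remaining: ${indices.map(i => i.index).join(', ') || 'none'}`)
}

verifyCleanup().catch(console.error)
```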