diff --git a/.circleci/config.yml b/.circleci/config.yml index efa2358b..1991f346 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,8 +9,8 @@ install_dependency: &install_dependency sudo apt update sudo apt install python3-pip sudo pip3 install awscli --upgrade - sudo pip3 install docker==6.1.3 - sudo pip3 install docker-compose + # sudo pip3 install docker==6.1.3 + # sudo pip3 install docker-compose install_deploysuite: &install_deploysuite name: Installation of install_deploysuite. diff --git a/README.md b/README.md index ee92fadc..3e01a425 100755 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Dev: [![CircleCI](https://circleci.com/gh/topcoder-platform/submissions-api/tree ## Related repos -- [ES Processor](https://github.com/topcoder-platform/submission-processor-es) - Updates data in ElasticSearch +- [ES Processor](https://github.com/topcoder-platform/submission-processor-es) - Updates data in OpenSearch ## Pre-requisites @@ -179,8 +179,8 @@ npm run start ``` This command will do 2 things: - - Import the data to the database and index it to ElasticSearch - - Note, to migrate the existing data from DynamoDB to ES, run the following script + - Import the data to the database and index it to OpenSearch + - Note, to migrate the existing data from DynamoDB to OpenSearch, run the following script ``` npm run db-to-es ``` diff --git a/build.sh b/build.sh index bc81843a..0b1cc2d5 100755 --- a/build.sh +++ b/build.sh @@ -10,5 +10,20 @@ set -eo pipefail #sed -i='' "s|submissions-api:latest|$TAG|" docker/docker-compose.yml echo "" > docker/api.env docker-compose -f docker/docker-compose.yml build submissions-api -docker images +#docker images +docker create --name app submissions-api:latest +if [ -d node_modules ] +then + mv package-lock.json old-package-lock.json + docker cp app:/submissions-api/package-lock.json package-lock.json + set +eo pipefail + UPDATE_CACHE=$(cmp package-lock.json old-package-lock.json) + set -eo pipefail +else + UPDATE_CACHE=1 +fi +if [ "$UPDATE_CACHE" == 1 ] +then + docker cp app:/submissions-api/node_modules . 
+fi diff --git a/config/default.js b/config/default.js index 967e4b03..1892f7b3 100755 --- a/config/default.js +++ b/config/default.js @@ -29,11 +29,10 @@ module.exports = { TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME, AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID, AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET, - esConfig: { - HOST: process.env.ES_HOST || 'localhost:9200', - API_VERSION: process.env.ES_API_VERSION || '6.3', - ES_INDEX: process.env.ES_INDEX || 'submission', - ES_TYPE: process.env.ES_TYPE || '_doc' // ES 6.x accepts only 1 Type per index and it's mandatory to define it + osConfig: { + HOST: process.env.OS_HOST || 'localhost:9200', + OS_INDEX: process.env.OS_INDEX || 'submission-api', + OS_TYPE: process.env.OS_TYPE || '_doc' // ES 6.x accepts only 1 Type per index and it's mandatory to define it }, PAGE_SIZE: process.env.PAGE_SIZE || 20, MAX_PAGE_SIZE: parseInt(process.env.MAX_PAGE_SIZE) || 100, diff --git a/config/test.js b/config/test.js index 01ca34a8..86f6895c 100644 --- a/config/test.js +++ b/config/test.js @@ -19,9 +19,9 @@ module.exports = { BUSAPI_EVENTS_URL: 'https://api.topcoder-dev.com/v5/bus/events', BUSAPI_URL: 'https://api.topcoder-dev.com/v5', CHALLENGEAPI_V5_URL: 'https://api.topcoder-dev.com/v5/challenges', - esConfig: { - ES_INDEX: process.env.ES_INDEX_TEST || 'submission-test', - ES_TYPE: process.env.ES_TYPE_TEST || '_doc' // ES 6.x accepts only 1 Type per index and it's mandatory to define it + osConfig: { + OS_INDEX: process.env.OS_INDEX_TEST || 'submission-test', + OS_TYPE: process.env.OS_TYPE_TEST || '_doc' // ES 6.x accepts only 1 Type per index and it's mandatory to define it }, AUTH0_URL: process.env.AUTH0_URL, // Auth0 credentials for Submission Service AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE, diff --git a/package-lock.json b/package-lock.json index 580dfc43..65342f1c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -287,20 +287,6 @@ "kuler": "^2.0.0" } }, - "@elastic/elasticsearch": { - "version": "6.8.8", - "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-6.8.8.tgz", - "integrity": "sha512-51Jp3ZZ0oPqYPNlPG58XJ773MqJBx91rGNWCgVvy2UtxjxHsExAJooesOyLcoADnW0Dhyxu6yB8tziHnmyl8Vw==", - "requires": { - "debug": "^4.1.1", - "decompress-response": "^4.2.0", - "into-stream": "^5.1.0", - "ms": "^2.1.1", - "once": "^1.4.0", - "pump": "^3.0.0", - "secure-json-parse": "^2.1.0" - } - }, "@eslint-community/eslint-utils": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", @@ -401,6 +387,19 @@ "fastq": "^1.6.0" } }, + "@opensearch-project/opensearch": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/@opensearch-project/opensearch/-/opensearch-2.11.0.tgz", + "integrity": "sha512-G+SZwtWRDv90IrtTSNnCt0MQjHVyqrcIXcpwN68vjHnfbun2+RHn+ux4K7dnG+s/KwWzVKIpPFoRjg2gfFX0Mw==", + "requires": { + "aws4": "^1.11.0", + "debug": "^4.3.1", + "hpagent": "^1.2.0", + "json11": "^1.1.2", + "ms": "^2.1.3", + "secure-json-parse": "^2.4.0" + } + }, "@sideway/address": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz", @@ -1976,14 +1975,6 @@ "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", "dev": true }, - "decompress-response": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz", - "integrity": 
"sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==", - "requires": { - "mimic-response": "^2.0.0" - } - }, "deep-eql": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", @@ -3063,15 +3054,6 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, - "from2": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", - "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", - "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.0" - } - }, "fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", @@ -3421,6 +3403,11 @@ "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", "dev": true }, + "hpagent": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz", + "integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==" + }, "http-errors": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", @@ -3627,15 +3614,6 @@ "side-channel": "^1.0.4" } }, - "into-stream": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-5.1.1.tgz", - "integrity": "sha512-krrAJ7McQxGGmvaYbB7Q1mcA+cRwg9Ij2RfWIeVesNBgVDZmzY/Fa4IpZUT3bmdRzMzdf/mzltCG2Dq99IZGBA==", - "requires": { - "from2": "^2.3.0", - "p-is-promise": "^3.0.0" - } - }, "invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", @@ -4182,6 +4160,11 @@ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==" }, + "json11": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/json11/-/json11-1.1.2.tgz", + "integrity": "sha512-5r1RHT1/Gr/jsI/XZZj/P6F11BKM8xvTaftRuiLkQI9Z2PFDukM82Ysxw8yDszb3NJP/NKnRlSGmhUdG99rlBw==" + }, "json5": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", @@ -4558,11 +4541,6 @@ "mime-db": "1.52.0" } }, - "mimic-response": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz", - "integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==" - }, "minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -9278,11 +9256,6 @@ "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", "dev": true }, - "p-is-promise": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-3.0.0.tgz", - "integrity": "sha512-Wo8VsW4IRQSKVXsJCn7TomUaVtyfjVDn3nUP7kE967BQk0CwFpdbZs0X0uk5sW9mkBa9eNM7hCMaG93WUAwxYQ==" - }, "p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -9555,6 +9528,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, "requires": { "end-of-stream": 
"^1.1.0", "once": "^1.3.1" diff --git a/package.json b/package.json index 75f78a5a..72035cdf 100755 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ "local:init": "npm run init-db && npm run init-es" }, "dependencies": { - "@elastic/elasticsearch": "^6.8.8", + "@opensearch-project/opensearch": "^2.11.0", "amazon-s3-uri": "0.1.1", "aws-sdk": "^2.265.1", "axios": "^1.4.0", diff --git a/scripts/ESloadHelper.js b/scripts/ESloadHelper.js index d7e73830..91cf53bd 100644 --- a/scripts/ESloadHelper.js +++ b/scripts/ESloadHelper.js @@ -12,107 +12,102 @@ const submissions = require('./data/Submissions.json') const reviews = require('./data/Reviews.json') const reviewSummations = require('./data/ReviewSummations.json') -const esClient = helper.getEsClient() +const osClient = helper.getOsClient() /* - * Delete all data from ES + * Delete all data from OS */ -function deleteDatafromES () { - logger.info('Clear data from ES if any') +function deleteDatafromOS () { + logger.info('Clear data from OS if any') const filter = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + index: config.get('osConfig.OS_INDEX'), q: '*' } - return esClient.deleteByQuery(filter) + return osClient.deleteByQuery(filter) } /* - * Load Review Types from JSON into ES + * Load Review Types from JSON into OS */ async function loadReviewTypes () { const promises = [] reviewTypes.forEach((reviewType) => { const record = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + index: config.get('osConfig.OS_INDEX'), id: reviewType.id, body: _.extend({ resource: 'reviewType' }, reviewType) } - promises.push(esClient.create(record)) + promises.push(osClient.index(record)) }) await Promise.all(promises) } /* - * Load Submissions from JSON into ES + * Load Submissions from JSON into OS */ async function loadSubmissions () { const promises = [] submissions.forEach((submission) => { const record = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + index: config.get('osConfig.OS_INDEX'), id: submission.id, body: _.extend({ resource: 'submission' }, submission) } - promises.push(esClient.create(record)) + promises.push(osClient.index(record)) }) await Promise.all(promises) } /* - * Load Reviews from JSON into ES + * Load Reviews from JSON into OS */ async function loadReviews () { const promises = [] reviews.forEach((review) => { const record = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + index: config.get('osConfig.OS_INDEX'), id: review.id, body: _.extend({ resource: 'review' }, review) } - promises.push(esClient.create(record)) + promises.push(osClient.index(record)) }) await Promise.all(promises) } /* - * Load Review Summations from JSON into ES + * Load Review Summations from JSON into OS */ async function loadReviewSummations () { const promises = [] reviewSummations.forEach((reviewSummation) => { const record = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + index: config.get('osConfig.OS_INDEX'), id: reviewSummation.id, body: _.extend({ resource: 'reviewSummation' }, reviewSummation) } - promises.push(esClient.create(record)) + promises.push(osClient.index(record)) }) await Promise.all(promises) } /* - * Load data into ES after removing existing data + * Load data into OS after removing existing data */ -async function loadES () { - await deleteDatafromES() - logger.info('ES Loading started!') +async function loadOS () { + await deleteDatafromOS() + 
logger.info('OS Loading started!') await loadReviewTypes() await loadSubmissions() await loadReviews() await loadReviewSummations() - logger.info('ES Loading succeeded!') + logger.info('OS Loading succeeded!') } module.exports = { - deleteDatafromES, + deleteDatafromOS, loadReviewTypes, loadSubmissions, loadReviews, loadReviewSummations, - loadES + loadOS } diff --git a/scripts/createIndex.js b/scripts/createIndex.js index 9fd04cfb..cbea146c 100644 --- a/scripts/createIndex.js +++ b/scripts/createIndex.js @@ -1,5 +1,5 @@ /** - * Create index in Elasticsearch + * Create index in OpenSearch */ const config = require('config') @@ -8,9 +8,9 @@ const helper = require('../src/common/helper') async function createIndex () { logger.info('ES Index creation started!') - const esClient = helper.getEsClient() + const osClient = helper.getOsClient() const body = { mappings: {} } - body.mappings[config.get('esConfig.ES_TYPE')] = { + body.mappings[config.get('osConfig.OS_TYPE')] = { // keyword fields will do exact match // text field will be analyzed // fields not specified below will be 'text' by default @@ -39,11 +39,11 @@ async function createIndex () { reviewSummation: { type: 'nested' } } } - await esClient.indices.create({ - index: config.get('esConfig.ES_INDEX'), + await osClient.indices.create({ + index: config.get('osConfig.OS_INDEX'), body }) - logger.info('ES Index creation succeeded!') + logger.info('OS Index creation succeeded!') process.exit(0) } diff --git a/scripts/deleteIndex.js b/scripts/deleteIndex.js index ec7c67e4..d4e02398 100644 --- a/scripts/deleteIndex.js +++ b/scripts/deleteIndex.js @@ -1,5 +1,5 @@ /** - * Delete index in Elasticsearch + * Delete index in OpenSearch */ const config = require('config') @@ -7,12 +7,12 @@ const logger = require('../src/common/logger') const helper = require('../src/common/helper') async function deleteIndex () { - logger.info('ES Index deletion started!') - const esClient = helper.getEsClient() - await esClient.indices.delete({ - index: config.get('esConfig.ES_INDEX') + logger.info('OS Index deletion started!') + const osClient = helper.getOsClient() + await osClient.indices.delete({ + index: config.get('osConfig.OS_INDEX') }) - logger.info('ES Index deletion succeeded!') + logger.info('OS Index deletion succeeded!') process.exit(0) } diff --git a/scripts/migrateFromDBToES.js b/scripts/migrateFromDBToES.js index f27055c4..c6e5bcaf 100644 --- a/scripts/migrateFromDBToES.js +++ b/scripts/migrateFromDBToES.js @@ -8,10 +8,10 @@ const logger = require('../src/common/logger') const dbhelper = require('../src/common/dbhelper') const helper = require('../src/common/helper') -const esClient = helper.getEsClient() +const osClient = helper.getOsClient() /* - * Migrate records from DB to ES + * Migrate records from DB to OS * @param tableName {String} DynamoDB table name * @param customFunction {Function} custom function to handle record * @returns {Promise} */ @@ -38,11 +38,10 @@ async function migrateRecords (tableName, customFunction) { // data body.push(_.extend({ resource: helper.camelize(tableName) }, item)) - if (i % config.ES_BATCH_SIZE === 0) { + if (i % config.OS_BATCH_SIZE === 0) { logger.debug(`${tableName} - Processing batch # ` + batchCounter) - await esClient.bulk({ - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + await osClient.bulk({ + index: config.get('osConfig.OS_INDEX'), body }) body = [] @@ -57,9 +56,8 @@ async function migrateRecords (tableName, customFunction) { } else { if (body.length > 0) { 
logger.debug(`${tableName} - Final batch processing...`) - await esClient.bulk({ - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + await osClient.bulk({ + index: config.get('osConfig.OS_INDEX'), body }) } diff --git a/scripts/updateToV5ChallengeIdSmallChunk.js b/scripts/updateToV5ChallengeIdSmallChunk.js index 3da45872..0902c906 100644 --- a/scripts/updateToV5ChallengeIdSmallChunk.js +++ b/scripts/updateToV5ChallengeIdSmallChunk.js @@ -8,7 +8,7 @@ const logger = require('../src/common/logger') const dbhelper = require('../src/common/dbhelper') const helper = require('../src/common/helper') -const esClient = helper.getEsClient() +const osClient = helper.getOsClient() /** * Update Submission's challenge id to v5 @@ -46,13 +46,12 @@ async function updateRecord (submission, failedContainer) { await dbhelper.updateRecord(record) try { - const response = await esClient.update({ - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), + const response = await osClient.update({ + index: config.get('osConfig.OS_INDEX'), id: submission.id, body: { doc: { challengeId: v5challengeId, legacyChallengeId: submission.challengeId } } }) - logger.info(`updated ES for submission ${submission.id}, response: ${JSON.stringify(response)}`) + logger.info(`updated OS for submission ${submission.id}, response: ${JSON.stringify(response)}`) } catch (error) { logger.error(error.message) } diff --git a/src/common/helper.js b/src/common/helper.js index 17c69b70..e407f0f5 100755 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -7,7 +7,7 @@ const _ = require('lodash') const AWS = require('aws-sdk') const AmazonS3URI = require('amazon-s3-uri') const config = require('config') -const elasticsearch = require('@elastic/elasticsearch') +const opensearch = require('@opensearch-project/opensearch') const logger = require('./logger') const busApi = require('tc-bus-api-wrapper') const errors = require('common-errors') @@ -17,8 +17,8 @@ const { axiosInstance } = require('./axiosInstance') AWS.config.region = config.get('aws.AWS_REGION') const s3 = new AWS.S3() -// ES Client mapping -const esClients = {} +// OS Client mapping +const osClients = {} const REVIEW_TYPES_KEY = 'ReviewTypes' @@ -79,13 +79,13 @@ function getBusApiClient () { /** * Get ES Client - * @return {Object} Elastic Host Client Instance + * @return {Object} Open search Host Client Instance */ -function getEsClient () { - if (!esClients.client) { - esClients.client = new elasticsearch.Client({ node: config.get('esConfig.HOST') }) +function getOsClient () { + if (!osClients.client) { + osClients.client = new opensearch.Client({ node: config.get('osConfig.HOST') }) } - return esClients.client + return osClients.client } /* @@ -136,12 +136,12 @@ async function getReviewTypeId (scorecardName) { } /** - * Parse the Query filters and prepare ES filter + * Parse the Query filters and prepare OS filter * @param {Object} query Query filters passed in HTTP request - * @param {String} actResource Resource name in ES - * @return {Object} search request body that can be passed to ES + * @param {String} actResource Resource name in OS + * @return {Object} search request body that can be passed to OS */ -function prepESFilter (query, actResource) { +function prepOSFilter (query, actResource) { const pageSize = query.perPage || config.get('PAGE_SIZE') const page = query.page || 1 const { sortBy, orderBy } = query @@ -195,18 +195,14 @@ function prepESFilter (query, actResource) { } const searchCriteria = { - index: 
config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), size: pageSize, - from: (page - 1) * pageSize, // Es Index starts from 0 - body: { - _source: { - exclude: ['resource'] // Remove the resource field which is not required - }, - query: { - bool: { - filter: boolQuery - } + from: (page - 1) * pageSize, // OS Index starts from 0 + _source: { + exclude: ['resource'] // Remove the resource field which is not required + }, + query: { + bool: { + filter: boolQuery } } } @@ -227,7 +223,7 @@ function prepESFilter (query, actResource) { } if (esQuerySortArray.length > 0) { - searchCriteria.body.sort = esQuerySortArray + searchCriteria.sort = esQuerySortArray } return searchCriteria @@ -240,12 +236,16 @@ function prepESFilter (query, actResource) { * @return {Promise} Data fetched from ES based on the filters */ async function fetchFromES (query, resource) { - const esClient = getEsClient() - // Construct ES filter - const filter = prepESFilter(query, resource) + const osClient = getOsClient() + // Construct OS filter + const filter = prepOSFilter(query, resource) // Search with constructed filter - logger.debug(`The elasticsearch query is ${JSON.stringify(filter)}`) - const docs = await esClient.search(filter) + logger.debug(`The opensearch query is ${JSON.stringify(filter)}`) + const docs = await osClient.search({ + index: config.get('osConfig.OS_INDEX'), + body: filter +}) + // Extract data from hits const rows = _.map(docs.body.hits.hits, single => single._source) @@ -916,7 +916,7 @@ function flushInternalCache () { module.exports = { wrapExpress, autoWrapExpress, - getEsClient, + getOsClient, fetchFromES, camelize, setPaginationHeaders, diff --git a/src/services/HealthCheckService.js b/src/services/HealthCheckService.js index 5ad9978b..ccd578d1 100644 --- a/src/services/HealthCheckService.js +++ b/src/services/HealthCheckService.js @@ -6,18 +6,16 @@ const errors = require('common-errors') const helper = require('../common/helper') /** - * Check if the elasticsearch connection is active + * Check if the opensearch connection is active */ async function check () { - const esClient = helper.getEsClient() + const osClient = helper.getOsClient() try { - await esClient.ping({}, { - requestTimeout: 30000 - }) + await osClient.ping() } catch (e) { - throw new errors.HttpStatusError(503, 'Elasticsearch instance cannot be reached') + throw new errors.HttpStatusError(503, 'Opensearch instance cannot be reached') } return { diff --git a/src/services/SubmissionService.js b/src/services/SubmissionService.js index 5635a94b..b5625640 100755 --- a/src/services/SubmissionService.js +++ b/src/services/SubmissionService.js @@ -741,32 +741,31 @@ deleteSubmission.schema = joi.object({ async function countSubmissions (challengeId) { logger.debug(`countSubmissions ${challengeId}`) const esQuery = { - index: config.get('esConfig.ES_INDEX'), - type: config.get('esConfig.ES_TYPE'), size: 0, - body: { - query: { - bool: { - must: [ - { term: { resource: 'submission' } }, - { term: { challengeId } } - ] - } - }, - aggs: { - group_by_type: { - terms: { - field: 'type' - } + query: { + bool: { + must: [ + { term: { resource: 'submission' } }, + { term: { challengeId } } + ] + } + }, + aggs: { + group_by_type: { + terms: { + field: 'type' } } } } - const esClient = helper.getEsClient() + const osClient = helper.getOsClient() let result try { - result = await esClient.search(esQuery) + result = await osClient.search({ + index: config.get('osConfig.OS_INDEX'), + body: esQuery + }) } catch (err) { 
logger.error(`Get Submission Count Error ${JSON.stringify(err)}`) throw err diff --git a/test/e2e/prepare.js b/test/e2e/prepare.js index fb23e5ec..587c3813 100644 --- a/test/e2e/prepare.js +++ b/test/e2e/prepare.js @@ -1,15 +1,15 @@ /* - * Setting up ES for tests + * Setting up OS for tests */ // During the test the env variable is set to test process.env.NODE_ENV = 'test' const prepare = require('mocha-prepare') -const { deleteDatafromES } = require('../../scripts/ESloadHelper') +const { deleteDatafromOS } = require('../../scripts/ESloadHelper') prepare(function (done) { - deleteDatafromES().then((data) => { + deleteDatafromOS().then((data) => { done() }) }) diff --git a/test/unit/prepare.js b/test/unit/prepare.js index b6227853..59e14286 100644 --- a/test/unit/prepare.js +++ b/test/unit/prepare.js @@ -112,31 +112,31 @@ prepare(function (done) { .reply(200, testData.testChallengeResources) .get(resourceRolesApi.path) .reply(200, testData.testResourceRoles) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'reviewType') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'reviewType') .query(true) .reply(200, testData.testReviewTypesES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'submission') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'submission') .query(true) .reply(200, testData.testSubmissionsES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'review') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'review') .query(true) .reply(200, testData.testReviewsES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'reviewSummation') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'reviewSummation') .query(true) .reply(200, testData.testReviewSummationsES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'nonExistent') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'nonExistent') .query(true) .reply(200, { hits: { total: 0, hits: [] } }) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'c56a4180-65aa-42ec-a945-5fd21dec0501') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'c56a4180-65aa-42ec-a945-5fd21dec0501') .query(true) .reply(200, testData.testReviewTypeES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'a12a4180-65aa-42ec-a945-5fd21dec0501') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'a12a4180-65aa-42ec-a945-5fd21dec0501') .query(true) .reply(200, testData.testSubmissionES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'd24d4180-65aa-42ec-a945-5fd21dec0502') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'd24d4180-65aa-42ec-a945-5fd21dec0502') .query(true) .reply(200, testData.testReviewES) - .post(`/${config.esConfig.ES_INDEX}/${config.esConfig.ES_TYPE}/_search`, 'e45e4180-65aa-42ec-a945-5fd21dec1504') + .post(`/${config.osConfig.OS_INDEX}/${config.osConfig.OS_TYPE}/_search`, 'e45e4180-65aa-42ec-a945-5fd21dec1504') .query(true) .reply(200, testData.testReviewSummationES)
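For anyone who wants to exercise the migrated client locally, below is a minimal smoke-test sketch, not part of this change, against `@opensearch-project/opensearch@2.x`. It assumes a reachable cluster at `OS_HOST` (protocol included, e.g. `http://localhost:9200`) and the default `submission-api` index introduced in `config/default.js`; the file name and query are illustrative only.

```js
// scripts/osSmokeTest.js (hypothetical file name, not included in this diff)
const { Client } = require('@opensearch-project/opensearch')

// OS_HOST is the same variable the new osConfig.HOST reads; a protocol is assumed here
const client = new Client({ node: process.env.OS_HOST || 'http://localhost:9200' })

async function main () {
  // Connectivity check, mirroring the updated HealthCheckService
  await client.ping()

  // Small typeless search against the migrated index; opensearch-js wraps results in `body`
  const { body } = await client.search({
    index: process.env.OS_INDEX || 'submission-api',
    body: {
      size: 1,
      query: { term: { resource: 'submission' } }
    }
  })
  console.log('total hits:', JSON.stringify(body.hits.total))
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})
```

Run it with `node scripts/osSmokeTest.js` once the index exists; a hit count (or zero) indicates the client wiring works without the `type` parameter the old Elasticsearch calls required.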