Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 32 additions & 10 deletions Controllers/dataset.controllers.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,18 @@ const cache = require('../Components/cache');
const config = require('../Config');
const path = require('path');
const { Parser } = require('json2csv');
const { datasetFields } = require('../Utils/datasetFields');
const { DATASET_DEFAULT_SORT_FIELD, datasetFields } = require('../Utils/datasetFields');
const { getObjectParam, getStringParam } = require('../Utils/params');
const datasetService = require('../Services/dataset.service');

const search = async (req, res) => {
const body = req.body;
const body = getObjectParam(req, 'body');
const data = {};
const filters = body.filters ?? {};
const filters = getObjectParam(body, 'filters');
const options = {};
const pageInfo = body.pageInfo ?? {page: 1, pageSize: 10};
const searchText = body.search_text?.trim() ?? '';
const sort = body.sort ?? {k: 'dbGaP_phs', v: 'asc'};
const pageInfo = getObjectParam(body, 'pageInfo', {page: 1, pageSize: 10});
const searchText = getStringParam(body, 'search_text');
const sort = getObjectParam(body, 'sort', {k: DATASET_DEFAULT_SORT_FIELD, v: 'asc'});

if (pageInfo.page !== parseInt(pageInfo.page, 10) || pageInfo.page <= 0) {
pageInfo.page = 1;
Expand All @@ -37,7 +38,7 @@ const search = async (req, res) => {
// sort.name = "Resource";
// sort.k = "data_resource_id";
// }
if (!(sort.v && ['asc', 'desc'].includes(sort.v))) {
if (!(sort?.v && ['asc', 'desc'].includes(sort.v))) {
sort.v = 'asc';
}

Expand All @@ -48,6 +49,17 @@ const search = async (req, res) => {

const searchResult = await datasetService.search(searchText, filters, options);

// Error response if there's an error
if (searchResult.error) {
res.status(500).json({
status: "error",
aggs: 'all',
data: {},
error: searchResult.error,
});
return;
}

if (searchResult.total !== 0 && (options.pageInfo.page - 1) * options.pageInfo.pageSize >= searchResult.total) {
let lastPage = Math.ceil(searchResult.total / options.pageInfo.pageSize);
options.pageInfo.page = lastPage;
Expand Down Expand Up @@ -119,12 +131,22 @@ const getById = async (req, res) => {
};

const getFilters = async (req, res) => {
const body = req.body;
const searchText = body.search_text?.trim() ?? '';
const searchFilters = body.filters ?? {};
const body = getObjectParam(req, 'body');
const searchText = getStringParam(body, 'search_text');
const searchFilters = getObjectParam(body, 'filters');

const filters = await datasetService.getFilters(searchText, searchFilters);

// Error response if there's an error
if (filters.error) {
res.status(500).json({
status: "error",
data: {},
error: filters.error,
});
return;
}

res.json({status: 'success', data: filters});
};

Expand Down
40 changes: 37 additions & 3 deletions Services/dataset.service.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
const config = require('../Config');
const elasticsearch = require('../Components/elasticsearch');
const cache = require('../Components/cache');
const logger = require('../Components/logger');
const mysql = require('../Components/mysql');
const queryGenerator = require('./queryGenerator');
const cacheKeyGenerator = require('./cacheKeyGenerator');
const utils = require('../Utils');

const { DATASET_RETURN_FIELDS } = require('../Utils/datasetFields.js');
const FACET_FILTERS = [
'dataset_source_repo',
Expand All @@ -16,6 +16,7 @@ const search = async (searchText, filters, options) => {
let query = null;
let result = {};
let searchableText = null;
let searchResults;

// Check searchText type
if (searchText && typeof searchText !== 'string') {
Expand Down Expand Up @@ -60,7 +61,15 @@ const search = async (searchText, filters, options) => {
result.aggs = 'all';
}

let searchResults = await elasticsearch.searchWithAggregations(config.indexDS, query);
try {
searchResults = await elasticsearch.searchWithAggregations(config.indexDS, query);
} catch (error) {
logger.error(`Error searching datasets: ${error}`);
return {
error: error?.body?.error?.root_cause ? JSON.stringify(error.body.error.root_cause).replace(/\\n/g, '') : error.message,
};
}

let datasets = searchResults.hits.hits.map((ds) => {
if (ds.inner_hits) {
const terms = Object.keys(ds.inner_hits);
Expand Down Expand Up @@ -133,11 +142,36 @@ const getFilters = async (searchText, searchFilters) => {

filters = {};

// Check searchText type
if (searchText && typeof searchText !== 'string') {
return filters;
}

// Check filters type
if (searchFilters && (typeof searchFilters !== 'object' || Array.isArray(searchFilters))) {
return filters;
}

// Format the search text
if (searchText) {
const sanitizedSearchText = searchText.replace(/[^a-zA-Z0-9]+/g, ' '); // Ignore special characters
searchableText = utils.getSearchableText(sanitizedSearchText);
}

// Must obtain counts for each filter as if the filter were not applied
await Promise.all(FACET_FILTERS.map(async (filterName) => {
// Obtain counts from Opensearch
let filtersResponse;
const query = queryGenerator.getDatasetFiltersQuery(searchText, searchFilters, filterName);
const filtersResponse = await elasticsearch.searchWithAggregations(config.indexDS, query);

try {
filtersResponse = await elasticsearch.searchWithAggregations(config.indexDS, query);
} catch (error) {
logger.error(`Error searching datasets: ${error}`);
return {
error: error?.body?.error?.root_cause ? JSON.stringify(error.body.error.root_cause).replace(/\\n/g, '') : error.message,
};
}

// Extract counts from response
filters[filterName] = filtersResponse.aggs[filterName].buckets.map((bucket) => ({
Expand Down
32 changes: 32 additions & 0 deletions Services/dataset.service.test.fixtures.js
Original file line number Diff line number Diff line change
Expand Up @@ -180,3 +180,35 @@ export const normalOpensearchResults = {
},
aggs: undefined,
};

// Example of Opensearch response with error
// Shape matches the `error.body` attached to a rejected
// `searchWithAggregations` call in the tests; the service serializes
// `error.body.error.root_cause` into its returned `error` field.
export const errorOpensearchResults = {
"error": {
"root_cause": [
{
"type": "query_shard_exception",
"reason": "No mapping found for [dbGaP_phs] in order to sort on",
"index": "datasets",
"index_uuid": "JHoru6szQ8G_-NtFJd--Bg"
}
],
"type": "search_phase_execution_exception",
"reason": "all shards failed",
"phase": "query",
"grouped": true,
"failed_shards": [
{
"shard": 0,
"index": "datasets",
"node": "YzqehJMeSham0G7I2LQdmw",
"reason": {
"type": "query_shard_exception",
"reason": "No mapping found for [dbGaP_phs] in order to sort on",
"index": "datasets",
"index_uuid": "JHoru6szQ8G_-NtFJd--Bg"
}
}
]
},
"status": 400
};
12 changes: 12 additions & 0 deletions Services/dataset.service.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import {
normalOptions,
normalSearchText,
normalOpensearchResults,
errorOpensearchResults,
} from './dataset.service.test.fixtures.js';

beforeEach(() => {
Expand Down Expand Up @@ -83,4 +84,15 @@ describe('search', () => {
const resultEmpty = await datasetService.search(normalSearchText, normalFilters, {});
expect(resultNull).toEqual(resultEmpty);
});

it('should handle Opensearch error response', async () => {
  // Simulate the client rejecting with an Opensearch error payload attached
  const error = new Error("Test Opensearch failure");
  error.body = errorOpensearchResults;
  vi.spyOn(elasticsearch, "searchWithAggregations").mockRejectedValue(error);

  // `const` at first use instead of a hoisted `let result;` declaration
  const result = await datasetService.search(normalSearchText, normalFilters, normalOptions);

  expect(elasticsearch.searchWithAggregations).toHaveBeenCalled();
  // The service must swallow the rejection and surface it as an error field
  expect(result).toHaveProperty('error');
  expect(result.error).toBeDefined();
});
});
1 change: 0 additions & 1 deletion Services/queryGenerator.js
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,6 @@ queryGenerator.getDatasetFiltersQuery = (searchText, searchFilters, excludedFiel
// Customize search query
body.aggs = {};
body.size = 0;
delete query.highlight;

// Aggregate on the target field
body.aggs[excludedField] = {
Expand Down
3 changes: 2 additions & 1 deletion Utils/datasetFields.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
const DATASET_DEFAULT_SORT_FIELD = 'dataset_title_sort';
// Maps Dataset natural field names to property names
const DATASET_SEARCH_FIELDS = [
// 'dataset_uuid',
Expand Down Expand Up @@ -93,9 +94,9 @@ const datasetFields = {
};

module.exports = {
DATASET_DEFAULT_SORT_FIELD,
DATASET_SEARCH_FIELDS,
DATASET_HIGHLIGHT_FIELDS,
DATASET_RETURN_FIELDS,
datasetFields,
};

58 changes: 58 additions & 0 deletions Utils/params.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
/**
 * Safely reads a plain-object property from an object.
 * @param {Object} obj The object to get the parameter from
 * @param {String} propName The name of the parameter to get
 * @param {Object} defaultReturn The default value to return if there are problems
 * @returns {Object} The property value, or defaultReturn when obj is not a
 *     plain object, when the property is missing/falsy, or when the value is
 *     not a plain object (arrays and null are rejected)
 */
const getObjectParam = (obj, propName, defaultReturn = {}) => {
  // typeof null === 'object', so check null explicitly — otherwise
  // getObjectParam(null, ...) would throw a TypeError reading obj[propName]
  if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) {
    return defaultReturn;
  }

  const val = obj[propName];

  // Reject missing/falsy values (null is falsy), arrays, and non-objects
  if (!val || typeof val !== 'object' || Array.isArray(val)) {
    return defaultReturn;
  }

  return val;
};

/**
 * Safely reads a string property from an object and trims it.
 * @param {Object} obj The object to get the parameter from
 * @param {String} propName The name of the parameter to get
 * @param {String} defaultReturn The default value to return if there are problems
 * @returns {String} The trimmed property value, or defaultReturn when obj is
 *     not a plain object or the property is missing, falsy, or not a string
 */
const getStringParam = (obj, propName, defaultReturn = '') => {
  // typeof null === 'object', so check null explicitly — otherwise
  // getStringParam(null, ...) would throw a TypeError reading obj[propName]
  if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) {
    return defaultReturn;
  }

  const val = obj[propName];

  // Empty strings are falsy and fall back to defaultReturn, matching the
  // original !obj[propName] guard
  if (!val || typeof val !== 'string') {
    return defaultReturn;
  }

  return val.trim();
};

// Public API: shared, type-checked request-parameter extraction helpers
module.exports = {
getObjectParam,
getStringParam,
};
Loading