diff --git a/Projects/Data-Mining/TS/Bot-Modules/API-Data-Fetcher-Bot/FetchingProcess.js b/Projects/Data-Mining/TS/Bot-Modules/API-Data-Fetcher-Bot/FetchingProcess.js
index d44a1f72c3..6c5891695d 100644
--- a/Projects/Data-Mining/TS/Bot-Modules/API-Data-Fetcher-Bot/FetchingProcess.js
+++ b/Projects/Data-Mining/TS/Bot-Modules/API-Data-Fetcher-Bot/FetchingProcess.js
@@ -10,6 +10,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
 
     let fileStorage = TS.projects.foundations.taskModules.fileStorage.newFileStorage(processIndex)
     let statusDependencies
+    let lastQueryRun // This holds the timestamp of the last run, passed to the query string.
 
     return thisObject;
 
@@ -81,7 +82,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 processIndex,
                 MODULE_NAME,
                 'API Data Fetcher Bot',
-                undefined,
+                { errorDetails: "startProcess -> API Map Reference Node Missing. " },
                 'API Map Reference Node Missing',
                 TS.projects.foundations.globals.taskConstants.TASK_NODE.bot.processes[processIndex]
             )
@@ -93,7 +94,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 processIndex,
                 MODULE_NAME,
                 'API Data Fetcher Bot',
-                undefined,
+                { errorDetails: "startProcess -> API Map Reference Parent Missing. " },
                 'Reference Parent Missing',
                 TS.projects.foundations.globals.taskConstants.TASK_NODE.bot.processes[processIndex].apiMapReference
             )
@@ -109,7 +110,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 processIndex,
                 MODULE_NAME,
                 'API Data Fetcher Bot',
-                { missingProperty: "hostName" },
+                { errorDetails: "startProcess -> API Map Config hostName property missing. ", missingProperty: "hostName" },
                 'Config Property Missing',
                 apiMap
             )
@@ -134,7 +135,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 processIndex,
                 MODULE_NAME,
                 'API Data Fetcher Bot',
-                undefined,
+                { errorDetails: "startProcess -> API Version Node Missing. " },
                 'API Version Node Missing',
                 apiMap
             )
@@ -144,7 +145,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 processIndex,
                 MODULE_NAME,
                 'API Data Fetcher Bot',
-                undefined,
+                { errorDetails: "startProcess -> Process Output Node Missing. " },
                 'Process Output Node Missing',
                 processNode
             )
@@ -174,6 +175,8 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
         let dataReceivedArray = [] // This holds the cumulative data received from all calls to the API (multiple pages of data).
         let dataReceivedObject // This holds a single data object when there is no pagination. The Data object is a node within the whole response received from the API.
         let pageNumberParameter // This holds the node that represents a Page Number parameter.
+        let pageToken // This holds the current Page Token value.
+        let pageTokenPath // This holds the path within apiData to the Page Token.
         let pageQueryString = "" // This holds the part of the query string that deals with page numbers.
         let recordPropertiesNodePathMap = new Map() // This holds the calculated nodePath for each record property, in order to help find the property value on the data received.
 
@@ -211,7 +214,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                     processIndex,
                     MODULE_NAME,
                     'API Data Fetcher Bot',
-                    undefined,
+                    { errorDetails: "getApiEndpointAndSchema -> API Response Field Not Descendant From Endpoint. " },
                     'API Response Field Not Descendant From Endpoint',
                     apiResponseField
                 )
@@ -233,7 +236,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                    processIndex,
                    MODULE_NAME,
                    'API Data Fetcher Bot',
-                   undefined,
+                   { errorDetails: "getApiEndpointAndSchema -> More Than Two API Endpoints Detected. " },
                    'More Than Two Endpoints Detected',
                    apiResponseField
                )
@@ -276,6 +279,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                 if (recordProperty.apiResponseFieldReference.referenceParent !== undefined) {
                     let apiResponseField = recordProperty.apiResponseFieldReference.referenceParent
                     let nodePath = getPath(apiResponseField)
+                    nodePath = nodePath.replace('..', '.') // Clean up a duplicated dot that the path calculation can leave behind.
                     recordPropertiesNodePathMap.set(recordProperty.config.codeName, nodePath)
 
                     function getPath(node, path) {
@@ -343,12 +347,9 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                        This will be treated differently since we will need to iterate to get each possible page.
                        The page number will not be added here to the query string.
                        */
-                        if (apiQueryParameter.config.isPageNumber === true) {
+                        if (apiQueryParameter.config.isPageNumber === true || apiQueryParameter.config.isPageToken === true) {
                            pageNumberParameter = apiQueryParameter
                        } else {
-                            /*
-                            Whatever is in Value is the default value.
-                            */
                            if (apiQueryParameter.config.codeName !== undefined && apiQueryParameter.config.value !== undefined) {
                                parametersMap.set(apiQueryParameter.config.codeName, apiQueryParameter.config.value)
                            }
@@ -465,6 +466,16 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                    parameterValue = Math.trunc((new Date()).valueOf() / SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS) * SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS
                    break
                }
+                case '@LastQueryRunISO1601': {
+                    if (lastQueryRun !== undefined) {
+                        parameterValue = new Date(new Date(thisReport.file.lastQueryRun).valueOf() + SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS).toISOString()
+                        lastQueryRun = parameterValue
+                    } else {
+                        parameterValue = new Date(((new Date()).valueOf()) - SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS).toISOString()
+                        lastQueryRun = parameterValue
+                    }
+                    break
+                }
                case '@EndCurrentMinute': {
                    parameterValue = Math.trunc((new Date()).valueOf() / SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS) * SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS + SA.projects.foundations.globals.timeConstants.ONE_MIN_IN_MILISECONDS - 1
                    break
                }
@@ -506,66 +517,133 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
            of pages available to fetch, and we will also assume that once we get an empty array
            that will mean that we have already requested the last page with data.
            */
-            let initialPage = 1
-            let finalPage = 1 // Number.MAX_SAFE_INTEGER
-            if (thisReport.file.lastPage !== undefined) {
-                initialPage = thisReport.file.lastPage[endpointNode.config.codeName]
-            }
-            for (let page = initialPage; page <= finalPage; page++) {
-                if (queryString === "") {
-                    pageQueryString = "?" + pageNumberParameter.config.codeName + "=" + page
-                } else {
-                    pageQueryString = "&" + pageNumberParameter.config.codeName + "=" + page
-                }
-                lastPage[endpointNode.config.codeName] = page
-
-                await sleep(apiMap.config.millisecondsBetweenCalls)
-                let fetchResult = await fetchAPIData()
-                switch (fetchResult) {
-                    case 'NO_CONNECTION': {
-                        /*
-                        When there is not Internet Connection or the server can not be reached
-                        we will return requesting a retry later.
-                        */
-                        TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
-                            "[WARN] start -> fetchAllPages -> Server not found or no Internet Connection. Requesting a Retry. ")
-                        callBackFunction(TS.projects.foundations.globals.standardResponses.DEFAULT_RETRY_RESPONSE)
-                        return 'RETRYING'
-                    }
-                    case 'UNEXPECTED_API_RESPONSE': {
-                        /*
-                        Any unexpected response will abort this loop and allow the process to continue,
-                        possibly saving accumulated data.
-                        */
-                        page = finalPage
-                        break
-                    }
-                    case 'ERROR_CODE_RECEIVED': {
-                        /*
-                        An error code at the response will abort this loop and allow the process to continue,
-                        possibly saving accumulated data.
-                        */
-                        page = finalPage
-                        break
-                    }
-                    case 'NO_MORE_PAGES': {
-                        /*
-                        We will just abort this loop and continue.
-                        */
-                        page = finalPage
-                        break
-                    }
-                    case 'PAGE_FETCHED': {
-                        /*
-                        Just stay at the current loop and try to fetch more pages.
-                        This is how we accumulate the data from multiple pages into a single array.
-                        */
-                        dataReceivedArray = dataReceivedArray.concat(apiData)
-                        break
-                    }
-                }
-            }
+            if (pageNumberParameter.config.isPageNumber === true) {
+                let initialPage = 1
+                let finalPage = 1 // Number.MAX_SAFE_INTEGER
+                if (thisReport.file.lastPage !== undefined) {
+                    initialPage = thisReport.file.lastPage[endpointNode.config.codeName]
+                }
+                for (let page = initialPage; page <= finalPage; page++) {
+                    if (queryString === "") {
+                        pageQueryString = "?" + pageNumberParameter.config.codeName + "=" + page
+                    } else {
+                        pageQueryString = "&" + pageNumberParameter.config.codeName + "=" + page
+                    }
+
+                    lastPage[endpointNode.config.codeName] = page
+
+                    await sleep(apiMap.config.millisecondsBetweenCalls)
+                    let fetchResult = await fetchAPIData()
+
+                    switch (fetchResult) {
+                        case 'NO_CONNECTION': {
+                            /*
+                            When there is no Internet connection or the server cannot be reached
+                            we will return requesting a retry later.
+                            */
+                            TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
+                                "[WARN] start -> fetchAllPages -> Server not found or no Internet Connection. Requesting a Retry. ")
+                            callBackFunction(TS.projects.foundations.globals.standardResponses.DEFAULT_RETRY_RESPONSE)
+                            return 'RETRYING'
+                        }
+                        case 'UNEXPECTED_API_RESPONSE': {
+                            /*
+                            Any unexpected response will abort this loop and allow the process to continue,
+                            possibly saving accumulated data.
+                            */
+                            page = finalPage
+                            break
+                        }
+                        case 'ERROR_CODE_RECEIVED': {
+                            /*
+                            An error code in the response will abort this loop and allow the process to continue,
+                            possibly saving accumulated data.
+                            */
+                            page = finalPage
+                            break
+                        }
+                        case 'NO_MORE_PAGES': {
+                            /*
+                            We will just abort this loop and continue.
+                            */
+                            page = finalPage
+                            break
+                        }
+                        case 'PAGE_FETCHED': {
+                            /*
+                            Just stay in the current loop and try to fetch more pages.
+                            This is how we accumulate the data from multiple pages into a single array.
+                            */
+                            dataReceivedArray = dataReceivedArray.concat(apiData)
+                            break
+                        }
+                    }
+                }
+            } else if (pageNumberParameter.config.isPageToken === true) {
+
+                pageToken = undefined
+
+                do {
+
+                    if (pageToken !== undefined) {
+                        if (queryString === "") {
+                            pageQueryString = "?" + pageNumberParameter.config.codeName + "=" + pageToken
+                        } else {
+                            pageQueryString = "&" + pageNumberParameter.config.codeName + "=" + pageToken
+                        }
+                    }
+
+                    await sleep(apiMap.config.millisecondsBetweenCalls)
+                    let fetchResult = await fetchAPIData()
+
+                    switch (fetchResult) {
+                        case 'NO_CONNECTION': {
+                            /*
+                            When there is no Internet connection or the server cannot be reached
+                            we will return requesting a retry later.
+                            */
+                            TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
+                                "[WARN] start -> fetchAllPages -> Server not found or no Internet Connection. Requesting a Retry. ")
+                            callBackFunction(TS.projects.foundations.globals.standardResponses.DEFAULT_RETRY_RESPONSE)
+                            return 'RETRYING'
+                        }
+                        case 'UNEXPECTED_API_RESPONSE': {
+                            /*
+                            Any unexpected response will abort this loop and allow the process to continue,
+                            possibly saving accumulated data.
+                            */
+                            pageToken = false
+                            break
+                        }
+                        case 'ERROR_CODE_RECEIVED': {
+                            /*
+                            An error code in the response will abort this loop and allow the process to continue,
+                            possibly saving accumulated data.
+                            */
+                            pageToken = false
+                            break
+                        }
+                        case 'NO_MORE_PAGES': {
+                            /*
+                            A page token works slightly differently from a page number: with a page number we continue
+                            until we find a blank page, while with a page token we continue until we don't get a token,
+                            but we still need to concat the last batch of data received.
+                            */
+                            dataReceivedArray = dataReceivedArray.concat(apiData)
+                            pageToken = false
+                            break
+                        }
+                        case 'PAGE_FETCHED': {
+                            /*
+                            Just stay in the current loop and try to fetch more pages.
+                            This is how we accumulate the data from multiple pages into a single array.
+                            */
+                            dataReceivedArray = dataReceivedArray.concat(apiData)
+                            break
+                        }
+                    }
+                } while (pageToken !== false)
+            }
         }
     }
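The token branch above follows a common cursor-pagination pattern: `pageToken` starts as `undefined` (no token parameter on the first request) and is set to `false` as a sentinel to leave the loop. Stripped of the bot plumbing, the control flow is roughly the following sketch, where `fetchPage` is a hypothetical stand-in for `fetchAPIData` plus the token extraction:

```javascript
// Minimal sketch of the do/while cursor pagination used above (fetchPage is hypothetical).
async function fetchAllTokenPages(fetchPage) {
    let dataReceivedArray = []
    let pageToken = undefined // undefined = first request, no token yet.
    do {
        let { data, nextToken } = await fetchPage(pageToken)
        dataReceivedArray = dataReceivedArray.concat(data)
        pageToken = nextToken === undefined ? false : nextToken // false = sentinel, stop looping.
    } while (pageToken !== false)
    return dataReceivedArray
}

// Usage with a fake two-page API:
fetchAllTokenPages(async (token) =>
    token === undefined
        ? { data: [1, 2], nextToken: 'abc' }
        : { data: [3], nextToken: undefined }
).then(console.log) // [ 1, 2, 3 ]
```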
@@ -599,7 +677,31 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
                    "[INFO] start -> startProcess -> fetchAPIData -> url = " + url)
 
-                fetch(url)
+                /*
+                Look to see if there is a key reference and, if so, include the keys in the fetch headers.
+                */
+                let apiAuthKey = TS.projects.foundations.globals.taskConstants.TASK_NODE.keyReference.referenceParent
+                let options = {}
+
+                if (apiAuthKey !== undefined) {
+                    let authKeyConfig = apiAuthKey.config
+
+                    if (authKeyConfig !== undefined) {
+                        if (authKeyConfig.bearer_token !== undefined) {
+                            let token = authKeyConfig.bearer_token
+                            let method = "GET";
+                            options['method'] = method
+                            options['headers'] = {
+                                "Content-type": "application/json",
+                                Authorization: "Bearer " + token,
+                            }
+                        }
+                    }
+                }
+
+                console.log(url)
+                fetch(url, options)
                .then((response) => {
 
                    let apiResponseSchemaNode
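The header construction above reads the bearer token from the API Authorization Key node referenced by the task's Key Reference. Isolated from the task constants, the logic amounts to this sketch (`buildFetchOptions` is an illustrative name; the config shape matches the node's initial values defined later in this diff):

```javascript
// Sketch of how an API Authorization Key config becomes fetch options (illustrative helper).
function buildFetchOptions(authKeyConfig) {
    let options = {}
    if (authKeyConfig !== undefined && authKeyConfig.bearer_token !== undefined) {
        options.method = 'GET'
        options.headers = {
            'Content-type': 'application/json',
            Authorization: 'Bearer ' + authKeyConfig.bearer_token
        }
    }
    return options
}

// Usage:
// fetch(url, buildFetchOptions({ api_key: '', api_key_secret: '', bearer_token: 'my-token' }))
```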
@@ -709,65 +811,53 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                        */
                        apiResponseReceivedObject = JSON.parse(apiResponseReceivedText)
                        apiData = eval(apiResponseSchemaNode.config.nodePath)
-                        /*
-                        We will expect the apiData to be an Array. Depending if it has data or not we will return
-                        NO_MORE_PAGES or PAGE_FETCHED so that pagination procedure knows when to stop.
-                        */
-                        if (apiData.length === 0) {
-                            resolve('NO_MORE_PAGES')
-                        } else {
-                            resolve('PAGE_FETCHED')
-                        }
-                        /*
-                        If we received an error code, we abort the processing at this point.
-                        */
-                        if (errorCodeReceived === true) {
-                            resolve('ERROR_CODE_RECEIVED')
-                            return
-                        }
-                        /*
-                        Here we will validate that the overall format is what we are expecting.
-                        */
-                        switch (apiResponseSchemaNode.apiResponseFields.config.fieldType) {
-                            case 'object': {
-                                /*
-                                If we did not received an object, that probably means something is not
-                                good, and we got an HTML with the reason inside.
-                                */
-                                if (apiResponseReceivedText.substring(0, 1) !== "{") {
-                                    TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
-                                        "[WARN] start -> startProcess -> fetchAllPages -> fetchAPIData -> onResponse -> onEnd -> Unexpected Response. Not an JSON Object. -> apiResponseReceivedText = " + apiResponseReceivedText)
-                                    resolve('UNEXPECTED_API_RESPONSE')
-                                    return
-                                }
-                                break
-                            }
-                            case 'array': {
-                                /*
-                                If we did not received an array, that probably means something is not
-                                good, and we got an HTML with the reason inside.
-                                */
-                                if (apiResponseReceivedText.substring(0, 1) !== "[") {
-                                    TS.projects.foundations.globals.loggerVariables.VARIABLES_BY_PROCESS_INDEX_MAP.get(processIndex).BOT_MAIN_LOOP_LOGGER_MODULE_OBJECT.write(MODULE_NAME,
-                                        "[WARN] start -> startProcess -> fetchAllPages -> fetchAPIData -> onResponse -> onEnd -> Unexpected Response. Not an Array with Data. -> apiResponseReceivedText = " + apiResponseReceivedText)
-                                    resolve('UNEXPECTED_API_RESPONSE')
-                                    return
-                                }
-                                break
-                            }
-                        }
-                        /*
-                        The actual data we need could be anywhere within the data structure received.
-                        The exact place is configured at the apiResponseSchemaNode property nodePath.
-                        We will eval the nodePath property (this assumes that the apiResponseReceivedObject is defined)
-                        */
-                        apiResponseReceivedObject = JSON.parse(apiResponseReceivedText)
-                        apiData = eval(apiResponseSchemaNode.config.nodePath)
+                        /*
+                        If we are using a Page Token, we need to search the apiData for the Page Token and set it for the next pull.
+                        */
+                        pageTokenPath = undefined
+
+                        if (pageNumberParameter !== undefined) {
+                            if (pageNumberParameter.config.isPageToken === true) {
+                                getPageTokenPath(apiResponseSchemaNode.apiResponseFields)
+
+                                function getPageTokenPath(node) {
+                                    if (node.config.isPageToken === true) {
+                                        getPath(node)
+
+                                        function getPath(nodeToSearch) {
+                                            // Walk up from the token field to the root, building a dot separated path.
+                                            if (nodeToSearch.config.codeName !== "") {
+                                                if (pageTokenPath !== undefined) {
+                                                    pageTokenPath = nodeToSearch.config.codeName + "." + pageTokenPath
+                                                } else {
+                                                    pageTokenPath = nodeToSearch.config.codeName
+                                                }
+                                            }
+
+                                            if (nodeToSearch.parentNode !== undefined) {
+                                                getPath(nodeToSearch.parentNode)
+                                            } else {
+                                                pageToken = pageTokenPath
+                                            }
+                                        }
+                                        return
+                                    }
+                                    if (node.apiResponseFields !== undefined && node.apiResponseFields.length > 0) {
+                                        for (let i = 0; i < node.apiResponseFields.length; i++) {
+                                            getPageTokenPath(node.apiResponseFields[i])
+                                        }
+                                    }
+                                }
+                                pageToken = eval('apiData.' + pageTokenPath)
+                            }
+                        }
+
                        /*
                        We will expect the apiData to be an Array. Depending if it has data or not we will return
                        NO_MORE_PAGES or PAGE_FETCHED so that pagination procedure knows when to stop.
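The token lookup above evaluates `'apiData.' + pageTokenPath` with `eval`, consistent with how `nodePath` is already evaluated elsewhere in this file. An equivalent `eval`-free lookup, shown only for clarity, would walk the dot-separated path step by step:

```javascript
// Equivalent of eval('apiData.' + pageTokenPath) without eval (illustrative alternative).
// Returns undefined when any step is missing, which the caller treats as "no more pages".
function getByPath(object, path) {
    let value = object
    for (let step of path.split('.')) {
        if (value === undefined || value === null) { return undefined }
        value = value[step]
    }
    return value
}

// Usage:
let apiData = { meta: { next_token: 'abc123' } }
console.log(getByPath(apiData, 'meta.next_token')) // 'abc123'
```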
                        */
-                        if (apiData.length === 0) {
+                        if (apiData.length === 0 || (pageNumberParameter !== undefined && pageNumberParameter.config.isPageToken === true && pageToken === undefined)) { // A missing token only means "no more pages" when we are actually paginating by token.
                            resolve('NO_MORE_PAGES')
                        } else {
                            resolve('PAGE_FETCHED')
                        }
@@ -810,7 +900,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                    processIndex,
                    MODULE_NAME,
                    'API Data Fetcher Bot',
-                    { datasetType: dataset.config.codeName },
+                    { errorDetails: 'saveDataReceived -> Unsupported Dataset Type', datasetType: dataset.config.codeName },
                    'Unsupported Dataset Type',
                    dataset
                )
@@ -991,6 +1081,61 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                        existingFileArray.push(record.values)
                    }
                }
+
+                let primaryKeys = []
+                let existingKeys = new Map()
+                /*
+                Set up the primaryKeys array.
+                */
+                for (let j = 0; j < productDefinition.record.properties.length; j++) {
+                    let recordProperty = productDefinition.record.properties[j]
+                    if (recordProperty.config.primaryKey === true) {
+                        primaryKeys.push(recordProperty.config.codeName)
+                    }
+                }
+                /*
+                Set up the existingKeys map.
+                */
+                for (let i = 0; i < existingFileArray.length; i++) {
+                    let record = {
+                        values: existingFileArray[i],
+                        map: new Map()
+                    }
+                    /*
+                    Building the record map...
+                    */
+                    for (let j = 0; j < productDefinition.record.properties.length; j++) {
+                        let recordProperty = productDefinition.record.properties[j]
+                        record.map.set(recordProperty.config.codeName, record.values[j])
+                    }
+                    /*
+                    Building the current key...
+                    */
+                    let key = ""
+                    for (let j = 0; j < primaryKeys.length; j++) {
+                        let keyValue = record.map.get(primaryKeys[j])
+                        key = key + '->' + keyValue
+                    }
+                    existingKeys.set(key, record)
+                }
+                /*
+                If we received an array of data then we will try to append it to the current file.
+                */
+                if (dataReceivedArray !== undefined) {
+                    for (let i = 0; i < dataReceivedArray.length; i++) {
+                        let record = getRecord(dataReceivedArray[i])
+
+                        let key = ""
+                        for (let j = 0; j < primaryKeys.length; j++) {
+                            let keyValue = record.map.get(primaryKeys[j])
+                            key = key + '->' + keyValue
+                        }
+                        if (existingKeys.get(key) === undefined) {
+                            existingFileArray.push(record.values)
+                            existingKeys.set(key, record)
+                        }
+                    }
+                }
 
                existingFileContent = JSON.stringify(existingFileArray)
            }
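The de-duplication above keys each record by the concatenation of its primary-key values, each prefixed with `'->'`. Reduced to its essentials, it works like this sketch (`appendWithoutDuplicates` and its inputs are illustrative; the real code builds `{ values, map }` records via `getRecord`):

```javascript
// Minimal sketch of the primary-key de-duplication (names are illustrative).
function appendWithoutDuplicates(existingRecords, newRecords, primaryKeys) {
    let keyOf = (record) => primaryKeys.map(name => '->' + record.map.get(name)).join('')
    let existingKeys = new Map()
    for (let record of existingRecords) {
        existingKeys.set(keyOf(record), record)
    }
    let fileArray = existingRecords.map(record => record.values)
    for (let record of newRecords) {
        if (existingKeys.get(keyOf(record)) === undefined) { // Skip records already in the file.
            fileArray.push(record.values)
            existingKeys.set(keyOf(record), record)
        }
    }
    return fileArray
}
```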
@@ -999,7 +1144,15 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                The timestamp is data that comes in the API response. Here we will extract the
                timestamp and return it.
                */
-                let record = getRecord(dataReceivedObject)
+                let record
+
+                if (dataReceivedObject !== undefined) {
+                    record = getRecord(dataReceivedObject)
+                }
+                if (dataReceivedArray !== undefined) {
+                    record = getRecord(dataReceivedArray[0])
+                }
+
                let timestamp = record.map.get('timestamp')
                let startStamp = timestamp
                let numberOfDigits = 0
@@ -1027,7 +1180,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                    processIndex,
                    MODULE_NAME,
                    'API Data Fetcher Bot',
-                    undefined,
+                    { errorDetails: 'saveDataReceived -> saveOneMinFile -> getTimestamp -> Timestamp Record Property Missing' },
                    'Timestamp Record Property Missing',
                    productDefinition.record
                )
@@ -1149,7 +1302,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
                /*
                Check that we do not accept values that will break the JSON format of the file.
                */
-                if (recordProperty.config.isString !== true && recordProperty.config.isBoolean !== true) {
+                if (recordProperty.config.isString !== true && recordProperty.config.isBoolean !== true && recordProperty.config.isArray !== true) {
                    /*
                    At this point Dates have been converted to numbers, so if the Record Property is not
                    a string then it must be a number.
@@ -1187,6 +1340,7 @@ exports.newDataMiningBotModulesFetchingProcess = function (processIndex) {
            try {
                thisReport.file = {
                    lastRun: (new Date()).toISOString(),
+                    lastQueryRun: lastQueryRun,
                    lastPage: lastPage,
                    beginingOfMarket: contextVariables.beginingOfMarket
                }
diff --git a/Projects/Foundations/Schemas/App-Schema/Api/apis.json b/Projects/Foundations/Schemas/App-Schema/Api/apis.json
index 992f4d6be1..ddf9a05c8b 100644
--- a/Projects/Foundations/Schemas/App-Schema/Api/apis.json
+++ b/Projects/Foundations/Schemas/App-Schema/Api/apis.json
@@ -21,6 +21,16 @@
            "actionProject": "Visual-Scripting",
            "relatedUiObjectProject": "Foundations"
        },
+        {
+            "action": "Add UI Object",
+            "label": "Add API Authorization Key",
+            "relatedUiObject": "API Authorization Key",
+            "iconPathOn": "exchange-account-key",
+            "iconPathOff": "exchange-account-key",
+            "actionFunction": "payload.executeAction",
+            "actionProject": "Visual-Scripting",
+            "relatedUiObjectProject": "Foundations"
+        },
        {
            "action": "Delete UI Object",
            "actionProject": "Visual-Scripting",
@@ -45,6 +55,11 @@
            "name": "web3API",
            "type": "node",
            "childType": "Web3 API"
+        },
+        {
+            "name": "apiAuthorizationKey",
+            "type": "node",
+            "childType": "API Authorization Key"
        }
    ],
    "isPersonalData": true
diff --git a/Projects/Foundations/Schemas/App-Schema/api-authorization-key.json b/Projects/Foundations/Schemas/App-Schema/api-authorization-key.json
new file mode 100644
index 0000000000..21bafa4391
--- /dev/null
+++ b/Projects/Foundations/Schemas/App-Schema/api-authorization-key.json
@@ -0,0 +1,37 @@
+{
+    "type": "API Authorization Key",
+    "menuItems": [
+        {
+            "action": "Configure",
+            "label": "Configure",
+            "iconPathOn": "configuration",
+            "iconPathOff": "configuration",
+            "dontShowAtFullscreen": true,
+            "actionFunction": "uiObject.configEditor.activate"
+        },
+        {
+            "action": "Delete UI Object",
+            "actionProject": "Visual-Scripting",
+            "askConfirmation": true,
+            "confirmationLabel": "Confirm to Delete",
+            "label": "Delete",
+            "iconPathOn": "delete-entity",
+            "iconPathOff": "delete-entity",
+            "actionFunction": "payload.executeAction"
+        }
+    ],
+    "editors": {
+        "config": true
+    },
+    "initialValues": {
+        "config": "{ \n    \"api_key\": \"\",\n    \"api_key_secret\": \"\",\n    \"bearer_token\": \"\"\n}"
+    },
+    "addLeftIcons": true,
+    "level": 1,
+    "attachingRules": {
+        "compatibleTypes": "->APIs->"
+    },
+    "propertyNameAtParent": "apiAuthorizationKey",
+    "isPersonalData": true,
+    "icon": "exchange-account-key"
+}
\ No newline at end of file
diff --git a/Projects/Foundations/Schemas/App-Schema/key-reference.json b/Projects/Foundations/Schemas/App-Schema/key-reference.json
index 11610360d6..e3906b9a3c 100644
--- a/Projects/Foundations/Schemas/App-Schema/key-reference.json
+++ b/Projects/Foundations/Schemas/App-Schema/key-reference.json
@@ -21,7 +21,7 @@
        "compatibleTypes": "->Task->"
    },
    "referencingRules": {
-        "compatibleTypes": "->Exchange Account Key->"
+        "compatibleTypes": "->Exchange Account Key->API Authorization Key->"
    },
    "propertyNameAtParent": "keyReference"
 }
\ No newline at end of file
diff --git a/Projects/Foundations/TS/Function-Libraries/FromOneMinToMultiTimeFrameFunctions.js b/Projects/Foundations/TS/Function-Libraries/FromOneMinToMultiTimeFrameFunctions.js
index 04c3115f14..3c239953b3 100644
--- a/Projects/Foundations/TS/Function-Libraries/FromOneMinToMultiTimeFrameFunctions.js
+++ b/Projects/Foundations/TS/Function-Libraries/FromOneMinToMultiTimeFrameFunctions.js
@@ -443,7 +443,7 @@ exports.newFoundationsFunctionLibrariesFromOneMinToMultiTimeFrameFunctions = fun
                if (property.config.isString === true) {
                    fileContent = fileContent + propertySeparator + '"' + element[property.config.codeName] + '"'
                } else {
-                    fileContent = fileContent + propertySeparator + element[property.config.codeName]
+                    fileContent = fileContent + propertySeparator + JSON.stringify(element[property.config.codeName])
                }
                propertySeparator = ","
            }
@@ -497,6 +497,9 @@ exports.newFoundationsFunctionLibrariesFromOneMinToMultiTimeFrameFunctions = fun
                else if (property.config.isBoolean === true) {
                    outputElement[property.config.codeName] = false // Default Value
                }
+                else if (property.config.isArray === true) {
+                    outputElement[property.config.codeName] = [] // Default Value
+                }
                else {
                    outputElement[property.config.codeName] = 0 // Default Value
                }
@@ -568,6 +571,7 @@ exports.newFoundationsFunctionLibrariesFromOneMinToMultiTimeFrameFunctions = fun
                    aggregationMethodMax()
                    aggregationMethodSum()
                    aggregationMethodAvg()
+                    aggregationMethodConcat()
 
                    saveElement = true
 
@@ -610,9 +614,13 @@ exports.newFoundationsFunctionLibrariesFromOneMinToMultiTimeFrameFunctions = fun
                        */
                        for (let j = 0; j < node.outputDataset.referenceParent.parentNode.record.properties.length; j++) {
                            let property = node.outputDataset.referenceParent.parentNode.record.properties[j]
-                            if (property.config.aggregationMethod === 'Min' || saveElement === false) {
-                                if (outputElement[property.config.codeName] === 0) { // Set initial value if default value is present
-                                    outputElement[property.config.codeName] = record.map.get(property.config.codeName)
+                            if (property.config.aggregationMethod === 'Min') {
+                                if (saveElement === false) {
+                                    if (outputElement[property.config.codeName] === 0) { // Set initial value if default value is present
+                                        outputElement[property.config.codeName] = record.map.get(property.config.codeName)
+                                    } else if (record.map.get(property.config.codeName) < outputElement[property.config.codeName]) {
+                                        outputElement[property.config.codeName] = record.map.get(property.config.codeName)
+                                    }
                                } else if (record.map.get(property.config.codeName) < outputElement[property.config.codeName]) {
                                    outputElement[property.config.codeName] = record.map.get(property.config.codeName)
                                }
@@ -667,6 +675,18 @@ exports.newFoundationsFunctionLibrariesFromOneMinToMultiTimeFrameFunctions = fun
                                }
                            }
                        }
+
+                        function aggregationMethodConcat() {
+                            /*
+                            This is the Concat type of aggregation.
+                            */
+                            for (let j = 0; j < node.outputDataset.referenceParent.parentNode.record.properties.length; j++) {
+                                let property = node.outputDataset.referenceParent.parentNode.record.properties[j]
+                                if (property.config.aggregationMethod === 'Concat') {
+                                    outputElement[property.config.codeName] = outputElement[property.config.codeName].concat(record.map.get(property.config.codeName))
+                                }
+                            }
+                        }
                    }
                }
                if (saveElement === true) { // then we have a valid element, otherwise it means there were no elements to fill this one in its time range.
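The new `Concat` aggregation complements `Min`, `Max`, `Sum` and `Avg` for array-valued record properties: when one-minute elements are rolled up into a larger time frame, their arrays are concatenated rather than summed or compared. A reduced sketch, using a hypothetical `mentions` property:

```javascript
// Minimal sketch of the 'Concat' aggregation across one-minute elements (illustrative).
function aggregateConcat(oneMinElements, codeName) {
    let output = [] // Matches the isArray default value added above.
    for (let element of oneMinElements) {
        output = output.concat(element[codeName])
    }
    return output
}

// Usage:
console.log(aggregateConcat([{ mentions: ['a'] }, { mentions: ['b', 'c'] }], 'mentions'))
// [ 'a', 'b', 'c' ]
```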
diff --git a/Projects/Foundations/UI/Node-Action-Functions/TaskFunctions.js b/Projects/Foundations/UI/Node-Action-Functions/TaskFunctions.js index d4a2cb125a..b57caac0cb 100644 --- a/Projects/Foundations/UI/Node-Action-Functions/TaskFunctions.js +++ b/Projects/Foundations/UI/Node-Action-Functions/TaskFunctions.js @@ -155,7 +155,7 @@ function newFoundationsFunctionLibraryTaskFunctions() { 'Learning Bot Instance->' + 'Sensor Process Instance->Time Frames Filter->API Data Fetcher Process Instance->Indicator Process Instance->Study Process Instance->Trading Process Instance->Portfolio Process Instance->Learning Process Instance->' + 'Execution Started Event->' + - 'Key Reference->Exchange Account Key->' + + 'Key Reference->Exchange Account Key->API Authorization Key->' + 'Task Manager->' + 'Data Mine Tasks->Trading Mine Tasks->Portfolio Mine Tasks->Learning Mine Tasks->' + 'Market Data Tasks->Market Trading Tasks->Market Portfolio Tasks->Market Learning Tasks->' + @@ -249,7 +249,7 @@ function newFoundationsFunctionLibraryTaskFunctions() { let managedTasksLightingPath = '->Task->Managed Tasks->Portfolio Bot Instance->' + 'Task Reference->Task->Sensor Bot Instance->API Data Fetcher Bot->Indicator Bot Instance->Study Bot Instance->Trading Bot Instance->Learning Bot Instance->' + 'Sensor Process Instance->Time Frames Filter->API Data Fetcher Process Instance->Indicator Process Instance->Study Process Instance->Trading Process Instance->Learning Process Instance->' + - 'Execution Started Event->Key Reference->Exchange Account Key->' + + 'Execution Started Event->Key Reference->Exchange Account Key->API Authorization Key->' + 'Task Manager->' + 'Data Mine Tasks->Trading Mine Tasks->Learning Mine Tasks->Portfolio Mine Tasks->' + 'Market Trading Tasks->Market Data Tasks->Market Learning Tasks->Market Portfolio Tasks->' + diff --git a/TaskServerRoot.js b/TaskServerRoot.js index 26d38113c7..258c0e9ae9 100644 --- a/TaskServerRoot.js +++ b/TaskServerRoot.js @@ -65,7 +65,8 @@ async function runRoot() { axios: require('axios'), crypto: require('crypto'), simpleGit: require('simple-git'), - ethers: require('ethers') + ethers: require('ethers'), + vaderSentiment: require('vader-sentiment') } SA.version = require('./package.json').version /* diff --git a/package-lock.json b/package-lock.json index 764065fb62..e1443923a5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -36,6 +36,7 @@ "telegraf": "^4.4.2", "twitter-api-v2": "^1.7.1", "util": "^0.12.4", + "vader-sentiment": "^1.1.3", "web3": "^1.6.1", "workbox-webpack-plugin": "5.1.4", "ws": "^8.4.0" @@ -16482,6 +16483,11 @@ "node": ">= 8" } }, + "node_modules/vader-sentiment": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/vader-sentiment/-/vader-sentiment-1.1.3.tgz", + "integrity": "sha512-z9ys48I7pd3ngy/YzYDCTcOp6//z96wiXaJnQnuyxiPT3PTU0mWLWVCKvips11S5euvHfXsr0ahsmWT2oeXcUw==" + }, "node_modules/varint": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/varint/-/varint-5.0.2.tgz", @@ -30598,6 +30604,11 @@ } } }, + "vader-sentiment": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/vader-sentiment/-/vader-sentiment-1.1.3.tgz", + "integrity": "sha512-z9ys48I7pd3ngy/YzYDCTcOp6//z96wiXaJnQnuyxiPT3PTU0mWLWVCKvips11S5euvHfXsr0ahsmWT2oeXcUw==" + }, "varint": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/varint/-/varint-5.0.2.tgz", diff --git a/package.json b/package.json index d5b68d60fd..9ec583a967 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "telegraf": "^4.4.2", 
"twitter-api-v2": "^1.7.1", "util": "^0.12.4", + "vader-sentiment": "^1.1.3", "web3": "^1.6.1", "workbox-webpack-plugin": "5.1.4", "ws": "^8.4.0"