@echo off
REM ---------------------------------------------------------------------------
REM Qlik-Py-Init.bat
REM One-time setup for the Qlik SSE Python extension:
REM   1. creates a Python virtual environment (qlik-py-env),
REM   2. moves the project folders (generated, core) into it,
REM   3. activates it and installs the required packages.
REM Run Qlik-Py-Start.bat afterwards to start the service.
REM ---------------------------------------------------------------------------

echo Setting up the Python virtual environment... & echo.
python -m venv qlik-py-env
REM Abort early if venv creation failed (e.g. Python not on the PATH);
REM previously the script continued and reported success regardless.
if errorlevel 1 (
    echo Failed to create the virtual environment. Check that Python 3 is on the PATH.
    pause
    exit /b 1
)
echo.
echo Moving project files to the new directory... & echo.
move generated "%~dp0\qlik-py-env"
move core "%~dp0\qlik-py-env"
echo.
echo Activating the virtual environment... & echo.
cd "%~dp0\qlik-py-env\Scripts"
call activate
cd ..
echo.
echo Installing required packages... & echo.
pip install grpcio
pip install numpy
pip install scipy
pip install pandas
pip install fbprophet
echo.
echo All done. Run Qlik-Py-Start.bat to start the SSE Extension Service. & echo.
pause
#! /usr/bin/env python3
"""
Qlik Sense Server Side Extension (SSE) providing Python data science
functions for Qlik: linear correlations and Facebook Prophet forecasts.

Launched via Qlik-Py-Start.bat, which activates the qlik-py-env virtual
environment, changes into the core directory and runs ``python __main__.py``.
"""
import argparse
import json
import logging
import logging.config
import os
import sys
import time
from concurrent import futures

# Add Generated folder (gRPC/protobuf stubs) to module path.
PARENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(PARENT_DIR, 'generated'))

import ServerSideExtension_pb2 as SSE
import grpc

# Import libraries for added functions
import numpy as np
import pandas as pd
from _prophet_forecast import ProphetForQlik

# Set the default port for this SSE Extension
_DEFAULT_PORT = '50054'

_ONE_DAY_IN_SECONDS = 60 * 60 * 24
_MINFLOAT = float('-inf')


class ExtensionService(SSE.ConnectorServicer):
    """
    A SSE-plugin to provide Python data science functions for Qlik.
    """

    def __init__(self, funcdef_file):
        """
        Class initializer.
        :param funcdef_file: path to a function definition JSON file
        """
        self._function_definitions = funcdef_file
        # Directory (relative to the working directory) for the optional
        # debug logs written by the functions below.
        os.makedirs('logs', exist_ok=True)
        log_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logger.config')
        logging.config.fileConfig(log_file)
        logging.info('Logging enabled')

    @property
    def function_definitions(self):
        """
        :return: json file with function definitions
        """
        return self._function_definitions

    @property
    def functions(self):
        """
        :return: Mapping of function id to implementing method name
        """
        return {
            0: '_correlation',
            1: '_correlation',
            2: '_prophet',
            3: '_prophet',
            4: '_prophet',
            5: '_prophet_seasonality'
        }

    """
    Implementation of added functions.
    """

    @staticmethod
    def _string_to_float(s):
        """
        Utility function.
        :return: Float for the String parameter s, or None in the case of a ValueError exception
        """
        try:
            return float(s)
        except ValueError:
            return None

    @staticmethod
    def _correlation(request, context):
        """
        Calculate the correlation coefficient for two columns. Scalar function.
        :param request: an iterable sequence of RowData
        :param context: not used
        :return: the correlation coefficient for each row
        :Qlik expression examples:
        :.Pearson('1;NA;3;4;5;6.9', ';11;12;;14;')
        :.Correlation('1;NA;3;4;5;6.9', ';11;12;;14;', 'pearson')
        :Possible values for the third argument are 'pearson', 'kendall' or 'spearman'
        """
        # Iterate over bundled rows
        for request_rows in request:
            response_rows = []

            # Set to True for additional info in terminal and log file
            debug = False

            if debug:
                # Create a log file for the function calls
                logfile = os.path.join(os.getcwd(), 'logs', 'Correlation Log.txt')

                sys.stdout.write("Function Call: {0} \n\n".format(time.ctime(time.time())))
                with open(logfile, 'a') as f:
                    f.write("Function Call: {0} \n\n".format(time.ctime(time.time())))

            # Iterating over rows
            for row in request_rows.rows:
                # Retrieve the value of the parameters.
                # Two or three columns are sent from the client, hence the
                # length of params will be 2 or 3.
                params = [col.strData for col in row.duals]

                if debug:
                    sys.stdout.write("\nPARAMETERS:\n\n{0}\n".format("\n\n".join(str(x) for x in params)))
                    with open(logfile, 'a') as f:
                        f.write("\nPARAMETERS:\n\n{0}\n".format("\n\n".join(str(x) for x in params)))

                # Create lists for the two series
                x = params[0].split(";")
                y = params[1].split(";")

                # Set the correlation type based on the third argument.
                # Default is Pearson if the arg is missing.
                try:
                    corr_type = params[2].lower()
                except IndexError:
                    corr_type = 'pearson'

                if debug:
                    sys.stdout.write("\n\nx ({0:d} data points):\n{1}\n".format(len(x), " ".join(str(v) for v in x)))
                    sys.stdout.write("\ny ({0:d} data points):\n{1}\n".format(len(y), " ".join(str(v) for v in y)))
                    sys.stdout.write("\nCorrelation Type: {0}\n\n".format(corr_type))

                    with open(logfile, 'a') as f:
                        f.write("\n\nx ({0:d} data points):\n{1}\n".format(len(x), " ".join(str(v) for v in x)))
                        f.write("\ny ({0:d} data points):\n{1}\n".format(len(y), " ".join(str(v) for v in y)))
                        f.write("\nCorrelation Type: {0}\n\n".format(corr_type))

                # Check that the lists are of equal length
                if len(x) == len(y) and len(x) > 0:
                    # Create a Pandas data frame using the lists; unparseable
                    # entries become None/NaN and are excluded by df.corr().
                    df = pd.DataFrame({'x': [ExtensionService._string_to_float(d) for d in x],
                                       'y': [ExtensionService._string_to_float(d) for d in y]})

                    # Calculate the correlation matrix for the two series in the data frame
                    corr_matrix = df.corr(method=corr_type)

                    if debug:
                        sys.stdout.write("\n\nCorrelation Matrix:\n{}\n".format(corr_matrix.to_string()))
                        with open(logfile, 'a') as f:
                            f.write("\n\nCorrelation Matrix:\n{}\n".format(corr_matrix.to_string()))

                    # Prepare the result.
                    # BUG FIX: this previously used None for the undefined case, but
                    # protobuf rejects None for the numeric Dual field (TypeError at
                    # serialization time). NaN is returned instead, which Qlik
                    # renders as a null value.
                    if corr_matrix.size > 1:
                        result = corr_matrix.iloc[0, 1]
                    else:
                        result = float('nan')
                else:
                    result = float('nan')

                # Create an iterable of Dual with a numerical value
                duals = iter([SSE.Dual(numData=result)])

                # Append the row data constructed to response_rows
                response_rows.append(SSE.Row(duals=duals))

            # Yield Row data as Bundled rows
            yield SSE.BundledRows(rows=response_rows)

    @staticmethod
    def _prophet(request, context):
        """
        Provide a timeseries forecast using Facebook's Prophet library. Scalar function.
        :param request: an iterable sequence of RowData
        :param context: not used for now
        :return: the forecasted value for each row
        :
        :Qlik expression example:
        :.Prophet(MonthStartDate, sum(Value), 'return=yhat, freq=MS, debug=true')
        :The third argument in the Qlik expression is a string of parameters.
        :This should take the form of a comma separated string:
        :e.g 'return=yhat, freq=MS, debug=true' or 'return=yhat_upper, freq=MS'
        :
        :.Prophet_Holidays(ForecastDate, sum(Value), Holiday, 'return=yhat, freq=D, debug=true')
        :In the holidays variant the third argument is a field containing the holiday name or NULL for each row.
        :
        :Parameters accepted for the Prophet() function are: cap, floor, changepoint_prior_scale, interval_width,
        :lower_window, upper_window
        :
        :Parameters accepted for the make_future_dataframe() function are: freq
        :
        :For more information on these parameters go here: https://facebook.github.io/prophet/docs/quick_start.html
        :
        :Additional parameters used are: return, take_log, debug
        :
        :cap = 1000 : A logistic growth model can be defined using cap and floor. Values should be double or integer
        :changepoint_prior_scale = 0.05 : Decrease if the trend changes are being overfit, increase for underfit
        :interval_width = 0.08 : Set the width of the uncertainty intervals
        :lower_window = 1 : Only used with holidays. Extend the holiday by certain no. of days prior to the date.
        :upper_window = 1 : Only used with holidays. Extend the holiday by certain no. of days after the date.
        :freq = MS : The frequency of the time series. e.g. MS for Month Start. See the possible options here:
        :          : http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases
        :return = yhat : Any of the options in the forecast result. You can see these options with debug=true
        :              : yhat, yhat_upper, yhat_lower : Forecast, upper and lower limits
        :              : y_then_yhat, y_then_yhat_upper, y_then_yhat_lower : Return forecast only for forecast periods
        :              : trend, trend_upper, trend_lower : Trend component of the timeseries
        :              : seasonal, seasonal_upper, seasonal_lower: Seasonal component of the timeseries
        :take_log = false : Apply logarithm to the values before the forecast. Default is false
        :debug = true : Print execution information to the terminal and logs in logs/Prophet Log.txt
        """

        # Get a list from the generator object so that it can be iterated over multiple times
        request_list = [request_rows for request_rows in request]

        # Create an instance of the _prophet_forecast class.
        # This will take the request data from Qlik and prepare it for forecasting.
        predictor = ProphetForQlik(request_list)

        # Calculate the forecast and store in a Pandas series
        forecast = predictor.predict()

        # Values in the series are converted to type SSE.Dual
        response_rows = forecast.apply(lambda result: iter([SSE.Dual(numData=result)]))

        # Values in the series are converted to type SSE.Row.
        # The series is then converted to a list.
        response_rows = response_rows.apply(lambda duals: SSE.Row(duals=duals)).tolist()

        # BUG FIX: previously the complete response row list was yielded once per
        # incoming bundle, duplicating rows whenever Qlik split the request into
        # more than one bundle. Slice the responses so that each bundle receives
        # exactly as many rows as it sent (the predictor returns one value per
        # request row, so the totals match).
        row_iter = iter(response_rows)
        for request_rows in request_list:
            yield SSE.BundledRows(rows=[next(row_iter) for _ in request_rows.rows])

    @staticmethod
    def _prophet_seasonality(request, context):
        """
        Provide the seasonality component of the Prophet timeseries forecast. Scalar function.
        :param request: an iterable sequence of RowData
        :param context: not used for now
        :return: the forecasted value for each row
        :
        :Qlik expression example:
        :.Prophet_Seasonality(Month, $(vConcatSeries), $(vHolidays), 'seasonality=yearly, freq=MS, debug=true')
        :The fourth argument in the Qlik expression is a string of parameters.
        :This should take the form of a comma separated string:
        :e.g 'seasonality=yearly, freq=MS, debug=true' or 'seasonality=weekly, freq=D'
        :
        :Parameters accepted for the Prophet() function are: cap, floor, changepoint_prior_scale, interval_width,
        :lower_window, upper_window
        :
        :Parameters accepted for the make_future_dataframe() function are: freq
        :
        :For more information on these parameters go here: https://facebook.github.io/prophet/docs/quick_start.html
        :
        :Additional parameters used are: return, take_log, debug
        :
        :cap = 1000 : A logistic growth model can be defined using cap and floor. Values should be double or integer
        :changepoint_prior_scale = 0.05 : Decrease if the trend changes are being overfit, increase for underfit
        :interval_width = 0.08 : Set the width of the uncertainty intervals
        :lower_window = -1 : Only used with holidays. Extend the holiday by certain no. of days prior to the date.
        :upper_window = 1 : Only used with holidays. Extend the holiday by certain no. of days after the date.
        :freq = MS : The frequency of the time series. e.g. MS for Month Start. See the possible options here:
        :          : http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases
        :return = yhat : Any of the options in the forecast result. You can see these options with debug=true
        :              : yhat, yhat_upper, yhat_lower : Forecast, upper and lower limits
        :              : y_then_yhat, y_then_yhat_upper, y_then_yhat_lower : Return forecast only for forecast periods
        :              : trend, trend_upper, trend_lower : Trend component of the timeseries
        :              : seasonal, seasonal_upper, seasonal_lower: Seasonal component of the timeseries
        :take_log = false : Apply logarithm to the values before the forecast. Default is false
        :debug = true : Print execution information to the terminal and logs in logs/Prophet Log.txt
        """

        # Get a list from the generator object so that it can be iterated over multiple times
        request_list = [request_rows for request_rows in request]

        # Create an instance of the _prophet_forecast class using the alternate
        # constructor for seasonality requests.
        predictor = ProphetForQlik.init_seasonality(request_list)

        # Calculate the forecast and store in a Pandas series
        forecast = predictor.predict()

        # Values in the series are converted to type SSE.Dual
        response_rows = forecast.apply(lambda result: iter([SSE.Dual(numData=result)]))

        # Values in the series are converted to type SSE.Row.
        # The series is then converted to a list.
        response_rows = response_rows.apply(lambda duals: SSE.Row(duals=duals)).tolist()

        # BUG FIX: as in _prophet, slice the responses per incoming bundle
        # instead of repeating the full row list for every bundle.
        row_iter = iter(response_rows)
        for request_rows in request_list:
            yield SSE.BundledRows(rows=[next(row_iter) for _ in request_rows.rows])

    @staticmethod
    def _get_function_id(context):
        """
        Retrieve function id from header.
        :param context: context
        :return: function id
        """
        metadata = dict(context.invocation_metadata())
        header = SSE.FunctionRequestHeader()
        header.ParseFromString(metadata['qlik-functionrequestheader-bin'])

        return header.functionId

    """
    Implementation of rpc functions.
    """

    def GetCapabilities(self, request, context):
        """
        Get capabilities.
        Note that neither request nor context is used in the implementation of this method, but both are still
        required parameters: gRPC always sends both when making a function call and therefore we must include
        them to avoid error messages regarding too many parameters provided from the client.
        :param request: the request, not used in this method.
        :param context: the context, not used in this method.
        :return: the capabilities.
        """
        logging.info('GetCapabilities')

        # Create an instance of the Capabilities grpc message.
        # Disable script evaluation and set values for pluginIdentifier and pluginVersion.
        capabilities = SSE.Capabilities(allowScript=False,
                                        pluginIdentifier='NAF Python Toolbox',
                                        pluginVersion='v1.2.0')

        # Add the user defined function definitions to the message
        with open(self.function_definitions) as json_file:
            # Iterate over each function definition and add data to the Capabilities grpc message
            for definition in json.load(json_file)['Functions']:
                function = capabilities.functions.add()
                function.name = definition['Name']
                function.functionId = definition['Id']
                function.functionType = definition['Type']
                function.returnType = definition['ReturnType']

                # Retrieve name and type of each parameter
                for param_name, param_type in sorted(definition['Params'].items()):
                    function.params.add(name=param_name, dataType=param_type)

                logging.info('Adding to capabilities: {}({})'.format(function.name,
                                                                     [p.name for p in function.params]))

        return capabilities

    def ExecuteFunction(self, request_iterator, context):
        """
        Call corresponding function based on function id sent in header.
        :param request_iterator: an iterable sequence of RowData.
        :param context: the context.
        :return: an iterable sequence of RowData.
        """
        # Retrieve function id
        func_id = self._get_function_id(context)
        logging.info('ExecuteFunction (functionId: {})'.format(func_id))

        return getattr(self, self.functions[func_id])(request_iterator, context)

    """
    Implementation of the Server connecting to gRPC.
    """

    def Serve(self, port, pem_dir):
        """
        Start the gRPC server and block until interrupted.
        :param port: port to listen on.
        :param pem_dir: Directory including certificates; falls back to an insecure connection if empty.
        :return: None
        """
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        SSE.add_ConnectorServicer_to_server(self, server)

        if pem_dir:
            # Secure connection using mutual TLS (client certificates required)
            with open(os.path.join(pem_dir, 'sse_server_key.pem'), 'rb') as f:
                private_key = f.read()
            with open(os.path.join(pem_dir, 'sse_server_cert.pem'), 'rb') as f:
                cert_chain = f.read()
            with open(os.path.join(pem_dir, 'root_cert.pem'), 'rb') as f:
                root_cert = f.read()
            credentials = grpc.ssl_server_credentials([(private_key, cert_chain)], root_cert, True)
            server.add_secure_port('[::]:{}'.format(port), credentials)
            logging.info('*** Running server in secure mode on port: {} ***'.format(port))
        else:
            # Insecure connection
            server.add_insecure_port('[::]:{}'.format(port))
            logging.info('*** Running server in insecure mode on port: {} ***'.format(port))

        server.start()
        try:
            # gRPC serves on background threads; keep the main thread alive.
            while True:
                time.sleep(_ONE_DAY_IN_SECONDS)
        except KeyboardInterrupt:
            server.stop(0)


class AAIException(Exception):
    """
    Custom exception call to pass on information error messages
    """
    pass


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--port', nargs='?', default=_DEFAULT_PORT)
    parser.add_argument('--pem_dir', nargs='?')
    parser.add_argument('--definition_file', nargs='?', default='functions.json')
    args = parser.parse_args()

    # need to locate the file when script is called from outside its location dir.
+ def_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), args.definition_file) + + calc = ExtensionService(def_file) + calc.Serve(args.port, args.pem_dir) diff --git a/core/_prophet_forecast.py b/core/_prophet_forecast.py new file mode 100644 index 0000000..4b710dd --- /dev/null +++ b/core/_prophet_forecast.py @@ -0,0 +1,697 @@ +import os +import sys +import time +import string +import numpy as np +import pandas as pd +from fbprophet import Prophet +import ServerSideExtension_pb2 as SSE + +# Add Generated folder to module path. +PARENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.append(os.path.join(PARENT_DIR, 'generated')) + +class ProphetForQlik: + """ + A class to provide Facebook Prophet functions for Qlik. + """ + + # Counter used to name log files for instances of the class + log_no = 0 + + # Dates in Qlik are stored as serial number that equals the number of days since December 30, 1899. + # This variable is used in correctly translating dates. + qlik_cal_start = pd.Timestamp('1899-12-30') + # This variable denotes the unit of time used in Qlik for numerical representation of datetime values + qlik_cal_unit = 'D' + + def __init__(self, request): + """ + Class initializer. 
+ :param request: an iterable sequence of RowData + :Sets up the input data frame and parameters based on the request + """ + + # Set the request variable for this object instance + self.request = request + + # Create a Pandas Data Frame with column ds for the dates and column y for values + self.request_df = pd.DataFrame([(row.duals[0].numData, row.duals[1].numData) \ + for request_rows in self.request \ + for row in request_rows.rows], \ + columns=['ds','y']) + + # Handle null value rows in the request dataset + self.NaT_df = self.request_df.loc[self.request_df.ds.isnull()].copy() + + # If such a row exists it will be sliced off and then added back to the response + if len(self.NaT_df) > 0: + self.NaT_df.loc[:,'y'] = 0 + self.request_df = self.request_df.loc[self.request_df.ds.notnull()] + + # Get additional arguments from the third column in the request data + # Arguments should take the form of a comma separated string: 'arg1=value1, arg2=value2' + self._set_params() + + # If the request contains holidays create a holidays data frame + if self.has_holidays: + self.holidays_df = pd.DataFrame([(row.duals[0].numData, row.duals[2].strData)\ + for request_rows in self.request\ + for row in request_rows.rows],\ + columns=['ds','holiday']) + + if self.lower_window is not None: + self.holidays_df.loc[:, 'lower_window'] = self.lower_window + + if self.upper_window is not None: + self.holidays_df.loc[:, 'upper_window'] = self.upper_window + + # Additional information is printed to the terminal and logs if the paramater debug = true + if self.debug == 'true': + self._print_log(1) + + # Convert numerical date values to datetime + self.request_df.loc[:,'ds'] = pd.to_datetime(self.request_df.loc[:,'ds'], unit=self.qlik_cal_unit, + origin=self.qlik_cal_start) + + # If the request contains holidays update the ds column for it as well + if self.has_holidays: + self.holidays_df.loc[:,'ds'] = self.request_df.loc[:,'ds'].copy() + + # Also remove rows from the holidays data frame 
where the holiday or ds column is empty + self.holidays_df = self.holidays_df.loc[self.holidays_df.holiday != ''] + self.holidays_df = self.holidays_df.loc[self.holidays_df.ds.notnull()] + + # Make the holidays names lower case to avoid the return argument becoming case sensitive + self.holidays_df.loc[:,'holiday'] = self.holidays_df.holiday.str.lower() + # Also remove spaces and apostrophes + self.holidays_df.loc[:,'holiday'] = self.holidays_df.holiday.str.replace(" ", "_") + self.holidays_df.loc[:,'holiday'] = self.holidays_df.holiday.str.replace("'", "") + + # And sort by the ds column and reset indexes + self.holidays_df = self.holidays_df.sort_values('ds') + self.holidays_df = self.holidays_df.reset_index(drop=True) + + # Finally add this to the key word argumemnts for Prophet + self.prophet_kwargs['holidays'] = self.holidays_df + + # Sort the Request Data Frame based on dates, as Qlik may send unordered data + self.request_df = self.request_df.sort_values('ds') + + # Store the original indexes for re-ordering output later + self.request_index = self.request_df.loc[:,'ds'] + + # Ignore the placeholder rows which will be filled with forecasted figures later + self.input_df = self.request_df.iloc[:-self.forecast_periods].copy() + + # Reset the indexes for the input data frame. + # Not doing this interferes with correct ordering of the output from Prophet + self.input_df = self.input_df.reset_index(drop=True) + + # If take_log = true take logarithm of relevant input values. 
+ # This is usually to make the timeseries more stationary + if self.take_log == 'true': + self.input_df.loc[:,'y'] = np.log(self.input_df.loc[:,'y']) + + if self.cap is not None: + self.cap = np.log(self.cap) + + if self.floor is not None: + self.floor = np.log(self.floor) + + # If a logistic growth model is applied add the cap and floor columns to the input data frame + if self.cap is not None: + self.input_df.loc[:,'cap'] = self.cap + + if self.floor is not None: + self.input_df.loc[:,'floor'] = self.floor + + if self.debug == 'true': + self._print_log(2) + + @classmethod + def init_seasonality(cls, request): + """ + Alternative initialization method for this class + Used when the request contains the timeseries as a contatenated string, repeated for every row + This is used when the number of input data points differs from the output rows required for seasonality plots + """ + + # The rows are duplicates in this kind of request, so inputs are simply taken from the first row + # First we store the correct number of rows to be output. 
+ request_row_count = len([row for request_rows in request for row in request_rows.rows]) + # The timeseries is accepted as a string from the second column of the first row + timeseries = request[0].rows[0].duals[1].strData + # The holidays are taken from the third column of the first row + holidays = request[0].rows[0].duals[2].strData + # The key word arguments are taken from the fourth column of the first row + args = request[0].rows[0].duals[3] + + # The data may be sent unsorted by Qlik, so we have to store the order to use when sending the results + sort_order = pd.DataFrame([(row.duals[0].numData, row.duals[0].strData) \ + for request_rows in request \ + for row in request_rows.rows], \ + columns=['seasonality_num', 'seasonality_str']) + + # We ignore Null values here as these are handled separately in the response + sort_order = sort_order.loc[sort_order.seasonality_num.notnull()] + + # The correct sort order is based on the data frame's index after sorting on the seasonality field + sort_order = sort_order.sort_values('seasonality_num') + + # Re-create the request with ds and y columns + pairs = timeseries.split(";") + request_df = pd.DataFrame([p.split(":") for p in pairs], columns=['ds', 'y']) + + # Convert strings to numeric values, replace conversion errors with Null values + request_df = request_df.apply(pd.to_numeric, errors='coerce') + + # Check if the holidays column is populated + if len(holidays) > 0: + # Create a holidays data frame + pairs = holidays.split(";") + holiday_df = pd.DataFrame([p.split(":") for p in pairs], columns=['ds', 'holiday']) + + # Merge the holidays with the request data frame using column ds as key + request_df = pd.merge(request_df, holiday_df, on='ds', how='left') + + # Replace null values in the holiday column with empty strings + request_df = request_df.fillna(value={'holiday': ''}) + + # Values in the data frame are converted to type SSE.Dual + request_df.loc[:,'ds'] = request_df.loc[:,'ds'].apply(lambda result: 
SSE.Dual(numData=result)) + request_df.loc[:,'y'] = request_df.loc[:,'y'].apply(lambda result: SSE.Dual(numData=result)) + if 'holiday' in request_df.columns: + request_df.loc[:,'holiday'] = request_df.loc[:,'holiday'].apply(lambda result: SSE.Dual(strData=result)) + + # Add the keyword arguments to the data frame as well, already of type SSE.Dual + request_df.loc[:, 'args'] = args + + # Create the updated request list and convert to SSE data types + request_list = request_df.values.tolist() + request_list = [SSE.Row(duals=duals) for duals in request_list] + updated_request = [SSE.BundledRows(rows=request_list)] + + # Call the default initialization method + instance = ProphetForQlik(updated_request) + + # Handle null value row in the request dataset + instance.NaT_df = request_df.loc[request_df.ds.isnull()].copy() + + # If such a row exists it will be sliced off and then added back to the response + if len(instance.NaT_df) > 0: + instance.NaT_df.loc[:,'y'] = 0 + + # Set a property that lets us know this instance was created for seasonality forecasts + instance.is_seasonality_request = True + + # Set a property that lets us know the row count in the original request as this will be different from request_df + instance.request_row_count = request_row_count + + # Update the default result type if this was not passed in arguments + if instance.result_type == 'yhat': + instance.result_type = instance.seasonality + + # Set the sort order to be used when returning the results + instance.sort_order = sort_order + + # Return the initialized ProphetForQlik instance + return instance + + def predict(self): + """ + Calculate forecasted values using the Prophet library. 
+ """ + + # If the input data frame contains less than 2 non-Null rows, prediction is not possible + if len(self.input_df) - self.input_df.y.isnull().sum() <= 2: + + if self.debug == 'true': + self._print_log(3) + + # A series of null values is returned to avoid an error in Qlik + return pd.Series([np.NaN for y in range(self.request_row_count)]) + + # Instantiate a Prophet object and fit the input data frame: + + if len(self.prophet_kwargs) > 0: + self.model = Prophet(**self.prophet_kwargs) + else: + self.model = Prophet() + + # Add custom seasonalities if defined in the arguments + if self.add_seasonality is not None and len(self.add_seasonality_kwargs) > 0: + self.model.add_seasonality(**self.add_seasonality_kwargs) + + self.model.fit(self.input_df) + + # Create a data frame for future values + self.future_df = self.model.make_future_dataframe(**self.make_kwargs) + + # If a logistic growth model is applied add the cap and floor columns to the future data frame + if self.cap is not None: + self.future_df.loc[:,'cap'] = self.cap + + if self.floor is not None: + self.future_df.loc[:,'floor'] = self.floor + + # Prepare the forecast + self._forecast() + + if self.debug == 'true': + self._print_log(4) + + return self.forecast.loc[:,self.result_type] + + def _set_params(self): + """ + Set input parameters based on the request. 
+ Parameters implemented for the Prophet() function are: growth, cap, floor, changepoint_prior_scale, interval_width + Parameters implemented for the make_future_dataframe() function are: freq, periods + Parameters implemented for seasonality are: add_seasonality, seasonality_period, seasonality_fourier, seasonality_prior_scale + Parameters implemented for holidays are: holidays_prior_scale, lower_window, upper_window + Additional parameters for seasonlity requests are: weekly_start, yearly_start + Additional parameters used are: return, take_log, seasonality, debug + """ + + # Calculate the forecast periods based on the number of placeholders in the data + self.forecast_periods = ProphetForQlik._count_placeholders(self.request_df.loc[:,'y']) + + # Set the row count in the original request + self.request_row_count = len(self.request_df) + len(self.NaT_df) + + # Set default values which will be used if an argument is not passed + self.result_type = 'yhat' + self.take_log = 'false' + self.seasonality = 'yearly' + self.debug = 'false' + self.freq = 'D' + self.cap = None + self.floor = None + self.changepoint_prior_scale = None + self.interval_width = None + self.add_seasonality = None + self.seasonality_period = None + self.seasonality_fourier = None + self.seasonality_prior_scale = None + self.holidays_prior_scale = None + self.is_seasonality_request = False + self.weekly_start = 6 # Defaulting to a Monday start for the week as used in Qlik + self.yearly_start = 0 + self.lower_window = None + self.upper_window = None + + # Set optional parameters + + # Check if there is a fourth column in the request + try: + # If there is a fourth column, it is assumed to contain the key word arguments + args = self.request[0].rows[0].duals[3].strData + + # The third column should then provide the holiday name or null for each row + self.has_holidays = True + + except IndexError: + # If there is no fourth column, the request does not include holidays + self.has_holidays = False + + 
# If the fourth column did not exist, we try again with the third column + if not self.has_holidays: + try: + args = self.request[0].rows[0].duals[2].strData + except IndexError: + args = None + + # If the key word arguments were included in the request, get the parameters and values + if args is not None: + + # The parameter and values are transformed into key value pairs + args = args.translate(str.maketrans('', '', string.whitespace)).split(",") + self.kwargs = dict([arg.split("=") for arg in args]) + + # Make sure the key words are in lower case + self.kwargs = {k.lower(): v for k, v in self.kwargs.items()} + + # Set the return type + # Valid values are: yhat, trend, seasonal, seasonalities. + # Add _lower or _upper to the series name to get lower or upper limits. + if 'return' in self.kwargs: + self.result_type = self.kwargs['return'].lower() + + # Set the option to take a logarithm of y values before forecast calculations + # Valid values are: true, false + if 'take_log' in self.kwargs: + self.take_log = self.kwargs['take_log'].lower() + + # Set the type of seasonlity requested. 
Used only for seasonality requests + # Valid values are: yearly, weekly, monthly, holidays + if 'seasonality' in self.kwargs: + self.seasonality = self.kwargs['seasonality'].lower() + + # Set the debug option for generating execution logs + # Valid values are: true, false + if 'debug' in self.kwargs: + self.debug = self.kwargs['debug'].lower() + + # Set the frequency of the timeseries + # Any valid frequency for pd.date_range, such as 'D' or 'M' + # For options see: http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases + if 'freq' in self.kwargs: + self.freq = self.kwargs['freq'] + + # Set the cap which adds an upper limit at which the forecast will saturate + # This changes the default linear growth model to a logistic growth model + if 'cap' in self.kwargs: + self.cap = float(self.kwargs['cap']) + + # Set the floor which adds a lower limit at which the forecast will saturate + # To use a logistic growth trend with a floor, a cap must also be specified + if 'floor' in self.kwargs: + self.floor = float(self.kwargs['floor']) + + # Set the changepoint_prior_scale to adjust the trend flexibility + # If the trend changes are being overfit (too much flexibility) or underfit (not enough flexibility), + # you can adjust the strength of the sparse prior. + # Default value is 0.05. Increasing it will make the trend more flexible. + if 'changepoint_prior_scale' in self.kwargs: + self.changepoint_prior_scale = float(self.kwargs['changepoint_prior_scale']) + + # Set the width for the uncertainty intervals + # Default value is 0.8 (i.e. 80%) + if 'interval_width' in self.kwargs: + self.interval_width = float(self.kwargs['interval_width']) + + # Set additional seasonality to be added to the model + # Default seasonalities are yearly and weekly, as well as daily for sub daily data + if 'add_seasonality' in self.kwargs: + self.add_seasonality = self.kwargs['add_seasonality'].lower() + + # Set the seasonality period + # e.g. 
30.5 for 'monthly' seasonality
+            if 'seasonality_period' in self.kwargs:
+                self.seasonality_period = float(self.kwargs['seasonality_period'])
+
+            # Set the seasonality fourier terms
+            # Increasing the number of Fourier terms allows the seasonality to fit faster changing cycles,
+            # but can also lead to overfitting
+            if 'seasonality_fourier' in self.kwargs:
+                self.seasonality_fourier = int(self.kwargs['seasonality_fourier'])
+
+            # Set the seasonality prior scale to smooth seasonality effects.
+            # Reducing this parameter dampens seasonal effects
+            if 'seasonality_prior_scale' in self.kwargs:
+                self.seasonality_prior_scale = float(self.kwargs['seasonality_prior_scale'])
+
+            # Set the holiday prior scale to smooth holiday effects.
+            # Reducing this parameter dampens holiday effects. Default is 10, which provides very little regularization.
+            if 'holidays_prior_scale' in self.kwargs:
+                self.holidays_prior_scale = float(self.kwargs['holidays_prior_scale'])
+
+            # Set the weekly start for 'weekly' seasonality requests
+            # Default week start is 0 which represents Sunday. Add offset as required.
+            if 'weekly_start' in self.kwargs:
+                self.weekly_start = int(self.kwargs['weekly_start'])
+
+            # Set the yearly start for 'yearly' seasonality requests
+            # Default yearly start is 0 which represents 1st of Jan. Add offset as required.
+            if 'yearly_start' in self.kwargs:
+                self.yearly_start = int(self.kwargs['yearly_start'])
+
+            # Set a period to extend the holidays by lower_window number of days before the date.
+            # This can be used to extend the holiday effect
+            if 'lower_window' in self.kwargs:
+                self.lower_window = int(self.kwargs['lower_window'])
+
+            # Set a period to extend the holidays by upper_window number of days after the date. 
+ # This can be used to extend the holiday effect + if 'upper_window' in self.kwargs: + self.upper_window = int(self.kwargs['upper_window']) + + # Create dictionary of arguments for the Prophet() and make_future_dataframe() functions + self.prophet_kwargs = {} + self.make_kwargs = {} + self.add_seasonality_kwargs = {} + + # Populate the parameters in the corresponding dictionary: + + self.make_kwargs['periods'] = self.forecast_periods + + if self.freq is not None: + self.make_kwargs['freq'] = self.freq + + if self.cap is not None: + self.prophet_kwargs['growth'] = 'logistic' + + if self.changepoint_prior_scale is not None: + self.prophet_kwargs['changepoint_prior_scale'] = self.changepoint_prior_scale + + if self.interval_width is not None: + self.prophet_kwargs['interval_width'] = self.interval_width + + if self.add_seasonality is not None: + self.add_seasonality_kwargs['name'] = self.add_seasonality + self.add_seasonality_kwargs['period'] = self.seasonality_period + self.add_seasonality_kwargs['fourier_order'] = self.seasonality_fourier + + if self.seasonality_prior_scale is not None: + self.prophet_kwargs['seasonality_prior_scale'] = self.seasonality_prior_scale + + if self.holidays_prior_scale is not None: + self.prophet_kwargs['holidays_prior_scale'] = self.holidays_prior_scale + + def _forecast(self): + """ + Execute the forecast algorithm according to the request type + """ + + # If this is a seasonality request, we need to return the relevant seasonlity component + if self.is_seasonality_request: + + if self.seasonality == 'weekly': + # Prepare the seasonality data frame + # Parameter start needs to be any arbitrary week starting on a Sunday + days = (pd.date_range(start='2017-01-01', periods=7) + pd.Timedelta(days=self.weekly_start)) + df_w = self.model.seasonality_plot_df(days) + + # Calculate seasonal components + self.forecast = self.model.predict_seasonal_components(df_w) + + elif self.seasonality == 'yearly': + # Prepare the seasonality data frame + # 
Parameter start needs to be 1st January for any arbitrary year + days = (pd.date_range(start='2017-01-01', periods=365) + pd.Timedelta(days=self.yearly_start)) + df_y = self.model.seasonality_plot_df(days) + + # Calculate seasonal components + self.forecast = self.model.predict_seasonal_components(df_y) + + else: + # Prepare the seasonality data frame + start = pd.to_datetime('2017-01-01 0000') + period = self.model.seasonalities[self.seasonality]['period'] + + end = start + pd.Timedelta(days=period) + # plot_points = 200 + # plot_points is used instead of period below in fbprophet/forecaster.py. + # However, it seems to make more sense to use period given the expected usage in Qlik + intervals = pd.to_datetime(np.linspace(start.value, end.value, period)) + + df_x = self.model.seasonality_plot_df(intervals) + + # Calculate seasonal components + self.forecast = self.model.predict_seasonal_components(df_x) + + # Set the correct sort order for the response + self.forecast = self.forecast.reindex(self.sort_order.index) + + # For standard forecast the output rows equal the input rows + else: + # Prepare the forecast + self.forecast = self.model.predict(self.future_df) + + # For return=y_then_yhat[_upper / _lower] we return y values followed by relevant results for the forecast periods + if 'y_then_yhat' in self.result_type: + relevant_result = self.result_type.replace('y_then_', '') + + # Copy yhat / yhat_upper / yhat_lower values to the new column + self.forecast.loc[:, self.result_type] = self.forecast.loc[:, relevant_result] + + if 'upper' in self.result_type or 'lower' in self.result_type: + # Overwrite historic values with Nulls + self.forecast.loc[:len(self.forecast) - self.forecast_periods - 1, self.result_type] \ + = np.NaN + else: + # Overwrite with y values for historic data + self.forecast.loc[:len(self.forecast) - self.forecast_periods - 1, self.result_type] \ + = self.request_df.loc[:len(self.request_df) - self.forecast_periods - 1, 'y'] + + # Update to the 
original index from the request data frame + self.forecast.index = self.request_index.index + + # Reset to the original sort order of the data sent by Qlik + self.forecast = self.forecast.sort_index() + + # Undo the logarithmic conversion if it was applied during initialization + if self.take_log == 'true': + self.forecast.loc[:,self.result_type] = np.exp(self.forecast.loc[:,self.result_type]) + + # Add back the null row if it was received in the request + if len(self.NaT_df) > 0: + self.NaT_df = self.NaT_df.rename({'y': self.result_type}, axis='columns') + self.forecast = self.forecast.append(self.NaT_df) + + def _print_log(self, step): + """ + Output useful information to stdout and the log file if debugging is required. + step: Print the corresponding step in the log + """ + + if step == 1: + # Increment log counter for the class. Each instance of the class generates a new log. + self.__class__.log_no += 1 + + # Create a log file for the instance + # Logs will be stored in ..\logs\Prophet Log .txt + self.logfile = os.path.join(os.getcwd(), 'logs', 'Prophet Log {}.txt'.format(self.log_no)) + + # Output log header + sys.stdout.write("ProphetForQlik Log: {0} \n\n".format(time.ctime(time.time()))) + with open(self.logfile,'w') as f: + f.write("ProphetForQlik Log: {0} \n\n".format(time.ctime(time.time()))) + + elif step == 2: + # Output the request and input data frames to the terminal + sys.stdout.write("Prophet parameters: {0}\n\n".format(self.kwargs)) + sys.stdout.write("Instance creation parameters: {0}\n\n".format(self.prophet_kwargs)) + sys.stdout.write("Make future data frame parameters: {0}\n\n".format(self.make_kwargs)) + sys.stdout.write("Add seasonality parameters: {0}\n\n".format(self.add_seasonality_kwargs)) + sys.stdout.write("REQUEST DATA FRAME: {0} rows x cols\n\n".format(self.request_df.shape)) + sys.stdout.write("{0} \n\n".format(self.request_df.to_string())) + if len(self.NaT_df) > 0: + sys.stdout.write("REQUEST NULL VALUES DATA FRAME: {0} rows x 
cols\n\n".format(self.NaT_df.shape)) + sys.stdout.write("{0} \n\n".format(self.NaT_df.to_string())) + sys.stdout.write("INPUT DATA FRAME: {0} rows x cols\n\n".format(self.input_df.shape)) + sys.stdout.write("{} \n\n".format(self.input_df.to_string())) + if self.has_holidays: + sys.stdout.write("HOLIDAYS DATA FRAME: {0} rows x cols\n\n".format(self.holidays_df.shape)) + sys.stdout.write("{0} \n\n".format(self.holidays_df.to_string())) + + # Output the request and input data frames to the log file + with open(self.logfile,'a') as f: + f.write("Prophet parameters: {0}\n\n".format(self.kwargs)) + f.write("Instance creation parameters: {0}\n\n".format(self.prophet_kwargs)) + f.write("Make future data frame parameters: {0}\n\n".format(self.make_kwargs)) + f.write("Add seasonality parameters: {0}\n\n".format(self.add_seasonality_kwargs)) + f.write("REQUEST DATA FRAME: {0} rows x cols\n\n".format(self.request_df.shape)) + f.write("{0} \n\n".format(self.request_df.to_string())) + if len(self.NaT_df) > 0: + f.write("REQUEST NULL VALUES DATA FRAME: {0} rows x cols\n\n".format(self.NaT_df.shape)) + f.write("{0} \n\n".format(self.NaT_df.to_string())) + f.write("INPUT DATA FRAME: {0} rows x cols\n\n".format(self.input_df.shape)) + f.write("{0} \n\n".format(self.input_df.to_string())) + if self.has_holidays: + f.write("HOLIDAYS DATA FRAME: {0} rows x cols\n\n".format(self.holidays_df.shape)) + f.write("{0} \n\n".format(self.holidays_df.to_string())) + + elif step == 3: + # Output in case the input contains less than 2 non-Null rows + sys.stdout.write("\nForecast cannot be generated as the request contains less than two non-Null rows\n\n") + with open(self.logfile,'a') as f: + f.write("\nForecast cannot be generated as the request contains less than two non-Null rows\n\n") + + elif step == 4: + # Output the forecast data frame and returned series to the terminal + sys.stdout.write("\nFORECAST DATA FRAME: {0} rows x cols\n\n".format(self.forecast.shape)) + sys.stdout.write("RESULT 
COLUMNS:\n\n") + [sys.stdout.write("{}\n".format(col)) for col in self.forecast] + sys.stdout.write("\nSAMPLE RESULTS:\n{0} \n\n".format(self.forecast.tail(self.forecast_periods).to_string())) + sys.stdout.write("FORECAST RETURNED:\n{0}\n\n".format(self.forecast.loc[:,self.result_type].to_string())) + + # Output the forecast data frame and returned series to the log file + with open(self.logfile,'a') as f: + f.write("\nFORECAST DATA FRAME: {0} rows x cols\n\n".format(self.forecast.shape)) + f.write("RESULT COLUMNS:\n\n") + [f.write("{}\n".format(col)) for col in self.forecast] + f.write("\nSAMPLE RESULTS:\n{0} \n\n".format(self.forecast.tail(self.forecast_periods).to_string())) + f.write("FORECAST RETURNED:\n{0}\n\n".format(self.forecast.loc[:,self.result_type].to_string())) + + @staticmethod + def _count_placeholders(series): + """ + Count the number of null or zero values at the bottom of a series. + """ + count = 0 + + for i in range(series.size-1, -1, -1): + if pd.isnull(series[i]) or series[i] == 0: + count += 1 + else: + break + + return count + + @staticmethod + def timeit(request): + """ + Time the different components of the forecast + """ + + import timeit + import ServerSideExtension_pb2 as SSE + + # Create a log file for the + logfile = os.path.join(os.getcwd(), 'logs', 'Prophet Performance Log.txt') + + def t1(request): + return ProphetForQlik(request) + + def t2(predictor): + return predictor.predict() + + def t3(forecast): + return forecast.apply(lambda result: iter([SSE.Dual(numData=result)])) + + def t4(response_rows): + return response_rows.apply(lambda duals: SSE.Row(duals=duals)).tolist() + + def dotime1(): + t = timeit.Timer("t1(request)") + time = t.timeit(1) + sys.stdout.write("Time taken to create an instance of ProphetForQlik: {}\n".format(time)) + with open(logfile,'a') as f: + f.write("Time taken to create an instance of ProphetForQlik: {}\n".format(time)) + + predictor = ProphetForQlik(request) + + def dotime2(): + t = 
timeit.Timer("t2(predictor)") + time = t.timeit(1) + sys.stdout.write("Time taken to calculate the forecast: {}\n".format(time)) + with open(logfile,'a') as f: + f.write("Time taken to calculate the forecast: {}\n".format(time)) + + forecast = predictor.predict() + + def dotime3(): + t = timeit.Timer("t3(forecast)") + time = t.timeit(1) + sys.stdout.write("Time taken to convert results to SSE.Dual: {}\n".format(time)) + with open(logfile,'a') as f: + f.write("Time taken to convert results to SSE.Dual: {}\n".format(time)) + + response_rows = forecast.apply(lambda result: iter([SSE.Dual(numData=result)])) + + def dotime4(): + t = timeit.Timer("t4(response_rows)") + time = t.timeit(1) + sys.stdout.write("Time taken to convert duals to SSE.Row: {}\n".format(time)) + with open(logfile,'a') as f: + f.write("Time taken to convert duals to SSE.Row: {}\n".format(time)) + + import builtins + builtins.__dict__.update(locals()) + + dotime1() + dotime2() + dotime3() + dotime4() \ No newline at end of file diff --git a/core/functions.json b/core/functions.json new file mode 100644 index 0000000..aba6aa8 --- /dev/null +++ b/core/functions.json @@ -0,0 +1,70 @@ +{ + "Functions": [ + { + "Id": 0, + "Name": "Correlation", + "Type": 0, + "ReturnType": 1, + "Params": { + "a_series1": 0, + "b_series2": 0, + "c_corr_type": 0 + } + }, + { + "Id": 1, + "Name": "Pearson", + "Type": 0, + "ReturnType": 1, + "Params": { + "a_series1": 0, + "b_series2": 0 + } + }, + { + "Id": 2, + "Name": "Prophet", + "Type": 0, + "ReturnType": 1, + "Params": { + "a_date": 2, + "b_value": 1, + "c_other_args": 0 + } + }, + { + "Id": 3, + "Name": "Prophet_Basic", + "Type": 0, + "ReturnType": 1, + "Params": { + "a_date": 2, + "b_value": 1 + } + }, + { + "Id": 4, + "Name": "Prophet_Holidays", + "Type": 0, + "ReturnType": 1, + "Params": { + "a_date": 2, + "b_value": 1, + "c_holidays": 2, + "d_other_args": 0 + } + }, + { + "Id": 5, + "Name": "Prophet_Seasonality", + "Type": 0, + "ReturnType": 1, + "Params": { + 
"a_season": 2, + "b_time_series": 0, + "c_holidays": 0, + "d_other_args": 0 + } + } + ] +} diff --git a/core/logger.config b/core/logger.config new file mode 100644 index 0000000..8d4294b --- /dev/null +++ b/core/logger.config @@ -0,0 +1,32 @@ +[loggers] +keys=root + +[logger_root] +handlers=console,file +level=NOTSET + +[formatters] +keys=simple,complex + +[formatter_simple] +format=%(asctime)s - %(levelname)s - %(message)s + +[formatter_complex] +format=%(asctime)s - %(levelname)s - %(module)s : %(lineno)d - %(message)s + +[handlers] +keys=file,console + +[handler_file] +class=handlers.TimedRotatingFileHandler +interval=midnight +backupCount=5 +formatter=complex +level=DEBUG +args=('logs/SSEPlugin.log',) + +[handler_console] +class=StreamHandler +formatter=simple +level=INFO +args=(sys.stdout,) diff --git a/generated/ServerSideExtension_pb2.py b/generated/ServerSideExtension_pb2.py new file mode 100644 index 0000000..b0d3458 --- /dev/null +++ b/generated/ServerSideExtension_pb2.py @@ -0,0 +1,886 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: ServerSideExtension.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='ServerSideExtension.proto', + package='qlik.sse', + syntax='proto3', + serialized_pb=_b('\n\x19ServerSideExtension.proto\x12\x08qlik.sse\"\x07\n\x05\x45mpty\"?\n\tParameter\x12$\n\x08\x64\x61taType\x18\x01 \x01(\x0e\x32\x12.qlik.sse.DataType\x12\x0c\n\x04name\x18\x02 \x01(\t\"T\n\x10\x46ieldDescription\x12$\n\x08\x64\x61taType\x18\x01 \x01(\x0e\x32\x12.qlik.sse.DataType\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0c\n\x04tags\x18\x03 \x03(\t\"\xb1\x01\n\x12\x46unctionDefinition\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x0c\x66unctionType\x18\x02 \x01(\x0e\x32\x16.qlik.sse.FunctionType\x12&\n\nreturnType\x18\x03 \x01(\x0e\x32\x12.qlik.sse.DataType\x12#\n\x06params\x18\x04 \x03(\x0b\x32\x13.qlik.sse.Parameter\x12\x12\n\nfunctionId\x18\x05 \x01(\x05\"\x85\x01\n\x0c\x43\x61pabilities\x12\x13\n\x0b\x61llowScript\x18\x01 \x01(\x08\x12/\n\tfunctions\x18\x02 \x03(\x0b\x32\x1c.qlik.sse.FunctionDefinition\x12\x18\n\x10pluginIdentifier\x18\x03 \x01(\t\x12\x15\n\rpluginVersion\x18\x04 \x01(\t\"(\n\x04\x44ual\x12\x0f\n\x07numData\x18\x01 \x01(\x01\x12\x0f\n\x07strData\x18\x02 \x01(\t\"$\n\x03Row\x12\x1d\n\x05\x64uals\x18\x01 \x03(\x0b\x32\x0e.qlik.sse.Dual\"*\n\x0b\x42undledRows\x12\x1b\n\x04rows\x18\x01 \x03(\x0b\x32\r.qlik.sse.Row\"\xa0\x01\n\x13ScriptRequestHeader\x12\x0e\n\x06script\x18\x01 \x01(\t\x12,\n\x0c\x66unctionType\x18\x02 \x01(\x0e\x32\x16.qlik.sse.FunctionType\x12&\n\nreturnType\x18\x03 
\x01(\x0e\x32\x12.qlik.sse.DataType\x12#\n\x06params\x18\x04 \x03(\x0b\x32\x13.qlik.sse.Parameter\"<\n\x15\x46unctionRequestHeader\x12\x12\n\nfunctionId\x18\x01 \x01(\x05\x12\x0f\n\x07version\x18\x02 \x01(\t\"I\n\x13\x43ommonRequestHeader\x12\r\n\x05\x61ppId\x18\x01 \x01(\t\x12\x0e\n\x06userId\x18\x02 \x01(\t\x12\x13\n\x0b\x63\x61rdinality\x18\x03 \x01(\x03\"b\n\x10TableDescription\x12*\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x1a.qlik.sse.FieldDescription\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x14\n\x0cnumberOfRows\x18\x03 \x01(\x03*-\n\x08\x44\x61taType\x12\n\n\x06STRING\x10\x00\x12\x0b\n\x07NUMERIC\x10\x01\x12\x08\n\x04\x44UAL\x10\x02*7\n\x0c\x46unctionType\x12\n\n\x06SCALAR\x10\x00\x12\x0f\n\x0b\x41GGREGATION\x10\x01\x12\n\n\x06TENSOR\x10\x02\x32\xd6\x01\n\tConnector\x12<\n\x0fGetCapabilities\x12\x0f.qlik.sse.Empty\x1a\x16.qlik.sse.Capabilities\"\x00\x12\x45\n\x0f\x45xecuteFunction\x12\x15.qlik.sse.BundledRows\x1a\x15.qlik.sse.BundledRows\"\x00(\x01\x30\x01\x12\x44\n\x0e\x45valuateScript\x12\x15.qlik.sse.BundledRows\x1a\x15.qlik.sse.BundledRows\"\x00(\x01\x30\x01\x42\x03\xf8\x01\x01\x62\x06proto3') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='qlik.sse.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STRING', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMERIC', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1039, + serialized_end=1084, +) +_sym_db.RegisterEnumDescriptor(_DATATYPE) + +DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) +_FUNCTIONTYPE = _descriptor.EnumDescriptor( + name='FunctionType', + full_name='qlik.sse.FunctionType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + 
name='SCALAR', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AGGREGATION', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TENSOR', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1086, + serialized_end=1141, +) +_sym_db.RegisterEnumDescriptor(_FUNCTIONTYPE) + +FunctionType = enum_type_wrapper.EnumTypeWrapper(_FUNCTIONTYPE) +STRING = 0 +NUMERIC = 1 +DUAL = 2 +SCALAR = 0 +AGGREGATION = 1 +TENSOR = 2 + + + +_EMPTY = _descriptor.Descriptor( + name='Empty', + full_name='qlik.sse.Empty', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=39, + serialized_end=46, +) + + +_PARAMETER = _descriptor.Descriptor( + name='Parameter', + full_name='qlik.sse.Parameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dataType', full_name='qlik.sse.Parameter.dataType', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='qlik.sse.Parameter.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=48, + serialized_end=111, +) + + +_FIELDDESCRIPTION = _descriptor.Descriptor( + name='FieldDescription', + 
full_name='qlik.sse.FieldDescription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='dataType', full_name='qlik.sse.FieldDescription.dataType', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='qlik.sse.FieldDescription.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tags', full_name='qlik.sse.FieldDescription.tags', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=113, + serialized_end=197, +) + + +_FUNCTIONDEFINITION = _descriptor.Descriptor( + name='FunctionDefinition', + full_name='qlik.sse.FunctionDefinition', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='qlik.sse.FunctionDefinition.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='functionType', full_name='qlik.sse.FunctionDefinition.functionType', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='returnType', full_name='qlik.sse.FunctionDefinition.returnType', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='params', full_name='qlik.sse.FunctionDefinition.params', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='functionId', full_name='qlik.sse.FunctionDefinition.functionId', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=200, + serialized_end=377, +) + + +_CAPABILITIES = _descriptor.Descriptor( + name='Capabilities', + full_name='qlik.sse.Capabilities', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='allowScript', full_name='qlik.sse.Capabilities.allowScript', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='functions', full_name='qlik.sse.Capabilities.functions', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pluginIdentifier', full_name='qlik.sse.Capabilities.pluginIdentifier', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pluginVersion', full_name='qlik.sse.Capabilities.pluginVersion', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=380, + serialized_end=513, +) + + +_DUAL = _descriptor.Descriptor( + name='Dual', + full_name='qlik.sse.Dual', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='numData', full_name='qlik.sse.Dual.numData', index=0, + number=1, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='strData', full_name='qlik.sse.Dual.strData', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=515, + serialized_end=555, +) + + +_ROW = 
_descriptor.Descriptor( + name='Row', + full_name='qlik.sse.Row', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='duals', full_name='qlik.sse.Row.duals', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=557, + serialized_end=593, +) + + +_BUNDLEDROWS = _descriptor.Descriptor( + name='BundledRows', + full_name='qlik.sse.BundledRows', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='rows', full_name='qlik.sse.BundledRows.rows', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=595, + serialized_end=637, +) + + +_SCRIPTREQUESTHEADER = _descriptor.Descriptor( + name='ScriptRequestHeader', + full_name='qlik.sse.ScriptRequestHeader', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='script', full_name='qlik.sse.ScriptRequestHeader.script', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='functionType', full_name='qlik.sse.ScriptRequestHeader.functionType', index=1, + number=2, type=14, 
cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='returnType', full_name='qlik.sse.ScriptRequestHeader.returnType', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='params', full_name='qlik.sse.ScriptRequestHeader.params', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=640, + serialized_end=800, +) + + +_FUNCTIONREQUESTHEADER = _descriptor.Descriptor( + name='FunctionRequestHeader', + full_name='qlik.sse.FunctionRequestHeader', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='functionId', full_name='qlik.sse.FunctionRequestHeader.functionId', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='version', full_name='qlik.sse.FunctionRequestHeader.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=802, + serialized_end=862, +) + + +_COMMONREQUESTHEADER = _descriptor.Descriptor( + name='CommonRequestHeader', + full_name='qlik.sse.CommonRequestHeader', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='appId', full_name='qlik.sse.CommonRequestHeader.appId', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='userId', full_name='qlik.sse.CommonRequestHeader.userId', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cardinality', full_name='qlik.sse.CommonRequestHeader.cardinality', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=864, + serialized_end=937, +) + + +_TABLEDESCRIPTION = _descriptor.Descriptor( + name='TableDescription', + full_name='qlik.sse.TableDescription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='qlik.sse.TableDescription.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='name', full_name='qlik.sse.TableDescription.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='numberOfRows', full_name='qlik.sse.TableDescription.numberOfRows', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=939, + serialized_end=1037, +) + +_PARAMETER.fields_by_name['dataType'].enum_type = _DATATYPE +_FIELDDESCRIPTION.fields_by_name['dataType'].enum_type = _DATATYPE +_FUNCTIONDEFINITION.fields_by_name['functionType'].enum_type = _FUNCTIONTYPE +_FUNCTIONDEFINITION.fields_by_name['returnType'].enum_type = _DATATYPE +_FUNCTIONDEFINITION.fields_by_name['params'].message_type = _PARAMETER +_CAPABILITIES.fields_by_name['functions'].message_type = _FUNCTIONDEFINITION +_ROW.fields_by_name['duals'].message_type = _DUAL +_BUNDLEDROWS.fields_by_name['rows'].message_type = _ROW +_SCRIPTREQUESTHEADER.fields_by_name['functionType'].enum_type = _FUNCTIONTYPE +_SCRIPTREQUESTHEADER.fields_by_name['returnType'].enum_type = _DATATYPE +_SCRIPTREQUESTHEADER.fields_by_name['params'].message_type = _PARAMETER +_TABLEDESCRIPTION.fields_by_name['fields'].message_type = _FIELDDESCRIPTION +DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY +DESCRIPTOR.message_types_by_name['Parameter'] = _PARAMETER +DESCRIPTOR.message_types_by_name['FieldDescription'] = _FIELDDESCRIPTION +DESCRIPTOR.message_types_by_name['FunctionDefinition'] = _FUNCTIONDEFINITION 
+DESCRIPTOR.message_types_by_name['Capabilities'] = _CAPABILITIES +DESCRIPTOR.message_types_by_name['Dual'] = _DUAL +DESCRIPTOR.message_types_by_name['Row'] = _ROW +DESCRIPTOR.message_types_by_name['BundledRows'] = _BUNDLEDROWS +DESCRIPTOR.message_types_by_name['ScriptRequestHeader'] = _SCRIPTREQUESTHEADER +DESCRIPTOR.message_types_by_name['FunctionRequestHeader'] = _FUNCTIONREQUESTHEADER +DESCRIPTOR.message_types_by_name['CommonRequestHeader'] = _COMMONREQUESTHEADER +DESCRIPTOR.message_types_by_name['TableDescription'] = _TABLEDESCRIPTION +DESCRIPTOR.enum_types_by_name['DataType'] = _DATATYPE +DESCRIPTOR.enum_types_by_name['FunctionType'] = _FUNCTIONTYPE + +Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), dict( + DESCRIPTOR = _EMPTY, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.Empty) + )) +_sym_db.RegisterMessage(Empty) + +Parameter = _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), dict( + DESCRIPTOR = _PARAMETER, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.Parameter) + )) +_sym_db.RegisterMessage(Parameter) + +FieldDescription = _reflection.GeneratedProtocolMessageType('FieldDescription', (_message.Message,), dict( + DESCRIPTOR = _FIELDDESCRIPTION, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.FieldDescription) + )) +_sym_db.RegisterMessage(FieldDescription) + +FunctionDefinition = _reflection.GeneratedProtocolMessageType('FunctionDefinition', (_message.Message,), dict( + DESCRIPTOR = _FUNCTIONDEFINITION, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.FunctionDefinition) + )) +_sym_db.RegisterMessage(FunctionDefinition) + +Capabilities = _reflection.GeneratedProtocolMessageType('Capabilities', (_message.Message,), dict( + DESCRIPTOR = _CAPABILITIES, + __module__ = 'ServerSideExtension_pb2' + # 
@@protoc_insertion_point(class_scope:qlik.sse.Capabilities) + )) +_sym_db.RegisterMessage(Capabilities) + +Dual = _reflection.GeneratedProtocolMessageType('Dual', (_message.Message,), dict( + DESCRIPTOR = _DUAL, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.Dual) + )) +_sym_db.RegisterMessage(Dual) + +Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict( + DESCRIPTOR = _ROW, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.Row) + )) +_sym_db.RegisterMessage(Row) + +BundledRows = _reflection.GeneratedProtocolMessageType('BundledRows', (_message.Message,), dict( + DESCRIPTOR = _BUNDLEDROWS, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.BundledRows) + )) +_sym_db.RegisterMessage(BundledRows) + +ScriptRequestHeader = _reflection.GeneratedProtocolMessageType('ScriptRequestHeader', (_message.Message,), dict( + DESCRIPTOR = _SCRIPTREQUESTHEADER, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.ScriptRequestHeader) + )) +_sym_db.RegisterMessage(ScriptRequestHeader) + +FunctionRequestHeader = _reflection.GeneratedProtocolMessageType('FunctionRequestHeader', (_message.Message,), dict( + DESCRIPTOR = _FUNCTIONREQUESTHEADER, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.FunctionRequestHeader) + )) +_sym_db.RegisterMessage(FunctionRequestHeader) + +CommonRequestHeader = _reflection.GeneratedProtocolMessageType('CommonRequestHeader', (_message.Message,), dict( + DESCRIPTOR = _COMMONREQUESTHEADER, + __module__ = 'ServerSideExtension_pb2' + # @@protoc_insertion_point(class_scope:qlik.sse.CommonRequestHeader) + )) +_sym_db.RegisterMessage(CommonRequestHeader) + +TableDescription = _reflection.GeneratedProtocolMessageType('TableDescription', (_message.Message,), dict( + DESCRIPTOR = _TABLEDESCRIPTION, + __module__ = 'ServerSideExtension_pb2' 
+ # @@protoc_insertion_point(class_scope:qlik.sse.TableDescription) + )) +_sym_db.RegisterMessage(TableDescription) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\370\001\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class ConnectorStub(object): + """* + The communication service provided between the Qlik engine and the plugin. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetCapabilities = channel.unary_unary( + '/qlik.sse.Connector/GetCapabilities', + request_serializer=Empty.SerializeToString, + response_deserializer=Capabilities.FromString, + ) + self.ExecuteFunction = channel.stream_stream( + '/qlik.sse.Connector/ExecuteFunction', + request_serializer=BundledRows.SerializeToString, + response_deserializer=BundledRows.FromString, + ) + self.EvaluateScript = channel.stream_stream( + '/qlik.sse.Connector/EvaluateScript', + request_serializer=BundledRows.SerializeToString, + response_deserializer=BundledRows.FromString, + ) + + + class ConnectorServicer(object): + """* + The communication service provided between the Qlik engine and the plugin. + """ + + def GetCapabilities(self, request, context): + """/ A handshake call for the Qlik engine to retrieve the capability of the plugin. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExecuteFunction(self, request_iterator, context): + """/ Requests a function to be executed as specified in the header. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def EvaluateScript(self, request_iterator, context): + """/ Requests a script to be evaluated as specified in the header. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_ConnectorServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetCapabilities': grpc.unary_unary_rpc_method_handler( + servicer.GetCapabilities, + request_deserializer=Empty.FromString, + response_serializer=Capabilities.SerializeToString, + ), + 'ExecuteFunction': grpc.stream_stream_rpc_method_handler( + servicer.ExecuteFunction, + request_deserializer=BundledRows.FromString, + response_serializer=BundledRows.SerializeToString, + ), + 'EvaluateScript': grpc.stream_stream_rpc_method_handler( + servicer.EvaluateScript, + request_deserializer=BundledRows.FromString, + response_serializer=BundledRows.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'qlik.sse.Connector', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaConnectorServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """* + The communication service provided between the Qlik engine and the plugin. + """ + def GetCapabilities(self, request, context): + """/ A handshake call for the Qlik engine to retrieve the capability of the plugin. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ExecuteFunction(self, request_iterator, context): + """/ Requests a function to be executed as specified in the header. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def EvaluateScript(self, request_iterator, context): + """/ Requests a script to be evaluated as specified in the header. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaConnectorStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """* + The communication service provided between the Qlik engine and the plugin. + """ + def GetCapabilities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """/ A handshake call for the Qlik engine to retrieve the capability of the plugin. + """ + raise NotImplementedError() + GetCapabilities.future = None + def ExecuteFunction(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """/ Requests a function to be executed as specified in the header. + """ + raise NotImplementedError() + def EvaluateScript(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """/ Requests a script to be evaluated as specified in the header. + """ + raise NotImplementedError() + + + def beta_create_Connector_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('qlik.sse.Connector', 'EvaluateScript'): BundledRows.FromString, + ('qlik.sse.Connector', 'ExecuteFunction'): BundledRows.FromString, + ('qlik.sse.Connector', 'GetCapabilities'): Empty.FromString, + } + response_serializers = { + ('qlik.sse.Connector', 'EvaluateScript'): BundledRows.SerializeToString, + ('qlik.sse.Connector', 'ExecuteFunction'): BundledRows.SerializeToString, + ('qlik.sse.Connector', 'GetCapabilities'): Capabilities.SerializeToString, + } + method_implementations = { + ('qlik.sse.Connector', 'EvaluateScript'): face_utilities.stream_stream_inline(servicer.EvaluateScript), + ('qlik.sse.Connector', 'ExecuteFunction'): face_utilities.stream_stream_inline(servicer.ExecuteFunction), + ('qlik.sse.Connector', 'GetCapabilities'): face_utilities.unary_unary_inline(servicer.GetCapabilities), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Connector_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('qlik.sse.Connector', 'EvaluateScript'): BundledRows.SerializeToString, + ('qlik.sse.Connector', 'ExecuteFunction'): BundledRows.SerializeToString, + ('qlik.sse.Connector', 'GetCapabilities'): Empty.SerializeToString, + } + response_deserializers = { + ('qlik.sse.Connector', 'EvaluateScript'): BundledRows.FromString, + ('qlik.sse.Connector', 'ExecuteFunction'): BundledRows.FromString, + ('qlik.sse.Connector', 'GetCapabilities'): Capabilities.FromString, + } + cardinalities = { + 'EvaluateScript': cardinality.Cardinality.STREAM_STREAM, + 'ExecuteFunction': cardinality.Cardinality.STREAM_STREAM, + 'GetCapabilities': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'qlik.sse.Connector', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/generated/__init__.py b/generated/__init__.py new file mode 100644 index 0000000..e69de29