From 95f0d4e9a69bd7261a6ef24a501e4e18f3810e40 Mon Sep 17 00:00:00 2001 From: Georg Kallidis Date: Tue, 12 Nov 2024 13:31:40 +0000 Subject: [PATCH 1/4] Update yaml default load second argument yaml.Loader; Refactor: In handler.py method traceback and use in exception; Add print statements (as debug hints) in session,py and other modules; Fix in handler.py by removing bucket (which is no key, value pair) from setting into session headers as argument for start_response call); Add __init__py in plugins folder to allow import from makeAccount.py and remove local KibbleDatabase class there; Make in database.py *Wrapper classes private; Add into kibble.yaml configuration key elasticsearch versionHint (by default 8) to allow for more robust and transparent initialization; Refactor in setup.py into methods makeIndices and makeAccount(); Fix check for undefined snap in kibble.v1.js. Improvement: Evaluate result in makeAccount.py; Remove cgi import (not used) and deprecated since py 3.11; Use __debug__ to print out more debugging information; Set cookie httpOnly; Update setup docs (tested with python 3.9.1 and 3.13.0). --- api/handler.py | 33 +++++++++++++------ api/pages/__init__.py | 8 +++++ api/pages/account.py | 2 +- api/pages/org/sourcetypes.py | 2 +- api/pages/session.py | 37 ++++++++++++++++++--- api/pages/sources.py | 2 +- api/pages/widgets.py | 2 +- api/plugins/__init__.py | 20 ++++++++++++ api/plugins/database.py | 33 +++++++++++-------- api/plugins/openapi.py | 2 +- api/plugins/session.py | 14 ++++++-- api/yaml/openapi/combine.py | 8 ++--- docs/source/setup.rst | 11 +++++-- setup/makeaccount.py | 35 ++++++++------------ setup/setup.py | 62 ++++++++++++++++++++++++++---------- ui/js/kibble.v1.js | 10 +++--- 16 files changed, 197 insertions(+), 84 deletions(-) create mode 100644 api/plugins/__init__.py diff --git a/api/handler.py b/api/handler.py index d767e692..6c6956b7 100644 --- a/api/handler.py +++ b/api/handler.py @@ -24,7 +24,6 @@ # Main imports -import cgi import re import sys import traceback @@ -44,7 +43,7 @@ # Load Kibble master configuration -config = yaml.load(open("yaml/kibble.yaml")) +config = yaml.load(open("yaml/kibble.yaml"), Loader=yaml.Loader) # Instantiate database connections DB = None @@ -117,6 +116,9 @@ def __call__(self, environ, start_response, session): 501: '501 Gateway error' } errHeader = errHeaders[err.code] if err.code in errHeaders else "400 Bad request" + if __debug__: + print("Set response header: %s." % ( errHeader ) ) #'Set-Cookie' + # traceBack(err) start_response(errHeader, [ ('Content-Type', 'application/json')]) yield json.dumps({ @@ -125,11 +127,8 @@ def __call__(self, environ, start_response, session): }, indent = 4) + "\n" return - except: - err_type, err_value, tb = sys.exc_info() - traceback_output = ['API traceback:'] - traceback_output += traceback.format_tb(tb) - traceback_output.append('%s: %s' % (err_type.__name__, err_value)) + except Exception as err: + traceBack(err) # We don't know if response has been given yet, try giving one, fail gracefully. 
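A note on the yaml.load changes that run through this patch: PyYAML 5.1 deprecated calling yaml.load() without an explicit Loader and PyYAML 6 rejects it outright, which is why every call now passes Loader=yaml.Loader; yaml.safe_load() is the stricter drop-in when the files carry no Python-specific tags. A minimal sketch of the pattern, reusing the kibble.yaml path from the hunk above:

    import yaml

    # Explicit Loader keeps the pre-5.1 behaviour; safe_load is preferable when the
    # YAML holds plain configuration values only.
    with open("yaml/kibble.yaml") as f:
        config = yaml.load(f, Loader=yaml.Loader)
        # or: config = yaml.safe_load(f)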
try: start_response('500 Internal Server Error', [ @@ -138,9 +137,17 @@ def __call__(self, environ, start_response, session): pass yield json.dumps({ "code": "500", - "reason": '\n'.join(traceback_output) + "reason": '\n'.join(err.message) }) +def traceBack(err): + print("Initial exception error: %s" % ( err ) ) + err_type, err_value, tb = sys.exc_info() + traceback_output = ['API traceback:'] + traceback_output += traceback.format_tb(tb) + traceback_output.append('%s: %s' % (err_type.__name__, err_value)) + print("Error: traceback_output: %s" % (traceback_output)) + return traceback_output def fourohfour(environ, start_response): """A very simple 404 handler""" @@ -166,12 +173,18 @@ def application(environ, start_response): if m: callback = KibbleAPIWrapper(path, function) session = plugins.session.KibbleSession(DB, environ, config) + if __debug__: + print("Path %s setting in session %s header %s" % ( path, session, session.headers ) ) #'Set-Cookie' a = 0 for bucket in callback(environ, start_response, session): if a == 0: - session.headers.append(bucket) + if __debug__: + print("Checking list type of bucket: %s %s" % ( type(bucket), bucket ) ) + if isinstance(bucket, dict): + print("Added to session headers now %s" % ( session.headers ) ) + session.headers.append(bucket) try: - start_response("200 Okay", session.headers) + start_response("200 Okay", (session.headers) ) except: pass a += 1 diff --git a/api/pages/__init__.py b/api/pages/__init__.py index 67a30574..2fad7a51 100644 --- a/api/pages/__init__.py +++ b/api/pages/__init__.py @@ -38,7 +38,15 @@ def loadPage(path): if os.path.isdir(filepath): loadPage(filepath) else: + # debugging + # print("Init of filepath %s" % filepath ) + # x = filepath.replace(rootpath, "")[1:] + # print("Result replace of filepath %s type: %s" % (x , type(x)) ) + # explanation: remove rootpath from the beginning and truncate last three characters (=.py) at the end and replace any slash before with a dot p = filepath.replace(rootpath, "")[1:].replace('/', '.')[:-3] + # windows path + p = p.replace('\\', '.') + # print("Init of module path: pages.%s" % p ) xp = p.replace('.', '/') print("Loading endpoint pages.%s as %s" % (p, xp)) handlers[xp] = importlib.import_module("pages.%s" % p) diff --git a/api/pages/account.py b/api/pages/account.py index 48577735..da572f6a 100644 --- a/api/pages/account.py +++ b/api/pages/account.py @@ -153,7 +153,7 @@ def run(API, environ, indata, session): # Okay, let's make an account...I guess salt = bcrypt.gensalt() - pwd = bcrypt.hashpw(p.encode('utf-8'), salt).decode('ascii') + pwd = bcrypt.hashpw(p.encode('utf-8'), salt) #.decode('ascii') # Verification code, if needed vsalt = bcrypt.gensalt() diff --git a/api/pages/org/sourcetypes.py b/api/pages/org/sourcetypes.py index 9e5b8afe..3dc7f4ac 100644 --- a/api/pages/org/sourcetypes.py +++ b/api/pages/org/sourcetypes.py @@ -72,7 +72,7 @@ def run(API, environ, indata, session): - types = yaml.load(open("yaml/sourcetypes.yaml")) + types = yaml.load(open("yaml/sourcetypes.yaml"), Loader=yaml.Loader) yield json.dumps(types) diff --git a/api/pages/session.py b/api/pages/session.py index 425cd89a..28072799 100644 --- a/api/pages/session.py +++ b/api/pages/session.py @@ -103,6 +103,9 @@ import hashlib import uuid +# debug +import traceback + def run(API, environ, indata, session): method = environ['REQUEST_METHOD'] @@ -111,10 +114,19 @@ def run(API, environ, indata, session): if method == "PUT": u = indata['email'] p = indata['password'] - if session.DB.ES.exists(index=session.DB.dbname, 
doc_type='useraccount', id = u): + # session.DB.ES calls database.py which wraps for es > 7 doc_tyoe to the index + user_exists = session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = u) + if __debug__: + print("user exists for %s: %s" % (u, user_exists)) + if user_exists: doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = u) hp = doc['_source']['password'] - if bcrypt.hashpw(p.encode('utf-8'), hp.encode('utf-8')).decode('ascii') == hp: + x = bcrypt.hashpw(p.encode('utf-8'), hp.encode('utf-8')) + #print("Check doc %s" % ( doc)) + #print("Check user password %s" % (hp)) + #print("Check ucrypt pw %s" % ( x)) + if x== hp: + #print("Matched pw proceedd with verify %s!" % (session.config['accounts']) ) # If verification is enabled, make sure account is verified if session.config['accounts'].get('verify'): if doc['_source']['verified'] == False: @@ -124,10 +136,17 @@ def run(API, environ, indata, session): 'id': session.cookie, 'timestamp': int(time.time()) } - session.DB.ES.index(index=session.DB.dbname, doc_type='uisession', id = session.cookie, body = sessionDoc) + if __debug__: + print("Saving from session %s to sessionDoc %s." % (session, sessionDoc) ) + res = session.DB.ES.index(index=session.DB.dbname, doc_type='uisession', id = session.cookie, body = sessionDoc) + if __debug__: + print("Saved to index uisession %s." % (res) ) + print("session headers ", ( session.headers) ) yield json.dumps({"message": "Logged in OK!"}) return + #if __debug__: + # traceback.print_stack(); # Fall back to a 403 if username and password did not match raise API.exception(403, "Wrong username or password supplied!") @@ -145,11 +164,20 @@ def run(API, environ, indata, session): # Display the user data for this session if method == "GET": - # Do we have an API key? If not, make one + if __debug__: + print("GET session user %s" % ( session.user)) + print("GET indata token %s" % (indata.get('newtoken'))) + + if (session.user is None or indata is None ): + print("NO user session cookie not set? RETURN") + return + # Do we have an API key? If not, make one - question: Should it be ".. or not indata.get.."? This is not used anywhere. if not session.user.get('token') or indata.get('newtoken'): token = str(uuid.uuid4()) session.user['token'] = token session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = session.user['email'], body = session.user) + if __debug__: + print("saved session user %s token %s" % ( session.user, token)) # Run a quick search of all orgs we have. res = session.DB.ES.search( @@ -162,6 +190,7 @@ def run(API, environ, indata, session): } } ) + print("organisation index search result :%s" % ( res)) orgs = [] for hit in res['hits']['hits']: diff --git a/api/pages/sources.py b/api/pages/sources.py index 0a46756b..40949d58 100644 --- a/api/pages/sources.py +++ b/api/pages/sources.py @@ -222,7 +222,7 @@ def run(API, environ, indata, session): if canModifySource(session): new = 0 old = 0 - stypes = yaml.load(open("yaml/sourcetypes.yaml")) + stypes = yaml.load(open("yaml/sourcetypes.yaml"), Loader=yaml.Loader) for source in indata.get('sources', []): sourceURL = source['sourceURL'] sourceType = source['type'] diff --git a/api/pages/widgets.py b/api/pages/widgets.py index cebda02a..090a355a 100644 --- a/api/pages/widgets.py +++ b/api/pages/widgets.py @@ -54,7 +54,7 @@ def run(API, environ, indata, session): if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - widgets = yaml.load(open("yaml/widgets.yaml")) + widgets = yaml.load(open("yaml/widgets.yaml"), Loader=yaml.Loader) page = indata['pageid'] if not page or page == '0': diff --git a/api/plugins/__init__.py b/api/plugins/__init__.py new file mode 100644 index 00000000..60c38d6e --- /dev/null +++ b/api/plugins/__init__.py @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +""" +Kibble API plugins library package + +""" \ No newline at end of file diff --git a/api/plugins/database.py b/api/plugins/database.py index 80b94dd1..3068da0a 100644 --- a/api/plugins/database.py +++ b/api/plugins/database.py @@ -22,12 +22,11 @@ # Main imports -import cgi import re #import aaa import elasticsearch -class KibbleESWrapper(object): +class _KibbleESWrapper(object): """ Class for rewriting old-style queries to the new ones, where doc_type is an integral part of the DB name @@ -65,7 +64,7 @@ def count(self, index, doc_type = '*', body = None): body = body ) -class KibbleESWrapperSeven(object): +class _KibbleESWrapperSeven(object): """ Class for rewriting old-style queries to the >= 7.x ones, where doc_type is an integral part of the DB name and NO DOC_TYPE! @@ -106,14 +105,22 @@ class KibbleDatabase(object): def __init__(self, config): self.config = config self.dbname = config['elasticsearch']['dbname'] - self.ES = elasticsearch.Elasticsearch([{ - 'host': config['elasticsearch']['host'], - 'port': int(config['elasticsearch']['port']), - 'use_ssl': config['elasticsearch']['ssl'], - 'verify_certs': False, - 'url_prefix': config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '', - 'http_auth': config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None - }], + + defaultELConfig = { + 'host': config['elasticsearch']['host'], + 'port': int(config['elasticsearch']['port']), + } + versionHint = config['elasticsearch']['versionHint'] + if (versionHint >= 7): + defaultELConfig['scheme'] = 'https' if (config['elasticsearch']['ssl']) else 'http' + defaultELConfig['path_prefix'] = config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '' + else: + defaultELConfig['use_ssl'] = config['elasticsearch']['ssl'] + defaultELConfig['verify_certs']: False + defaultELConfig['url_prefix'] = config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '' + defaultELConfig['http_auth'] = config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None + + self.ES = elasticsearch.Elasticsearch([ defaultELConfig ], max_retries=5, retry_on_timeout=True ) @@ -123,7 +130,7 @@ def __init__(self, config): # ES calls to match this. 
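As a reading aid for the versionHint branch above (the live cluster version is still probed right below): a self-contained sketch of the same switch. One detail worth noting is that defaultELConfig['verify_certs']: False uses a colon, which Python parses as an annotation rather than an assignment, so that key is never actually set on the dict.

    # Sketch only, not the project's code: build the Elasticsearch connection
    # settings from the 'elasticsearch' section of kibble.yaml, keyed on the new
    # versionHint option (default 8).
    def build_es_node(es_conf: dict) -> dict:
        node = {"host": es_conf["host"], "port": int(es_conf["port"])}
        if int(es_conf.get("versionHint", 8)) >= 7:
            node["scheme"] = "https" if es_conf.get("ssl") else "http"
            node["path_prefix"] = es_conf.get("uri", "")
        else:
            node["use_ssl"] = bool(es_conf.get("ssl"))
            node["verify_certs"] = False          # '=' here, not ':'
            node["url_prefix"] = es_conf.get("uri", "")
            node["http_auth"] = es_conf.get("auth")
        return node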
self.ESversion = int(self.ES.info()['version']['number'].split('.')[0]) if self.ESversion >= 7: - self.ES = KibbleESWrapperSeven(self.ES) + self.ES = _KibbleESWrapperSeven(self.ES) elif self.ESVersion >= 6: - self.ES = KibbleESWrapper(self.ES) + self.ES = _KibbleESWrapper(self.ES) diff --git a/api/plugins/openapi.py b/api/plugins/openapi.py index ba6153a5..7894c29a 100644 --- a/api/plugins/openapi.py +++ b/api/plugins/openapi.py @@ -55,7 +55,7 @@ def __init__(self, APIFile): if APIFile.endswith(".json") or APIFile.endswith(".js"): self.API = json.load(open(APIFile)) else: - self.API = yaml.load(open(APIFile)) + self.API = yaml.load(open(APIFile), Loader=yaml.Loader) def validateType(self, field, value, ftype): """ Validate a single field value against an expected type """ diff --git a/api/plugins/session.py b/api/plugins/session.py index 68614e13..681fb138 100644 --- a/api/plugins/session.py +++ b/api/plugins/session.py @@ -22,7 +22,6 @@ # Main imports -import cgi import re import sys import traceback @@ -122,12 +121,19 @@ def logout(self): self.user = None except: pass + def newCookie(self): cookie = uuid.uuid4() cookies = http.cookies.SimpleCookie() cookies['kibble_session'] = cookie cookies['kibble_session']['expires'] = 86400 * 365 # Expire one year from now + cookies['kibble_session']['HttpOnly'] = True; # no js write exposure + # cookies['kibble_session']['secure'] = True; # more secure self.headers.append(('Set-Cookie', cookies['kibble_session'].OutputString())) + if __debug__: + print("headers ", ( self.headers) ) + return cookie + def __init__(self, DB, environ, config): """ Loads the current user session or initiates a new session if @@ -138,7 +144,7 @@ def __init__(self, DB, environ, config): self.DB = DB self.headers = [('Content-Type', 'application/json; charset=utf-8')] self.cookie = None - + # Construct the URL we're visiting self.url = "%s://%s" % (environ['wsgi.url_scheme'], environ.get('HTTP_HOST', environ.get('SERVER_NAME'))) self.url += environ.get('SCRIPT_NAME', '/') @@ -182,6 +188,8 @@ def __init__(self, DB, environ, config): except Exception as err: print(err) if not cookie: - self.newCookie() + cookie = self.newCookie() self.cookie = cookie + if __debug__: + print("cookie found/set ", (cookie) ) \ No newline at end of file diff --git a/api/yaml/openapi/combine.py b/api/yaml/openapi/combine.py index 11afc958..f89f53f7 100644 --- a/api/yaml/openapi/combine.py +++ b/api/yaml/openapi/combine.py @@ -40,7 +40,7 @@ def deconstruct(): - yml = yaml.load(open(bpath + "/../openapi.yaml")) + yml = yaml.load(open(bpath + "/../openapi.yaml"), Loader=yaml.Loader) noDefs = 0 print("Dumping paths into pages...") for endpoint, defs in yml['paths'].items(): @@ -103,7 +103,7 @@ def construct(): cyml = m.group(2) print("Weaving in API path %s" % apath) cyml = "\n".join([line[2:] for line in cyml.split("\n")]) - defs = yaml.load(cyml) + defs = yaml.load(cyml, Loader=yaml.Loader) yml['paths'][apath] = defs else: fname = d @@ -117,7 +117,7 @@ def construct(): cyml = m.group(2) print("Weaving in API path %s" % apath) cyml = "\n".join([line[2:] for line in cyml.split("\n")]) - defs = yaml.load(cyml) + defs = yaml.load(cyml, Loader=yaml.Loader) yml['paths'][apath] = defs apidir = os.path.abspath("%s/components" % bpath) print("Scanning %s" % apidir) @@ -130,7 +130,7 @@ def construct(): yml['components'][d] = yml['components'].get(d, {}) fpath = "%s/%s" % (cdir, fname) print("Scanning %s" % fpath) - defs = yaml.load(open(fpath)) + defs = yaml.load(open(fpath), Loader=yaml.Loader) 
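Stepping back to the plugins/session.py newCookie() hunk earlier in this patch: http.cookies renders an integer 'expires' value as a date that many seconds in the future, and the HttpOnly flag keeps the session id out of reach of page scripts. A standalone sketch of the same idea (function name and header list are illustrative, not the project's API):

    import uuid
    import http.cookies

    def new_session_cookie(headers: list) -> str:
        # Append a Set-Cookie header for a fresh kibble_session id and return the id.
        cookie = str(uuid.uuid4())
        jar = http.cookies.SimpleCookie()
        jar["kibble_session"] = cookie
        jar["kibble_session"]["expires"] = 86400 * 365   # rendered as a date one year ahead
        jar["kibble_session"]["httponly"] = True          # not readable via document.cookie
        # jar["kibble_session"]["secure"] = True          # enable once served over HTTPS
        headers.append(("Set-Cookie", jar["kibble_session"].OutputString()))
        return cookie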
yml['components'][d][fname.replace(".yaml", "")] = defs ypath = os.path.abspath("%s/../openapi.yaml" % bpath) with open(ypath, "w") as f: diff --git a/docs/source/setup.rst b/docs/source/setup.rst index ad821704..c403599c 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -154,7 +154,7 @@ be using the Apache HTTP Server and proxy to Gunicorn: # Set this to your domain, or add kibble.localhost to /etc/hosts ServerName kibble.localhost DocumentRoot /var/www/kibble/ui/ - # Proxy to gunicorn for /api/ below: + # Proxy to gunicorn or waitress for /api/ below: ProxyPass /api/ http://localhost:8000/api/ @@ -166,7 +166,14 @@ be using the Apache HTTP Server and proxy to Gunicorn: cd /var/www/kibble/api/ gunicorn -w 10 -b 127.0.0.1:8000 handler:application -t 120 -D -Once httpd is (re)started, you should be able to browse to your new + Alternatively use waitress, e.g. in development like this: + +:: + + cd /var/www/kibble/api/ + waitress-serve --listen=*:8000 handler:application + + Once httpd is (re)started, you should be able to browse to your new Kibble instance. diff --git a/setup/makeaccount.py b/setup/makeaccount.py index 67b41516..93f12b44 100644 --- a/setup/makeaccount.py +++ b/setup/makeaccount.py @@ -20,22 +20,10 @@ import yaml import bcrypt -class KibbleDatabase(object): - def __init__(self, config): - self.config = config - self.dbname = config['elasticsearch']['dbname'] - self.ES = elasticsearch.Elasticsearch([{ - 'host': config['elasticsearch']['host'], - 'port': int(config['elasticsearch']['port']), - 'use_ssl': config['elasticsearch']['ssl'], - 'verify_certs': False, - 'url_prefix': config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '', - 'http_auth': config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None - }], - max_retries=5, - retry_on_timeout=True - ) +import sys +sys.path.append('../') +import api.plugins.database arg_parser = argparse.ArgumentParser() arg_parser.add_argument("-u", "--username", required=True, help="Username (email) of accoun to create") @@ -43,14 +31,15 @@ def __init__(self, config): arg_parser.add_argument("-n", "--name", help="Real name (displayname) of account (optional)") arg_parser.add_argument("-A", "--admin", action="store_true", help="Make account global admin") arg_parser.add_argument("-a", "--orgadmin", action="store_true", help="Make account owner of orgs invited to") -arg_parser.add_argument("-o", "--org", help="Invite to this organisation") +arg_parser.add_argument("-o", "--org", help="Invite to this organisation (id)") args = arg_parser.parse_args() # Load Kibble master configuration -config = yaml.load(open("../api/yaml/kibble.yaml")) +config = yaml.load(open("../api/yaml/kibble.yaml"), Loader=yaml.Loader) -DB = KibbleDatabase(config) +# use es 7 mapping if +DB = api.plugins.database.KibbleDatabase(config) username = args.username password = args.password @@ -61,7 +50,8 @@ def __init__(self, config): aorgs = orgs if adminorg else [] salt = bcrypt.gensalt() -pwd = bcrypt.hashpw(password.encode('utf-8'), salt).decode('ascii') +pwd = bcrypt.hashpw(password.encode('utf-8'), salt) #.decode('ascii') + doc = { 'email': username, # Username (email) 'password': pwd, # Hashed password @@ -72,6 +62,9 @@ def __init__(self, config): 'verified': True, # Account verified via email? 
'userlevel': "admin" if admin else "user" # User level (user/admin) } -DB.ES.index(index=DB.dbname, doc_type='useraccount', id = username, body = doc) -print("Account created!") + +# doc_type is adpated for es > 6 +res = DB.ES.index(index=DB.dbname, doc_type='useraccount', id = username, body = doc) # + +print("Account '%s' %s in index %s!" %( username, res['result'], DB.dbname) ) diff --git a/setup/setup.py b/setup/setup.py index f06bfd2f..37192864 100644 --- a/setup/setup.py +++ b/setup/setup.py @@ -17,6 +17,8 @@ KIBBLE_VERSION = '0.1.0' # ABI/API compat demarcation. KIBBLE_DB_VERSION = 2 # Second database revision +ELASTIC_SEARCH_MIN_VERSION = 8 + import sys if sys.version_info <= (3, 3): @@ -33,7 +35,7 @@ import json mappings = json.load(open("mappings.json")) -myyaml = yaml.load(open("kibble.yaml.sample")) +myyaml = yaml.load(open("kibble.yaml.sample"), Loader=yaml.Loader) dopip = False try: @@ -106,7 +108,7 @@ pass while dbname == "": - dbname = input("What would you like to call the DB index [kibble]: ") + dbname = input("What would you like to call (lowercase) the DB index [kibble]: ") if dbname == "": print("Using default; kibble") dbname = "kibble" @@ -151,19 +153,30 @@ def createIndex(): global mappings - es = Elasticsearch([ - { - 'host': hostname, - 'port': port, - 'use_ssl': False, - 'url_prefix': '' - }], + global es + + defaultELConfig = { + 'host': hostname, + 'port': port + } + + versionHint = ELASTIC_SEARCH_MIN_VERSION + + if versionHint >= 7: + defaultELConfig['scheme'] = 'http' + defaultELConfig['path_prefix'] = '' + else: + defaultELConfig['use_ssl'] = False + defaultELConfig['url_prefix'] = '' + + es = Elasticsearch([ defaultELConfig], max_retries=5, retry_on_timeout=True - ) - + ) + es6 = True if int(es.info()['version']['number'].split('.')[0]) >= 6 else False es7 = True if int(es.info()['version']['number'].split('.')[0]) >= 7 else False + es8 = True if int(es.info()['version']['number'].split('.')[0]) >= 8 else False if not es6: print("New Kibble installations require ElasticSearch 6.x or newer! You appear to be running %s!" % es.info()['version']['number']) sys.exit(-1) @@ -171,7 +184,7 @@ def createIndex(): if es7: mappings['mappings'] = mappings['mappings']['_doc'] # Check if index already exists - if es.indices.exists(dbname+"_api"): + if es.indices.exists(index=dbname+"_api"): if args.skiponexist: # Skip this is DB exists and -k added print("DB prefix exists, but --skiponexist used, skipping this step.") return @@ -192,8 +205,8 @@ def createIndex(): # forum_*: forum stats (SO, Discourse, Askbot etc) 'forum_post', 'forum_topic', - # GitHub stats 'ghstats', + # GitHub stats # im_*: Instant messaging stats 'im_stats', 'im_ops', @@ -241,9 +254,13 @@ def createIndex(): ) print("Indices created! %s " % res) + + +def createAccount(): + global es salt = bcrypt.gensalt() - pwd = bcrypt.hashpw(adminPass.encode('utf-8'), salt).decode('ascii') + pwd = bcrypt.hashpw(adminPass.encode('utf-8'), salt) # .decode('ascii') -> 'str' object has no attribute 'decode' print("Creating administrator account") doc = { 'email': adminName, # Username (email) @@ -252,6 +269,7 @@ def createIndex(): 'organisations': [], # Orgs user belongs to (default is none) 'ownerships': [], # Orgs user owns (default is none) 'defaultOrganisation': None, # Default org for user + #'defaultOrganisation': 'apache', # Default org for user 'verified': True, # Account verified via email? 
'userlevel': "admin" # User level (user/admin) } @@ -259,15 +277,24 @@ def createIndex(): 'apiversion': KIBBLE_VERSION, # Log current API version 'dbversion': KIBBLE_DB_VERSION # Log the database revision we accept (might change!) } - es.index(index=dbname+'_useraccount', doc_type = '_doc', id = adminName, body = doc) - es.index(index=dbname+'_api', doc_type = '_doc', id = 'current', body = dbdoc) + + es7 = True if int(es.info()['version']['number'].split('.')[0]) >= 7 else False + + if es7: + es.index(index=dbname+'_useraccount', id = adminName, body = doc) + es.index(index=dbname+'_api', id = 'current', body = dbdoc) + else: + es.index(index=dbname+'_useraccount', doc_type = '_doc', id = adminName, body = doc) + es.index(index=dbname+'_api', doc_type = '_doc', id = 'current', body = dbdoc) print("Account created!") + try: import logging # elasticsearch logs lots of warnings on retries/connection failure logging.getLogger("elasticsearch").setLevel(logging.ERROR) createIndex() + createAccount() except Exception as e: @@ -296,7 +323,8 @@ def createIndex(): 'host': hostname, 'port': port, 'ssl': False, - 'dbname': dbname + 'dbname': dbname, + 'versionHint': ELASTIC_SEARCH_MIN_VERSION }, 'mail': { 'mailhost': m[0], diff --git a/ui/js/kibble.v1.js b/ui/js/kibble.v1.js index 42f02b3a..bb002f45 100644 --- a/ui/js/kibble.v1.js +++ b/ui/js/kibble.v1.js @@ -3315,7 +3315,7 @@ fetch = function(url, xstate, callback, nocreds) { return xmlHttp.onreadystatechange = function(state) { var e, js, mpart, response; if (xmlHttp.readyState === 4 && xmlHttp.status === 500) { - if (snap) { + if (typeof snap !== "undefined") { snap(xstate); } return; @@ -3362,7 +3362,7 @@ put = function(url, json, xstate, callback, nocreds) { return xmlHttp.onreadystatechange = function(state) { var e, js, response; if (xmlHttp.readyState === 4 && xmlHttp.status === 500) { - if (snap) { + if (typeof snap !== "undefined") { snap(xstate); } return; @@ -3409,7 +3409,7 @@ patch = function(url, json, xstate, callback, nocreds) { return xmlHttp.onreadystatechange = function(state) { var e, js, response; if (xmlHttp.readyState === 4 && xmlHttp.status === 500) { - if (snap) { + if (typeof snap !== "undefined") { snap(xstate); } return; @@ -3456,7 +3456,7 @@ xdelete = function(url, json, xstate, callback, nocreds) { return xmlHttp.onreadystatechange = function(state) { var e, js, response; if (xmlHttp.readyState === 4 && xmlHttp.status === 500) { - if (snap) { + if (typeof snap !== "undefined") { snap(xstate); } return; @@ -3514,7 +3514,7 @@ post = function(url, json, xstate, callback, snap) { return xmlHttp.onreadystatechange = function(state) { var e, response; if (xmlHttp.readyState === 4 && xmlHttp.status === 500) { - if (snap) { + if (typeof snap !== "undefined") { snap(xstate); } } From a443c6a190d4aa20235d1ff16951824956e881db Mon Sep 17 00:00:00 2001 From: Georg Kallidis Date: Fri, 15 Nov 2024 15:09:31 +0100 Subject: [PATCH 2/4] New _KibbleESWrapperEight class inheriting fro ..Seven using Nestedicts to replace interval with calendar_interval in body lists. 
See https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-datehistogram-aggregation.html#calendar_and_fixed_intervals Fix NPE err.message in handler.py and use traceback_output (needs more filtering to not expose stacktraces pathes) --- api/handler.py | 8 +++--- api/pages/code/committers.py | 2 +- api/plugins/database.py | 55 +++++++++++++++++++++++++++++++++++- setup/requirements.txt | 1 + 4 files changed, 60 insertions(+), 6 deletions(-) diff --git a/api/handler.py b/api/handler.py index 6c6956b7..ec0b72fc 100644 --- a/api/handler.py +++ b/api/handler.py @@ -128,7 +128,7 @@ def __call__(self, environ, start_response, session): return except Exception as err: - traceBack(err) + traceback_output = traceBack(err) # We don't know if response has been given yet, try giving one, fail gracefully. try: start_response('500 Internal Server Error', [ @@ -137,7 +137,7 @@ def __call__(self, environ, start_response, session): pass yield json.dumps({ "code": "500", - "reason": '\n'.join(err.message) + "reason": '\n'.join(traceback_output) }) def traceBack(err): @@ -178,8 +178,8 @@ def application(environ, start_response): a = 0 for bucket in callback(environ, start_response, session): if a == 0: - if __debug__: - print("Checking list type of bucket: %s %s" % ( type(bucket), bucket ) ) + #if __debug__: + # print("Checking list type of bucket: %s %s" % ( type(bucket), bucket ) ) if isinstance(bucket, dict): print("Added to session headers now %s" % ( session.headers ) ) session.headers.append(bucket) diff --git a/api/pages/code/committers.py b/api/pages/code/committers.py index 7b6d5183..d2201724 100644 --- a/api/pages/code/committers.py +++ b/api/pages/code/committers.py @@ -96,7 +96,7 @@ def run(API, environ, indata, session): which = 'author_email' role = 'author' - interval = indata.get('interval', 'month') + interval = indata.get('interval', 'month') # or 30d if using fixed_interval el8 fixed-interval or calendar_interval #################################################################### diff --git a/api/plugins/database.py b/api/plugins/database.py index 3068da0a..051cd208 100644 --- a/api/plugins/database.py +++ b/api/plugins/database.py @@ -26,6 +26,8 @@ #import aaa import elasticsearch +from ndicts import NestedDict + class _KibbleESWrapper(object): """ Class for rewriting old-style queries to the new ones, @@ -100,6 +102,55 @@ def count(self, index, doc_type = '*', body = None): body = body ) +class _KibbleESWrapperEight(_KibbleESWrapperSeven): + def __init__(self, ES): + super().__init__(ES) + # to replace key in body in queries + self.replace = {'interval': 'calendar_interval'} # or fixed_interval + + def index(self, index, doc_type, id, body): + if body is not None: + body = self.ndict_replace(body, self.replace) + return self.ES.index(index = index+'_'+doc_type, id = id, body = body) + def update(self, index, doc_type, id, body): + if body is not None: + body = self.ndict_replace(body, self.replace) + return self.ES.update(index = index+'_'+doc_type, id = id, body = body) + def scroll(self, scroll_id, scroll): + if body is not None: + body = self.ndict_replace(body, self.replace) + return self.ES.scroll(scroll_id = scroll_id, scroll = scroll) + def search(self, index, doc_type, size = 100, scroll = None, _source_include = None, body = None): + if body is not None: + body = self.ndict_replace(body, self.replace) + return self.ES.search( + index = index+'_'+doc_type, + size = size, + scroll = scroll, + _source_includes = _source_include, + body = body + 
) + def count(self, index, doc_type = '*', body = None): + if body is not None: + body = self.ndict_replace(body, self.replace) + return self.ES.count( + index = index+'_'+doc_type, + body = body + ) + + def ndict_replace(self, dict, replace): + #print("original body/dict : %s." %(dict) ) + ndict = NestedDict(dict) + new_nd = NestedDict() + for key, value in ndict.items(): + # get(k,k) with second parameter as default return value + result = tuple( replace.get(k, k) for k in key ) + #print("replace %s matched in key %s " %(key, result) ) + new_key = result + new_nd[new_key] = value + new_dict = new_nd.to_dict(); + #print("replaced body/dict: %s." %(new_dict) ) + return new_dict class KibbleDatabase(object): def __init__(self, config): @@ -129,7 +180,9 @@ def __init__(self, config): # If so, we're using the new ES DB mappings, and need to adjust ALL # ES calls to match this. self.ESversion = int(self.ES.info()['version']['number'].split('.')[0]) - if self.ESversion >= 7: + if self.ESversion >= 8: + self.ES = _KibbleESWrapperEight(self.ES) + elif self.ESversion >= 7: self.ES = _KibbleESWrapperSeven(self.ES) elif self.ESVersion >= 6: self.ES = _KibbleESWrapper(self.ES) diff --git a/setup/requirements.txt b/setup/requirements.txt index 24b3bca6..4cb13e07 100644 --- a/setup/requirements.txt +++ b/setup/requirements.txt @@ -4,3 +4,4 @@ bcrypt elasticsearch pre-commit python-dateutil +ndicts From c136cf099a0c2006b27e6ff7d4b7a1956723615d Mon Sep 17 00:00:00 2001 From: Georg Kallidis Date: Fri, 15 Nov 2024 15:26:11 +0100 Subject: [PATCH 3/4] Fixed: Removed overriding scroll method in ..Eight class as no body argument is never set --- api/plugins/database.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/api/plugins/database.py b/api/plugins/database.py index 051cd208..b16fd75d 100644 --- a/api/plugins/database.py +++ b/api/plugins/database.py @@ -116,10 +116,7 @@ def update(self, index, doc_type, id, body): if body is not None: body = self.ndict_replace(body, self.replace) return self.ES.update(index = index+'_'+doc_type, id = id, body = body) - def scroll(self, scroll_id, scroll): - if body is not None: - body = self.ndict_replace(body, self.replace) - return self.ES.scroll(scroll_id = scroll_id, scroll = scroll) + def search(self, index, doc_type, size = 100, scroll = None, _source_include = None, body = None): if body is not None: body = self.ndict_replace(body, self.replace) From 7707fd3967a486cbb0ac0de616eab73e1693e1a7 Mon Sep 17 00:00:00 2001 From: Georg Kallidis Date: Wed, 11 Dec 2024 16:24:33 +0100 Subject: [PATCH 4/4] Provide simple build environment with setuptools using pyproject.toml; add infos about packaging in README.md; Provide a project version reference __version__ in handler, currently set to 1.0.0; Comment debugging statements and add warning for duplicate size in search request. Change in pre-commit-config check hook to pre-stages and update versions of pre-commit-hooks and pre_commit. 
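Background on the _KibbleESWrapperEight body handling that patches 2 to 4 touch: Elasticsearch 8 removed the old interval parameter of date_histogram aggregations in favour of calendar_interval and fixed_interval, so the wrapper renames that key in every request body before it is sent. A dependency-free sketch of the rewrite (the patch itself uses ndicts.NestedDict; the names and the sample body below are illustrative):

    # Recursively rename query-body keys, e.g. 'interval' -> 'calendar_interval',
    # before handing the body to an Elasticsearch 8 client.
    RENAMES = {"interval": "calendar_interval"}   # or fixed_interval for fixed spans

    def rename_keys(node):
        if isinstance(node, dict):
            return {RENAMES.get(k, k): rename_keys(v) for k, v in node.items()}
        if isinstance(node, list):
            return [rename_keys(v) for v in node]
        return node

    body = {"aggs": {"per_month": {"date_histogram": {"field": "date", "interval": "month"}}}}
    assert rename_keys(body)["aggs"]["per_month"]["date_histogram"] == {
        "field": "date", "calendar_interval": "month"}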
--- .pre-commit-config.yaml | 6 ++--- README.md | 27 ++++++++++++++++++- api/handler.py | 30 +++++++++++---------- api/plugins/database.py | 24 +++++++++-------- api/plugins/session.py | 25 +++++++++-------- docs/source/setup.rst | 6 ++--- pyproject.toml | 60 +++++++++++++++++++++++++++++++++++++++++ setup/requirements.txt | 2 +- 8 files changed, 134 insertions(+), 46 deletions(-) create mode 100644 pyproject.toml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07fae567..88a49985 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,15 +16,15 @@ # limitations under the License. --- -default_stages: [commit, push] +default_stages: [pre-commit, pre-push] default_language_version: # force all unspecified python hooks to run python3 python: python3 -minimum_pre_commit_version: "1.20.0" +minimum_pre_commit_version: "3.4.0" repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + rev: v5.0.0 hooks: - id: check-yaml - id: end-of-file-fixer diff --git a/README.md b/README.md index 39f6914a..04b5ea88 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Apache Kibble is a tool to collect, aggregate and visualize data about any softw for the scanners to connect to, and provides the overall management of sources as well as the visualizations and API end points. - **Kibble scanners** ([kibble-scanners](https://github.com/apache/kibble-scanners)) - a collection of - scanning applications each designed to work with a specific type of resource (git repo, mailing list, + scanning applications each designed to work with a specific type of resource (git repo, mailing list, JIRA, etc) and push compiled data objects to the Kibble Server. ### Documentation @@ -30,6 +30,31 @@ service: [https://demo.kibble.apache.org/](https://demo.kibble.apache.org/). For installation steps see the [documentation](https://apache-kibble.readthedocs.io/en/latest/setup.html#installing-the-server). +### Packaging + +After installation of the build requirements + + pip install -q build + +build the project by running + + python -m build + +Find more information here: [Setuptools](https://setuptools.pypa.io/) and in project.toml file. + + +### Installation + +To install **Kibble-1** for development and/or testing from the checked-out code repository, run the following from the repository root: + + pip install -e '.[dev]' + +Find more information [Editable project mode](https://setuptools.pypa.io/en/latest/userguide/development_mode.html). + + +More TBD .. + + ### Contributing We welcome all contributions that improve the state of the Apache Kibble project. 
For contribution guidelines diff --git a/api/handler.py b/api/handler.py index ec0b72fc..4161acf1 100644 --- a/api/handler.py +++ b/api/handler.py @@ -33,6 +33,8 @@ import plugins.database import plugins.openapi +__version__ = "1.0.0" + # Compile valid API URLs from the pages library # Allow backwards compatibility by also accepting .lua URLs urls = [] @@ -55,7 +57,7 @@ class KibbleHTTPError(Exception): def __init__(self, code, message): self.code = code self.message = message - + class KibbleAPIWrapper: """ @@ -66,7 +68,7 @@ def __init__(self, path, func): self.API = KibbleOpenAPI self.path = path self.exception = KibbleHTTPError - + def __call__(self, environ, start_response, session): """Run the function, return response OR return stacktrace""" response = None @@ -89,7 +91,7 @@ def __call__(self, environ, start_response, session): "reason": "Invalid JSON: %s" % err }) return - + # Validate URL against OpenAPI specs try: self.API.validate(environ['REQUEST_METHOD'], self.path, formdata) @@ -101,7 +103,7 @@ def __call__(self, environ, start_response, session): "reason": err.message }) return - + # Call page with env, SR and form data try: response = self.func(self, environ, formdata, session) @@ -118,7 +120,7 @@ def __call__(self, environ, start_response, session): errHeader = errHeaders[err.code] if err.code in errHeaders else "400 Bad request" if __debug__: print("Set response header: %s." % ( errHeader ) ) #'Set-Cookie' - # traceBack(err) + # traceBack(err) start_response(errHeader, [ ('Content-Type', 'application/json')]) yield json.dumps({ @@ -126,7 +128,7 @@ def __call__(self, environ, start_response, session): "reason": err.message }, indent = 4) + "\n" return - + except Exception as err: traceback_output = traceBack(err) # We don't know if response has been given yet, try giving one, fail gracefully. 
@@ -139,16 +141,16 @@ def __call__(self, environ, start_response, session): "code": "500", "reason": '\n'.join(traceback_output) }) - + def traceBack(err): - print("Initial exception error: %s" % ( err ) ) + print("Initial exception error: %s" % ( err ) ) err_type, err_value, tb = sys.exc_info() traceback_output = ['API traceback:'] traceback_output += traceback.format_tb(tb) traceback_output.append('%s: %s' % (err_type.__name__, err_value)) - print("Error: traceback_output: %s" % (traceback_output)) + print("Error: traceback_output: %s" % (traceback_output)) return traceback_output - + def fourohfour(environ, start_response): """A very simple 404 handler""" start_response("404 Not Found", [ @@ -173,15 +175,15 @@ def application(environ, start_response): if m: callback = KibbleAPIWrapper(path, function) session = plugins.session.KibbleSession(DB, environ, config) - if __debug__: - print("Path %s setting in session %s header %s" % ( path, session, session.headers ) ) #'Set-Cookie' + #if __debug__: + # print("Path %s setting in session %s header %s" % ( path, session, session.headers ) ) #'Set-Cookie' a = 0 for bucket in callback(environ, start_response, session): if a == 0: #if __debug__: # print("Checking list type of bucket: %s %s" % ( type(bucket), bucket ) ) if isinstance(bucket, dict): - print("Added to session headers now %s" % ( session.headers ) ) + print("Added to session headers now %s" % ( session.headers ) ) session.headers.append(bucket) try: start_response("200 Okay", (session.headers) ) @@ -194,7 +196,7 @@ def application(environ, start_response): elif isinstance(bucket, bytes): yield bucket return - + for bucket in fourohfour(environ, start_response): yield bytes(bucket, encoding = 'utf-8') diff --git a/api/plugins/database.py b/api/plugins/database.py index b16fd75d..c12b7204 100644 --- a/api/plugins/database.py +++ b/api/plugins/database.py @@ -35,7 +35,7 @@ class _KibbleESWrapper(object): """ def __init__(self, ES): self.ES = ES - + def get(self, index, doc_type, id): return self.ES.get(index = index+'_'+doc_type, doc_type = '_doc', id = id) def exists(self, index, doc_type, id): @@ -73,7 +73,7 @@ class _KibbleESWrapperSeven(object): """ def __init__(self, ES): self.ES = ES - + def get(self, index, doc_type, id): return self.ES.get(index = index+'_'+doc_type, id = id) def exists(self, index, doc_type, id): @@ -101,13 +101,13 @@ def count(self, index, doc_type = '*', body = None): index = index+'_'+doc_type, body = body ) - + class _KibbleESWrapperEight(_KibbleESWrapperSeven): def __init__(self, ES): super().__init__(ES) # to replace key in body in queries self.replace = {'interval': 'calendar_interval'} # or fixed_interval - + def index(self, index, doc_type, id, body): if body is not None: body = self.ndict_replace(body, self.replace) @@ -116,10 +116,13 @@ def update(self, index, doc_type, id, body): if body is not None: body = self.ndict_replace(body, self.replace) return self.ES.update(index = index+'_'+doc_type, id = id, body = body) - + def search(self, index, doc_type, size = 100, scroll = None, _source_include = None, body = None): if body is not None: body = self.ndict_replace(body, self.replace) + if 'size' in body: + print("WARNING duplicate size: body size %s and size param: %s" % (body['size'], size) ) + #del body['size'] return self.ES.search( index = index+'_'+doc_type, size = size, @@ -134,7 +137,7 @@ def count(self, index, doc_type = '*', body = None): index = index+'_'+doc_type, body = body ) - + def ndict_replace(self, dict, replace): #print("original 
body/dict : %s." %(dict) ) ndict = NestedDict(dict) @@ -145,7 +148,7 @@ def ndict_replace(self, dict, replace): #print("replace %s matched in key %s " %(key, result) ) new_key = result new_nd[new_key] = value - new_dict = new_nd.to_dict(); + new_dict = new_nd.to_dict(); #print("replaced body/dict: %s." %(new_dict) ) return new_dict @@ -153,7 +156,7 @@ class KibbleDatabase(object): def __init__(self, config): self.config = config self.dbname = config['elasticsearch']['dbname'] - + defaultELConfig = { 'host': config['elasticsearch']['host'], 'port': int(config['elasticsearch']['port']), @@ -167,12 +170,12 @@ def __init__(self, config): defaultELConfig['verify_certs']: False defaultELConfig['url_prefix'] = config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '' defaultELConfig['http_auth'] = config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None - + self.ES = elasticsearch.Elasticsearch([ defaultELConfig ], max_retries=5, retry_on_timeout=True ) - + # IMPORTANT BIT: Figure out if this is ES < 6.x, 6.x or >= 7.x. # If so, we're using the new ES DB mappings, and need to adjust ALL # ES calls to match this. @@ -183,4 +186,3 @@ def __init__(self, config): self.ES = _KibbleESWrapperSeven(self.ES) elif self.ESVersion >= 6: self.ES = _KibbleESWrapper(self.ES) - diff --git a/api/plugins/session.py b/api/plugins/session.py index 681fb138..4ab5dfa5 100644 --- a/api/plugins/session.py +++ b/api/plugins/session.py @@ -31,13 +31,13 @@ import time class KibbleSession(object): - + def getView(self, viewID): if self.DB.ES.exists(index=self.DB.dbname, doc_type="view", id = viewID): view = self.DB.ES.get(index=self.DB.dbname, doc_type="view", id = viewID) return view['_source']['sourceList'] return [] - + def subFilter(self, subfilter, view = []): if len(subfilter) == 0: return view @@ -56,7 +56,7 @@ def subFilter(self, subfilter, view = []): } }] } - + } } ) @@ -69,7 +69,7 @@ def subFilter(self, subfilter, view = []): if not sources: sources = ['x'] # blank return to not show eeeeverything return sources - + def subType(self, stype, view = []): if len(stype) == 0: return view @@ -95,7 +95,7 @@ def subType(self, stype, view = []): } ] } - + } } ) @@ -108,7 +108,7 @@ def subType(self, stype, view = []): if not sources: sources = ['x'] # blank return to not show eeeeverything return sources - + def logout(self): """Log out user and wipe cookie""" if self.user and self.cookie: @@ -128,12 +128,12 @@ def newCookie(self): cookies['kibble_session'] = cookie cookies['kibble_session']['expires'] = 86400 * 365 # Expire one year from now cookies['kibble_session']['HttpOnly'] = True; # no js write exposure - # cookies['kibble_session']['secure'] = True; # more secure + # cookies['kibble_session']['secure'] = True; # more secure self.headers.append(('Set-Cookie', cookies['kibble_session'].OutputString())) if __debug__: print("headers ", ( self.headers) ) return cookie - + def __init__(self, DB, environ, config): """ Loads the current user session or initiates a new session if @@ -144,11 +144,11 @@ def __init__(self, DB, environ, config): self.DB = DB self.headers = [('Content-Type', 'application/json; charset=utf-8')] self.cookie = None - + # Construct the URL we're visiting self.url = "%s://%s" % (environ['wsgi.url_scheme'], environ.get('HTTP_HOST', environ.get('SERVER_NAME'))) self.url += environ.get('SCRIPT_NAME', '/') - + # Get Kibble cookie cookie = None cookies = None @@ -190,6 +190,5 @@ def __init__(self, DB, environ, config): if not cookie: cookie = self.newCookie() 
self.cookie = cookie - if __debug__: - print("cookie found/set ", (cookie) ) - \ No newline at end of file + #if __debug__: + # print("cookie found/set ", (cookie) ) diff --git a/docs/source/setup.rst b/docs/source/setup.rst index c403599c..652717f6 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -113,7 +113,7 @@ following components installed and set up: - - certifi - - pyyaml - - bcrypt -- Gunicorn for Python 3.x (often called gunicorn3) or mod_wsgi +- Gunicorn for Python 3.x (often called gunicorn3), Waitress or mod_wsgi ########################################### Configuring and Priming the Kibble Instance @@ -167,12 +167,12 @@ be using the Apache HTTP Server and proxy to Gunicorn: gunicorn -w 10 -b 127.0.0.1:8000 handler:application -t 120 -D Alternatively use waitress, e.g. in development like this: - + :: cd /var/www/kibble/api/ waitress-serve --listen=*:8000 handler:application - + Once httpd is (re)started, you should be able to browse to your new Kibble instance. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..6c541e45 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,60 @@ +############################## +# Python packaging settings: # + +[build-system] +requires = ["setuptools", "setuptools-scm"] +build-backend = "setuptools.build_meta" + +[project] +name = "kibble-1" +description = "Apache Kibble is a tool to collect, aggregate and visualize data about any software project that uses commonly known tools." +# version is dynamic + +dependencies = [ + "python-dateutil", + "certifi", + "elasticsearch", + "PyYAML>=5.2", +] + +requires-python = ">=3.9, <4.0" + +authors = [ + { name = "Apache Software Foundation", email = "dev@kibble.apache.org" }, +] +maintainers = [ + { name = "Apache Software Foundation", email="dev@kibble.apache.org" }, +] +keywords = [ + "kibble", "data" ] + +license = { text = "Apache License, Version 2.0" } + +classifiers = [ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Environment :: Console", + "Environment :: Web Environment", + "Framework :: Apache Kibble-1", + "License :: OSI Approved :: Apache Software License", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Topic :: System :: Monitoring" +] + +dynamic = [ + "readme", + "version", +] + +[project.urls] +repository = "https://github.com/apache/kibble-1.git" +"Bug Tracker" = "https://github.com/apache/kibble-1/issues" + +[tool.setuptools] +dynamic = { readme = { file = ["README.md"] }, version = { attr = "handler.__version__" } } +packages.find = { where = ["api"] } diff --git a/setup/requirements.txt b/setup/requirements.txt index 4cb13e07..c1cfe7cc 100644 --- a/setup/requirements.txt +++ b/setup/requirements.txt @@ -2,6 +2,6 @@ certifi pyyaml bcrypt elasticsearch -pre-commit +pre_commit python-dateutil ndicts
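To round off the setup.rst addition: the waitress-serve command can also be started from Python, which is handy in development. A rough equivalent (assumes waitress is installed separately, as it is not listed in setup/requirements.txt, and that the script runs from the api/ directory so handler.py and its yaml/ files resolve):

    # Rough equivalent of: waitress-serve --listen=*:8000 handler:application
    from waitress import serve

    from handler import application

    if __name__ == "__main__":
        serve(application, listen="*:8000")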