diff --git a/.asf.yaml b/.asf.yaml index 98f624ad..0e43aed5 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. -# https://cwiki.apache.org/confluence/display/INFRA/.asf.yaml+features+for+git+repositories +# https://cwiki.apache.org/confluence/display/INFRA/git+-+.asf.yaml+features --- github: description: "Apache Kibble - a tool to collect, aggregate and visualize data about any software project" diff --git a/.github/ISSUE_TEMPLATE/config.ymal b/.github/ISSUE_TEMPLATE/config.yml similarity index 100% rename from .github/ISSUE_TEMPLATE/config.ymal rename to .github/ISSUE_TEMPLATE/config.yml diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 753f97da..bf1a4b79 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,6 +1,6 @@ --- name: Feature request -about: Idea or feature request +about: Idea or feature request title: '' labels: 'kind:feature' assignees: '' diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 00000000..299dda74 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,23 @@ +area:api: + - 'kibble/api/*' + +area:cli: + - 'kibble/cli/*' + +area:scanners: + - 'kibble/scanners/*' + +area:ui: + - 'ui/*' + +area:docs: + - 'docs/*' + - '*.md' + +area:dev: + - '.github/*' + - '.pre-commit.config.yaml' + - 'asf.yaml' + - 'Dockerfile*' + - 'docker*' + - 'setup.*' diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 00000000..7880c5e8 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +name: CI +on: + - push + - pull_request + +jobs: + statics: + name: Static checks + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.8' + - run: pip install '.[devel]' + - uses: pre-commit/action@v1.0.1 + with: + extra_args: --show-diff-on-failure + build-docker: + name: Build kibble dev image + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Build docker image + run: docker-compose -f docker-compose-dev.yaml build setup + - name: Run kibble command + run: docker run apache/kibble kibble --help + - name: Check dependencies + run: docker run apache/kibble pip check + run-tests: + name: Run Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.8' + - run: pip install '.[devel]' + - run: pytest tests --color=yes diff --git a/.github/workflows/labeler.yaml b/.github/workflows/labeler.yaml new file mode 100644 index 00000000..103e3e44 --- /dev/null +++ b/.github/workflows/labeler.yaml @@ -0,0 +1,12 @@ +name: "PR labeler" +on: + - pull_request_target + +jobs: + triage: + name: Label + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@main + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.gitignore b/.gitignore index d562a27b..b12fa18f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # 
Apache Kibble files -api/yaml/kibble.yaml +api/yaml/kibble.yaml* +kibble/api/yaml/kibble.yaml* # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07fae567..30f7f15a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,8 +24,67 @@ minimum_pre_commit_version: "1.20.0" repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + rev: v3.3.0 hooks: - id: check-yaml - id: end-of-file-fixer + exclude: ^ui/vendors/.*$ - id: trailing-whitespace + exclude: ^ui/vendors/.*$ + - id: fix-encoding-pragma + args: + - --remove + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.1.9 + hooks: + - id: insert-license + name: Add license for all other files + exclude: ^\.github/.*$ + args: + - --comment-style + - "|#|" + - --license-filepath + - license-templates/LICENSE.txt + - --fuzzy-match-generates-todo + files: > + \.cfg$|^Dockerfile.*$|\.sh$|\.bash$|\.py$|\.yml$|\.yaml$ + - id: insert-license + name: Add license for all rst files + exclude: ^\.github/.*$ + args: + - --comment-style + - "||" + - --license-filepath + - license-templates/LICENSE.rst + - --fuzzy-match-generates-todo + files: \.rst$ + - id: insert-license + name: Add license for all md and html files + files: \.md$|\.html$ + exclude: ^\.github/.*$| + args: + - --comment-style + - "" + - --license-filepath + - license-templates/LICENSE.txt + - --fuzzy-match-generates-todo + - repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + name: Black + types: [python] + - repo: https://github.com/timothycrosley/isort + rev: 5.6.4 + hooks: + - id: isort + name: Run isort to sort imports + args: ['--multi-line', '3', '--trailing-comma'] + files: \.py$ + - repo: local + hooks: + - id: pylint + name: Pylint on all sources + entry: pylint + language: system + types: [ python ] diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 5fd7277c..d5c66bb2 100644 --- a/CODE_OF_CONDUCT.md +++ 
b/CODE_OF_CONDUCT.md @@ -1,7 +1,7 @@ # Code of Conduct -The Apache Kibble project follows the +The Apache Kibble project follows the [Apache Software Foundation code of conduct](https://www.apache.org/foundation/policies/conduct.html). -If you observe behavior that violates those rules please follow the +If you observe behavior that violates those rules please follow the [ASF reporting guidelines](https://www.apache.org/foundation/policies/conduct#reporting-guidelines). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 86ec40be..d420751e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ # Contributing to Kibble # -## Community +## Community The main development and design discussion happens on our mailing lists. We have a list specifically for development, and one for future user questions and feedback. @@ -15,5 +15,57 @@ We also have: ## Development installation -This project requires Python in higher version than 3.3. -More information will come soon! +You should be able to install Apache Kibble by simply doing: +``` +pip install -e ."[devel]" +``` + +The easiest option to spin up a development environment is to use our development docker-compose. +The development image has mounted all Kibble sources so all your local code changes will be automatically +reflected in the running app. + +First you need to configure the Elasticsearch node: +``` +docker-compose -f docker-compose-dev.yaml up setup +``` +Once you see the +``` +setup_1 | All done, Kibble should...work now :) +``` +Now you can can launch Apache Kibble ui: +``` +docker-compose -f docker-compose-dev.yaml up ui +``` +The ui should be available under `http://0.0.0.0:8000` or `http://localhost:8000`. To log in you can use +the dummy admin account `admin@kibble` and password `kibbleAdmin`. 
+ +You can also start only the API server: +``` +docker-compose -f docker-compose-dev.yaml up kibble +``` + +## Code Quality + +Apache Kibble project is using [pre-commits](https://pre-commit.com) to ensure the quality of the code. +We encourage you to use pre-commits, but it's not required in order to contribute. Every change is checked +on CI and if it does not pass the tests it cannot be accepted. If you want to check locally then +you should install Python3.6 or newer together and run: +```bash +pip install pre-commit +# or +brew install pre-commit +``` +For more installation options visit the [pre-commits](https://pre-commit.com). + +To turn on pre-commit checks for commit operations in git, run: +```bash +pre-commit install +``` +To run all checks on your staged files, run: +```bash +pre-commit run +``` +To run all checks on all files, run: +```bash +pre-commit run --all-files +``` diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 00000000..78500b60 --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,29 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +FROM python:3.8-slim + +USER root +RUN apt-get update +RUN apt-get install -y gcc git unzip cloc + +COPY . 
/kibble/ + +WORKDIR /kibble + +RUN pip install --upgrade pip +RUN pip install -e . diff --git a/NOTICE b/NOTICE index 790683a8..66e7dc12 100644 --- a/NOTICE +++ b/NOTICE @@ -119,7 +119,7 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Metis Dashboard (MIT License) ------------------------------------------------------------------------ -Copyright (c) 2015 onokumus +Copyright (c) 2015 onokumus Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to @@ -186,4 +186,3 @@ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index 7efe0455..3d9f3728 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@

# Apache Kibble +![CI](https://github.com/apache/kibble/workflows/CI/badge.svg) +[![License](http://img.shields.io/:license-Apache%202-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) Apache Kibble is a tool to collect, aggregate and visualize data about any software project that uses commonly known tools. It consists of two components: @@ -8,7 +10,7 @@ Apache Kibble is a tool to collect, aggregate and visualize data about any softw for the scanners to connect to, and provides the overall management of sources as well as the visualizations and API end points. - **Kibble scanners** ([kibble-scanners](https://github.com/apache/kibble-scanners)) - a collection of - scanning applications each designed to work with a specific type of resource (git repo, mailing list, + scanning applications each designed to work with a specific type of resource (git repo, mailing list, JIRA, etc) and push compiled data objects to the Kibble Server. ## Documentation diff --git a/api/handler.py b/api/handler.py deleted file mode 100644 index d767e692..00000000 --- a/api/handler.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -""" -This is the main WSGI handler file for Apache Kibble. -It compiles a list of valid URLs from the 'pages' library folder, -and if a URL matches it runs the specific submodule's run() function. It -also handles CGI parsing and exceptions in the applications. -""" - - -# Main imports -import cgi -import re -import sys -import traceback -import yaml -import json -import plugins.session -import plugins.database -import plugins.openapi - -# Compile valid API URLs from the pages library -# Allow backwards compatibility by also accepting .lua URLs -urls = [] -if __name__ != '__main__': - import pages - for page in pages.handlers: - urls.append((r"^(/api/%s)(/.+)?$" % page, pages.handlers[page].run)) - - -# Load Kibble master configuration -config = yaml.load(open("yaml/kibble.yaml")) - -# Instantiate database connections -DB = None - -# Load Open API specifications -KibbleOpenAPI = plugins.openapi.OpenAPI("yaml/openapi.yaml") - -class KibbleHTTPError(Exception): - def __init__(self, code, message): - self.code = code - self.message = message - - -class KibbleAPIWrapper: - """ - Middleware wrapper for exceptions in the application - """ - def __init__(self, path, func): - self.func = func - self.API = KibbleOpenAPI - self.path = path - self.exception = KibbleHTTPError - - def __call__(self, environ, start_response, session): - """Run the function, return response OR return stacktrace""" - response = None - try: - # Read JSON client data if any - try: - request_size = int(environ.get('CONTENT_LENGTH', 0)) - except (ValueError): - request_size = 0 - requestBody = environ['wsgi.input'].read(request_size) - formdata = {} - if requestBody and len(requestBody) > 0: - try: - formdata = json.loads(requestBody.decode('utf-8')) - except json.JSONDecodeError as err: - start_response('400 Invalid request', [ - ('Content-Type', 'application/json')]) - yield json.dumps({ - "code": 400, - "reason": "Invalid JSON: %s" % err - }) - return - - # Validate URL against OpenAPI specs - 
try: - self.API.validate(environ['REQUEST_METHOD'], self.path, formdata) - except plugins.openapi.OpenAPIException as err: - start_response('400 Invalid request', [ - ('Content-Type', 'application/json')]) - yield json.dumps({ - "code": 400, - "reason": err.message - }) - return - - # Call page with env, SR and form data - try: - response = self.func(self, environ, formdata, session) - if response: - for bucket in response: - yield bucket - except KibbleHTTPError as err: - errHeaders = { - 403: '403 Authentication failed', - 404: '404 Resource not found', - 500: '500 Internal Server Error', - 501: '501 Gateway error' - } - errHeader = errHeaders[err.code] if err.code in errHeaders else "400 Bad request" - start_response(errHeader, [ - ('Content-Type', 'application/json')]) - yield json.dumps({ - "code": err.code, - "reason": err.message - }, indent = 4) + "\n" - return - - except: - err_type, err_value, tb = sys.exc_info() - traceback_output = ['API traceback:'] - traceback_output += traceback.format_tb(tb) - traceback_output.append('%s: %s' % (err_type.__name__, err_value)) - # We don't know if response has been given yet, try giving one, fail gracefully. - try: - start_response('500 Internal Server Error', [ - ('Content-Type', 'application/json')]) - except: - pass - yield json.dumps({ - "code": "500", - "reason": '\n'.join(traceback_output) - }) - - -def fourohfour(environ, start_response): - """A very simple 404 handler""" - start_response("404 Not Found", [ - ('Content-Type', 'application/json')]) - yield json.dumps({ - "code": 404, - "reason": "API endpoint not found" - }, indent = 4) + "\n" - return - - -def application(environ, start_response): - """ - This is the main handler. Every API call goes through here. - Checks against the pages library, and if submod found, runs - it and returns the output. 
- """ - DB = plugins.database.KibbleDatabase(config) - path = environ.get('PATH_INFO', '') - for regex, function in urls: - m = re.match(regex, path) - if m: - callback = KibbleAPIWrapper(path, function) - session = plugins.session.KibbleSession(DB, environ, config) - a = 0 - for bucket in callback(environ, start_response, session): - if a == 0: - session.headers.append(bucket) - try: - start_response("200 Okay", session.headers) - except: - pass - a += 1 - # WSGI prefers byte strings, so convert if regular py3 string - if isinstance(bucket, str): - yield bytes(bucket, encoding = 'utf-8') - elif isinstance(bucket, bytes): - yield bucket - return - - for bucket in fourohfour(environ, start_response): - yield bytes(bucket, encoding = 'utf-8') - - - -if __name__ == '__main__': - KibbleOpenAPI.toHTML() diff --git a/api/pages/__init__.py b/api/pages/__init__.py deleted file mode 100644 index 67a30574..00000000 --- a/api/pages/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -""" -Kibble API scripts library: - - oauth: oauth manager - -""" - -import importlib -import os -# Define all the submodules we have - -rootpath = os.path.dirname(__file__) -print("Reading pages from %s" % rootpath) - -# Import each submodule into a hash called 'handlers' -handlers = {} - -def loadPage(path): - for el in os.listdir(path): - filepath = os.path.join(path, el) - if el.find("__") == -1: - if os.path.isdir(filepath): - loadPage(filepath) - else: - p = filepath.replace(rootpath, "")[1:].replace('/', '.')[:-3] - xp = p.replace('.', '/') - print("Loading endpoint pages.%s as %s" % (p, xp)) - handlers[xp] = importlib.import_module("pages.%s" % p) - -loadPage(rootpath) \ No newline at end of file diff --git a/api/pages/bio/bio.py b/api/pages/bio/bio.py deleted file mode 100644 index 0f43f9a3..00000000 --- a/api/pages/bio/bio.py +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/bio/bio -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Biography' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows some facts about a contributor -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Biography' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows some facts about a contributor -# -######################################################################## - - - - - -""" -This is the contributor trends renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - pid = hashlib.sha1( ("%s%s" % (dOrg, indata.get('email', '???'))).encode('ascii', errors='replace')).hexdigest() - person = {} - if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id = pid): - person = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id = pid)['_source'] - else: - raise API.exception(404, "No such biography!") - - query = { - 'query': { - 'bool': { - 'must': [ - { - 'term': { - 'organisation': dOrg - } - } - ] - } - }, - 'size': 1, - 'sort': [{ 'ts': 'asc' }] - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - codeKey = 'committer_email' - query['query']['bool']['should'] = [ - {'term': {'issueCreator': indata.get('email')}}, - {'term': {'issueCloser': indata.get('email')}}, - {'term': {'sender': indata.get('email')}}, - {'term': {codeKey: indata.get('email')}}, - ] - query['query']['bool']['minimum_should_match'] = 1 - - - # FIRST EMAIL - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - body = query - ) - firstEmail = None - if res['hits']['hits']: - firstEmail = res['hits']['hits'][0]['_source']['ts'] - - # FIRST COMMIT - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - firstCommit = None - if res['hits']['hits']: - firstCommit = res['hits']['hits'][0]['_source']['ts'] - - # FIRST AUTHORSHIP - query['query']['bool']['should'][3] = {'term': {'author_email': indata.get('email')}} - 
res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - firstAuthor = None - if res['hits']['hits']: - firstAuthor = res['hits']['hits'][0]['_source']['ts'] - - - # COUNT EMAIL, CODE, LINES CHANGED - del query['sort'] - del query['size'] - no_emails = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - )['count'] - - no_commits = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - )['count'] - - JSON_OUT = { - 'found': True, - 'bio': { - 'organisation': dOrg, - 'name': person['name'], - 'email': person['email'], - 'id': pid, - 'gravatar': hashlib.md5(person['email'].lower().encode('utf-8')).hexdigest(), - 'firstEmail': firstEmail, - 'firstCommit': firstCommit, - 'firstAuthor': firstAuthor, - 'tags': person.get('tags', []), - 'alts': person.get('alts', []), - 'emails': no_emails, - 'commits': no_commits - }, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/bio/newtimers.py b/api/pages/bio/newtimers.py deleted file mode 100644 index 12245f16..00000000 --- a/api/pages/bio/newtimers.py +++ /dev/null @@ -1,357 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/bio/newtimers -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Biography' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows some facts about a contributor -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Biography' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows some facts about a contributor -# -######################################################################## - - - - - -""" -This is the newtimers list renderer for Kibble -""" - -import json -import time -import hashlib - -def find_earlier(session, query, when, who, which, where, doctype, dOrg): - """Find earlier document pertaining to this user. 
return True if found""" - if 'aggs' in query: - del query['aggs'] - - rangeQuery = {'range': - { - which: { - 'from': 0, - 'to': time.time() - } - } - } - - query['query']['bool']['must'] = [ - rangeQuery, - { - 'term': { - 'organisation': dOrg - } - }, - { - 'term': { - where: who - } - - } - ] - query['size'] = 1 - query['sort'] = [{ which: 'asc' }] - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type=doctype, - body = query - ) - if res['hits']['hits']: - doc = res['hits']['hits'][0]['_source'] - if doc[which] >= when: - return [doc[which], doc] - else: - return [-1, None] - else: - return [-1, None] - - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - - # Keep track of all contributors, and newcomers - contributors = [] - newcomers = {} - - #################################################################### - # Start by grabbing all contributors this period via terms agg # - #################################################################### - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - - - - ############################ - # CODE NEWTIMERS # - ############################ - rangeKey = 'ts' - rangeQuery = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - - query = { - 'query': { - 'bool': { - 'must': [ - rangeQuery, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - - query['aggs'] = { - 'by_committer': { - 'terms': { - 'field': 'committer_email', - 
'size': 500 - } - }, - 'by_author': { - 'terms': { - 'field': 'author_email', - 'size': 500 - } - } - } - - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - - code_contributors = [] - for bucket in res['aggregations']['by_committer']['buckets']: - email = bucket['key'] - if email not in code_contributors: - code_contributors.append(email) - - for bucket in res['aggregations']['by_author']['buckets']: - email = bucket['key'] - if email not in code_contributors: - code_contributors.append(email) - - # Now, for each contributor, find if they have done anything before - for email in code_contributors: - ea = find_earlier(session, query, dateFrom, email, 'ts', 'author_email', 'code_commit', dOrg) - ec = find_earlier(session, query, dateFrom, email, 'ts', 'committer_email', 'code_commit', dOrg) - if ea[0] != -1 and ec[0] != -1: - earliest = ea - if earliest[0] == -1 or (earliest[0] > ec[0] and ec[0] != -1): - earliest = ec - newcomers[email] = { - 'code': earliest - } - - - - ############################ - # ISSUE NEWTIMERS # - ############################ - rangeKey = 'created' - rangeQuery = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - - query = { - 'query': { - 'bool': { - 'must': [ - rangeQuery, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - - query['aggs'] = { - 'by_creator': { - 'terms': { - 'field': 'issueCreator', - 'size': 500 - } - }, - 'by_closer': { - 'terms': { - 'field': 'issueCloser', - 'size': 500 - } - } - } - - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - - issue_contributors = [] - for bucket in res['aggregations']['by_creator']['buckets']: - email = bucket['key'] - if email not in issue_contributors: - issue_contributors.append(email) - - for bucket in res['aggregations']['by_closer']['buckets']: - email = bucket['key'] - if email not in issue_contributors: - issue_contributors.append(email) - - # Now, for each contributor, find if they have done anything before - for email in issue_contributors: - ecr = find_earlier(session, query, dateFrom, email, 'created', 'issueCreator', 'issue', dOrg) - ecl = find_earlier(session, query, dateFrom, email, 'closed', 'issueCloser', 'issue', dOrg) - if ecr[0] != -1 and ecl[0] != -1: - earliest = ecr - if earliest[0] == -1 or (earliest[0] > ecl[0] and ecl[0] != -1): - earliest = ecl - newcomers[email] = newcomers.get(email, {}) - newcomers[email]['issue'] = earliest - - email_contributors = [] - - ################################ - # For each newtimer, get a bio # - ################################ - - for email in newcomers: - pid = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('ascii', errors='replace')).hexdigest() - person = {} - if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id = pid): - person = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id = pid)['_source'] - person['md5'] = hashlib.md5(person['email'].encode('utf-8')).hexdigest() # gravatar needed for UI! 
- newcomers[email]['bio'] = person - - newcomers_code = [] - newcomers_issues = [] - newcomers_email = [] - - # Count newcomers in each category (TODO: put this elsewhere earlier) - for email, entry in newcomers.items(): - if 'code' in entry: - newcomers_code.append(email) - if 'issue' in entry: - newcomers_issues.append(email) - if 'email' in entry: - newcomers_email.append(email) - - JSON_OUT = { - 'okay': True, - 'stats': { - 'code': { - 'newcomers': newcomers_code, - 'seen': len(code_contributors), - }, - 'issues': { - 'newcomers': newcomers_issues, - 'seen': len(issue_contributors), - }, - 'email': { - 'newcomers': newcomers_email, - 'seen': len(email_contributors), - } - }, - 'bios': newcomers, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT, indent = 2) diff --git a/api/pages/bio/trends.py b/api/pages/bio/trends.py deleted file mode 100644 index 776779e9..00000000 --- a/api/pages/bio/trends.py +++ /dev/null @@ -1,320 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/bio/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick trend summary of the past 6 months for a contributor -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick trend summary of the past 6 months for a contributor -# -######################################################################## - - - - - -""" -This is the contributor trends renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - - rangeKey = 'created' - rangeQuery = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - # ISSUES OPENED - query = { - 'query': { - 'bool': { - 'must': [ - rangeQuery, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - codeKey = 'committer_email' if not indata.get('author') else 'author_email' - query['query']['bool']['should'] = [ - {'term': {'issueCreator': indata.get('email')}}, - {'term': {'issueCloser': indata.get('email')}}, - {'term': {'sender': indata.get('email')}}, - {'term': {codeKey: indata.get('email')}}, - ] - query['query']['bool']['minimum_should_match'] = 1 - - - # ISSUES CREATED - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_created = res['count'] - - - # ISSUES CLOSED - rangeKey = "closed" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_closed = res['count'] - - - # EMAIL SENT - rangeKey = "ts" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - ) - no_email_sent = res['count'] - - # COMMITS MADE - rangeKey = "ts" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateFrom, - 'to': dateTo - } - } - } - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits = res['count'] - - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - - # ISSUES OPENED - rangeKey = "created" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - } - - res = session.DB.ES.count( - index=session.DB.dbname, - 
doc_type="issue", - body = query - ) - no_issues_created_before = res['count'] - - - - # ISSUES CLOSED - rangeKey = "closed" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - } - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_closed_before = res['count'] - - - # EMAIL SENT - rangeKey = "ts" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - } - - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - ) - no_email_sent_before = res['count'] - - # CODE COMMITS - rangeKey = "ts" - query['query']['bool']['must'][0] = {'range': - { - rangeKey: { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - } - - - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits_before = res['count'] - - - trends = { - "created": { - 'before': no_issues_created_before, - 'after': no_issues_created, - 'title': "Issues opened this period" - }, - "closed": { - 'before': no_issues_closed_before, - 'after': no_issues_closed, - 'title': "Issues closed this period" - }, - "email": { - 'before': no_email_sent_before, - 'after': no_email_sent, - 'title': "Emails sent this period" - }, - "code": { - 'before': no_commits_before, - 'after': no_commits, - 'title': "Commits this period" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/ci/queue.py b/api/pages/ci/queue.py deleted file mode 100644 index 2ef9f729..00000000 --- a/api/pages/ci/queue.py +++ /dev/null @@ -1,210 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/ci/queue -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows CI queue over time -# -######################################################################## - - - -""" -This is the CI queue timeseries renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! 
- if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - # We only want build sources, so we can sum up later. - viewList = session.subType(['jenkins', 'travis', 'buildbot'], viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'time': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - viewList = [indata.get('source')] - - query['query']['bool']['must'].append({'term': {'sourceID': 'x'}}) - - timeseries = [] - for source in viewList: - query['query']['bool']['must'][2] = {'term': {'sourceID': source}} - - # Get queue stats - query['aggs'] = { - 'timeseries': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - 'size': { - 'avg': { - 'field': 'size' - } - }, - 'blocked': { - 'avg': { - 'field': 'blocked' - } - }, - 'building': { - 'avg': { - 'field': 'building' - } - }, - 'stuck': { - 'avg': { - 'field': 'stuck' - } - }, - 'wait': { - 'avg': { - 'field': 'avgwait' - } - } - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="ci_queue", - size = 0, - body = query - ) - - for bucket in res['aggregations']['timeseries']['buckets']: - ts = int(bucket['key'] / 1000) - bucket['wait']['value'] = bucket['wait'].get('value', 0) or 0 - if bucket['doc_count'] == 0: - continue - - found = False - for t in timeseries: - if t['date'] == ts: - found = True - t['queue size'] += bucket['size']['value'] - t['builds running'] += bucket['building']['value'] - t['average wait (hours)'] += bucket['wait']['value'] - t['builders'] += 1 - if not found: - timeseries.append({ - 'date': ts, - 'queue size': bucket['size']['value'], - 'builds running': bucket['building']['value'], - 'average wait (hours)': bucket['wait']['value'], - 'builders': 1, - }) - - for t in timeseries: - t['average wait (hours)'] = int(t['average wait (hours)']/360)/10.0 - del t['builders'] - - JSON_OUT = { - 'widgetType': { - 'chartType': 'line', # Recommendation for the UI - 'nofill': True - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/ci/status.py b/api/pages/ci/status.py deleted file mode 100644 index a1a8aac8..00000000 --- a/api/pages/ci/status.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env 
python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/ci/status -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows CI queue over time -# -######################################################################## - - - -""" -This is the CI queue status (blocked/stuck) timeseries renderer for Kibble -""" - -import json -import 
time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'time': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get queue stats - query['aggs'] = { - 'timeseries': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - 'size': { - 'avg': { - 'field': 'size' - } - }, - 'blocked': { - 'avg': { - 'field': 'blocked' - } - }, - 'stuck': { - 'avg': { - 'field': 'stuck' - } - }, - 'wait': { - 'avg': { - 'field': 'avgwait' - } - } - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="ci_queue", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['timeseries']['buckets']: - if bucket['doc_count'] == 0: - continue - ts = int(bucket['key'] / 1000) - timeseries.append({ - 'date': ts, - 'builds blocked': bucket['blocked']['value'], - 'builds stuck': bucket['stuck']['value'] - }) - - JSON_OUT = { - 'widgetType': { - 'chartType': 'bar' # Recommendation for the UI - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/ci/top-buildcount.py b/api/pages/ci/top-buildcount.py deleted file mode 100644 index aa704058..00000000 --- a/api/pages/ci/top-buildcount.py +++ /dev/null @@ -1,176 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/ci/top-buildcount -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by lines of code -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 jobs by total builds done. Essentially buildtime, tweaked -# -######################################################################## - - - - - -""" -This is the TopN CI jobs by total build time renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(dateFrom)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(dateTo)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - query['aggs'] = { - 'by_job': { - 'terms': { - 'field': 'jobURL.keyword', - 'size': 5000, - }, - 'aggs': { - 'duration': { - 'sum': { - 'field': 'duration' - } - }, - 'ci': { - 'terms': { - 'field': 'ci.keyword', - 'size': 1 - } - }, - 'name': { - 'terms': { - 'field': 'job.keyword', - 'size': 1 - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="ci_build", - size = 0, - body = query - ) - - jobs = [] - for doc in res['aggregations']['by_job']['buckets']: - job = doc['key'] - builds = doc['doc_count'] - duration = doc['duration']['value'] - ci = doc['ci']['buckets'][0]['key'] - jobname = doc['name']['buckets'][0]['key'] - jobs.append([builds, duration, jobname, ci]) - - topjobs = sorted(jobs, key = lambda x: int(x[0]), reverse = True) - tophash = {} - for v in topjobs: - tophash["%s (%s)" % (v[2], 
v[3])] = v[0] - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/ci/top-buildtime.py b/api/pages/ci/top-buildtime.py deleted file mode 100644 index 6aded754..00000000 --- a/api/pages/ci/top-buildtime.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/ci/top-buildtime -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by lines of code -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 jobs by total build time spent -# -######################################################################## - - - - - -""" -This is the TopN CI jobs by total build time renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(dateFrom)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(dateTo)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - query['aggs'] = { - 'by_job': { - 'terms': { - 'field': 'jobURL.keyword', - 'size': 5000, - }, - 'aggs': { - 'duration': { - 'sum': { - 'field': 'duration' - } - }, - 'ci': { - 'terms': { - 'field': 'ci.keyword', - 'size': 1 - } - }, - 'name': { - 'terms': { - 'field': 'job.keyword', - 'size': 1 - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="ci_build", - size = 0, - body = query - ) - - jobs = [] - for doc in res['aggregations']['by_job']['buckets']: - job = doc['key'] - builds = doc['doc_count'] - duration = doc['duration']['value'] - ci = doc['ci']['buckets'][0]['key'] - jobname = doc['name']['buckets'][0]['key'] - jobs.append([builds, duration, jobname, ci]) - - topjobs = sorted(jobs, key = lambda x: int(x[1]), reverse = True) - top = topjobs[0:24] - if len(topjobs) > 25: - count = 0 - for repo 
in topjobs[24:]: - count += repo[1] - top.append([1, count, "Other jobs", '??']) - - tophash = {} - for v in top: - tophash["%s (%s)" % (v[2], v[3])] = int((v[1]/360000))/10 - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/changes.py b/api/pages/code/changes.py deleted file mode 100644 index c6233d4f..00000000 --- a/api/pages/code/changes.py +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/changes -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show insertions/deletions as a timeseries -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show insertions/deletions as a timeseries -# -######################################################################## - - - - - -""" -This is the code changes timeseries renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'day') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - 'insertions': { - 'sum': { - 'field': 'insertions' - } - }, - 'deletions': { - 'sum': { - 'field': 'deletions' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - icount = bucket['insertions']['value'] - dcount = bucket['deletions']['value'] - timeseries.append({ - 'date': ts, - 'insertions': icount, - 'deletions': dcount - }) - - JSON_OUT = { - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'area' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/commits.py b/api/pages/code/commits.py deleted file mode 100644 index 2899f756..00000000 --- a/api/pages/code/commits.py +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/commits -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show commits as a timeseries -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show commits as a timeseries -# -######################################################################## - - - - - -""" -This is the TopN committers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'day') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - # Get number of committers, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['commits']['buckets']: - ts = int(bucket['key'] / 1000) - count = bucket['doc_count'] - timeseries.append({ - 'date': ts, - 'commits': count - }) - - JSON_OUT = { - 'widgetType': { - 'chartType': 'bar' # Recommendation for the UI - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/committers.py b/api/pages/code/committers.py deleted file mode 100644 index 7b6d5183..00000000 --- a/api/pages/code/committers.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/committers -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of committers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of repos over a given period of time -# -######################################################################## - - - - - -""" -This is the TopN committers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - # Get top 25 committers this period - query['aggs'] = { - 'committers': { - 'terms': { - 'field': which, - 'size': 25 - }, - 'aggs': { - 'byinsertions': { - 'terms': { - 'field': which - }, - 'aggs': { - 'stats': { - 'sum': { - 'field': "insertions" - } - } - } - }, - 'bydeletions': { - 'terms': { - 'field': which - }, - 'aggs': { - 'stats': { - 'sum': { - 'field': "deletions" - } - } - } - }, - } - }, - - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['committers']['buckets']: - email = bucket['key'] - count = bucket['doc_count'] - sha = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('utf-8') ).hexdigest() - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = sha - ) - person = pres['_source'] - person['name'] = person.get('name', 'unknown') - people[email] = person - people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - people[email]['subcount'] = { - 'insertions': int(bucket['byinsertions']['buckets'][0]['stats']['value']), - 'deletions': int(bucket['bydeletions']['buckets'][0]['stats']['value']) - } - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - 'by_committer': { - 'cardinality': { - 'field': 'committer_email' - } - }, - 
'by_author': { - 'cardinality': { - 'field': 'author_email' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - ccount = bucket['by_committer']['value'] - acount = bucket['by_author']['value'] - timeseries.append({ - 'date': ts, - 'committers': ccount, - 'authors': acount - }) - - JSON_OUT = { - 'topN': { - 'denoter': 'commits', - 'items': topN - }, - 'timeseries': timeseries, - 'sorted': people, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/evolution.py b/api/pages/code/evolution.py deleted file mode 100644 index 593bd47c..00000000 --- a/api/pages/code/evolution.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/evolution -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show code evolution as a timeseries -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show code evolution as a timeseries -# -######################################################################## - - - - - -""" -This is the TopN committers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - breakdown = False - onlycode = False - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'time': { - 'from': 0, - 'to': int(time.time()) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # We need scrolling here! - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="evolution", - scroll = '2m', - size = 5000, - body = query - ) - sid = res['_scroll_id'] - scroll_size = res['hits']['total'] - if type(scroll_size) is dict: - scroll_size = scroll_size['value'] # ES >= 7.x - - timeseries = [] - tstmp = {} - - while (scroll_size > 0): - for doc in res['hits']['hits']: - updates = doc['_source'] - ts = updates['time'] #round(updates['time']/86400) * 86400 - if updates['time'] % 86400 != 0: - continue - tstmp[ts] = tstmp.get(ts, {}) - item = tstmp[ts] - if breakdown: - pass - else: - item['code'] = item.get('code', 0) + (updates['loc'] or 0) - item['comments'] = item.get('comments', 0) + (updates['comments'] or 0) - item['blanks'] = item.get('blanks', 0) + (updates['blank'] or 0) - - res = session.DB.ES.scroll(scroll_id = sid, scroll = '1m') - sid = res['_scroll_id'] - scroll_size = len(res['hits']['hits']) - - for k, v in tstmp.items(): - v['date'] = k - timeseries.append(v) - - timeseries = 
sorted(timeseries, key = lambda x: x['date']) - JSON_OUT = { - 'widgetType': { - 'chartType': 'line', # Recommendation for the UI - 'stack': True - }, - 'timeseries': timeseries, - 'sortOrder': ['code', 'comments', 'blanks'], - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/pony-timeseries.py b/api/pages/code/pony-timeseries.py deleted file mode 100644 index a7c1c8ec..00000000 --- a/api/pages/code/pony-timeseries.py +++ /dev/null @@ -1,215 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/pony-timeseries -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# -######################################################################## - - - - - -""" -This is the pony factor renderer for Kibble -""" - -import json -import time -import re -import datetime -import dateutil.relativedelta - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - hl = indata.get('span', 24) - tnow = datetime.date.today() - nm = tnow.month - (tnow.month % 3) - ny = tnow.year - ts = [] - - if nm < 1: - nm += 12 - ny = ny - 1 - - while ny > 1970: - d = datetime.date(ny, nm, 1) - t = time.mktime(d.timetuple()) - d = d - dateutil.relativedelta.relativedelta(months=hl) - tf = time.mktime(d.timetuple()) - nm -= 3 - if nm < 1: - nm += 12 - ny = ny - 1 - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': tf, - 'to': t - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - - globcount = res['count'] - if globcount == 0: - break - - # Get top 25 committers this period - query['aggs'] = { - 'by_committer': { - 'terms': { - 'field': 'committer_email', - 'size': 1000 - } - }, - 'by_author': { - 'terms': { - 'field': 'author_email', - 'size': 1000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - - # PF for committers - pf_committer = 0 - pf_committer_count = 0 - for bucket in res['aggregations']['by_committer']['buckets']: - count = bucket['doc_count'] - pf_committer += 1 - pf_committer_count += count - if pf_committer_count > int(globcount/2): - break - - # PF for authors - pf_author = 0 - pf_author_count = 0 - cpf = {} - for bucket in res['aggregations']['by_author']['buckets']: - count = bucket['doc_count'] - pf_author += 1 - pf_author_count += count - if '@' in bucket['key']: - mldom = bucket['key'].lower().split('@')[-1] - cpf[mldom] = True - if pf_author_count > int(globcount/2): - break - ts.append({ - 'date': t, - 'Pony Factor (committership)': pf_committer, - 'Pony Factor (authorship)': pf_author, - 'Meta-Pony Factor': len(cpf) - }) - - ts = sorted(ts, key = lambda x: x['date']) - - JSON_OUT = { - 'text': "This shows Pony Factors as calculated over a %u month timespan. Authorship measures the people writing the bulk of the codebase, committership mesaures the people committing (merging) the code, and meta-pony is an estimation of how many organisations/companies are involved." 
% hl, - 'timeseries': ts, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/pony.py b/api/pages/code/pony.py deleted file mode 100644 index 3eb074fe..00000000 --- a/api/pages/code/pony.py +++ /dev/null @@ -1,289 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/pony -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows pony factor data for a set of repos over a given period of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows pony factor data for a set of repos over a given period of time -# -######################################################################## - - - - - -""" -This is the pony factor renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*24)) # Default to a 24 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - - globcount = res['count'] - - # Get top 25 committers this period - query['aggs'] = { - 'by_committer': { - 'terms': { - 'field': 'committer_email', - 'size': 5000 - } - }, - 'by_author': { - 'terms': { - 'field': 'author_email', - 'size': 5000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - - # PF for committers - pf_committer = 0 - pf_committer_count = 0 - for bucket in res['aggregations']['by_committer']['buckets']: - count = bucket['doc_count'] - pf_committer += 1 - pf_committer_count += count - if pf_committer_count > int(globcount/2): - break - - # PF for authors - pf_author = 0 - pf_author_count = 0 - cpf = {} - for bucket in 
res['aggregations']['by_author']['buckets']: - count = bucket['doc_count'] - pf_author += 1 - pf_author_count += count - mldom = bucket['key'].lower().split('@')[1] - cpf[mldom] = True - if pf_author_count > int(globcount/2): - break - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - - globcount = res['count'] - - # Get top 25 committers this period - query['aggs'] = { - 'by_committer': { - 'terms': { - 'field': 'committer_email', - 'size': 5000 - } - }, - 'by_author': { - 'terms': { - 'field': 'author_email', - 'size': 5000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - - # PF for committers - pf_committer_b = 0 - pf_committer_count = 0 - for bucket in res['aggregations']['by_committer']['buckets']: - count = bucket['doc_count'] - pf_committer_b += 1 - pf_committer_count += count - if pf_committer_count > int(globcount/2): - break - - # PF for authors - pf_author_b = 0 - pf_author_count = 0 - cpf_b = {} - for bucket in res['aggregations']['by_author']['buckets']: - count = bucket['doc_count'] - pf_author_b += 1 - pf_author_count += count - mldom = bucket['key'].lower().split('@')[1] - cpf_b[mldom] = True - if pf_author_count > int(globcount/2): - break - - JSON_OUT 
= { - 'factors': [ - { - 'title': "Pony Factor (by committership)", - 'count': pf_committer, - 'previous': pf_committer_b - }, - { - 'title': "Pony Factor (by authorship)", - 'count': pf_author, - 'previous': pf_author_b - }, - { - 'title': "Meta-Pony Factor (by authorship)", - 'count': len(cpf), - 'previous': len(cpf_b) - } - ], - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/punchcard.py b/api/pages/code/punchcard.py deleted file mode 100644 index ab0a52f8..00000000 --- a/api/pages/code/punchcard.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/punchcard -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show commits as a timeseries -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Show commits as a timeseries -# -######################################################################## - - - - - -""" -This is the commit punch-card renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'day') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - # Get number of committers, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'date', - 'interval': 'hour', - "format": "E - k" - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - timeseries = {} - for bucket in res['aggregations']['commits']['buckets']: - ts = bucket['key_as_string'] - count = bucket['doc_count'] - timeseries[ts] = timeseries.get(ts, 0) + count - - JSON_OUT = { - 'widgetType': { - 'chartType': 'punchcard' # Recommendation for the UI - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/relationships.py b/api/pages/code/relationships.py deleted file mode 100644 index 843c1da6..00000000 --- a/api/pages/code/relationships.py +++ /dev/null @@ -1,302 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/relationships -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between repositories -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between repositories -# -######################################################################## - - - - - -""" -This is the committer relationship list renderer for Kibble -""" - -import json -import time -import hashlib -import copy -import re -import math - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'day') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'committer_email' if not indata.get('author') else 'author_email': indata.get('email')}}) - - # Get number of commits, this period, per repo - query['aggs'] = { - 'per_repo': { - 'terms': { - 'field': 'sourceID', - 'size': 10000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - repos = {} - repo_commits = {} - authorlinks = {} - max_commits = 0 - max_links = 0 - max_shared = 0 - max_authors = 0 - minLinks = indata.get('links', 1) - - # For each repo, count commits and gather data on authors - for doc in res['aggregations']['per_repo']['buckets']: - sourceID = doc['key'] - commits = doc['doc_count'] - - # Gather the unique authors/committers - query['aggs'] = { - 'per_contributor': { - 'terms': { - 'field': 'committer_email' if not indata.get('author') else 'author_email', - 'size': 10000 - } - } - } - xquery = copy.deepcopy(query) - xquery['query']['bool']['must'].append({'term': {'sourceID': sourceID}}) - xres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = xquery - ) - authors = [] - for person in xres['aggregations']['per_contributor']['buckets']: - authors.append(person['key']) - if commits > max_commits: - max_commits = commits - repos[sourceID] = authors - repo_commits[sourceID] = commits - - # Now, figure out which repos share the same contributors - repo_links = {} - repo_notoriety = {} - repodatas = {} - repo_authors = {} - - # Grab data of all sources - for ID, repo in repos.items(): - mylinks = {} - if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID): - continue - repodatas[ID] = session.DB.ES.get(index=session.DB.dbname, 
doc_type="source", id = ID) - - for ID, repo in repos.items(): - mylinks = {} - if not ID in repodatas: - continue - repodata = repodatas[ID] - oID = ID - if indata.get('collapse'): - m = re.search(indata.get('collapse'), repodata['_source']['sourceURL']) - if m: - ID = m.group(1) - else: - ID = re.sub(r"^.+/", "", repodata['_source']['sourceURL']) - for xID, xrepo in repos.items(): - if xID in repodatas: - xrepodata = repodatas[xID] - if indata.get('collapse'): - m = re.search(indata.get('collapse'), xrepodata['_source']['sourceURL']) - if m: - xID = m.group(1) - else: - xID = re.sub(r"^.+/", "", xrepodata['_source']['sourceURL']) - if xID != ID: - xlinks = [] - for author in xrepo: - if author in repo: - xlinks.append(author) - lname = "%s@%s" % (ID, xID) # Link name - rname = "%s@%s" % (xID, ID) # Reverse link name - if len(xlinks) >= minLinks and not rname in repo_links: - mylinks[xID] = len(xlinks) - repo_links[lname] = repo_links.get(lname, 0) + len(xlinks) # How many contributors in common between project A and B? - if repo_links[lname] > max_shared: - max_shared = repo_links[lname] - if ID not in repo_notoriety: - repo_notoriety[ID] = set() - repo_notoriety[ID].update(mylinks.keys()) # How many projects is this repo connected to? - - if ID not in repo_authors: - repo_authors[ID] = set() - repo_authors[ID].update(repo) # How many projects is this repo connected to? - - if ID != oID: - repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] - if repo_commits[ID] > max_commits: - max_commits = repo_commits[ID] # Used for calculating max link thickness - if len(repo_notoriety[ID]) > max_links: - max_links = len(repo_notoriety[ID]) - if len(repo_authors[ID]) > max_authors: - max_authors = len(repo_authors[ID]) # Used for calculating max sphere size in charts - - # Now, pull it all together! 
- nodes = [] - links = [] - existing_repos = [] - for sourceID in repo_notoriety.keys(): - lsize = 0 - for k in repo_links.keys(): - fr, to = k.split('@') - if fr == sourceID or to == sourceID: - lsize += 1 - asize = len(repo_authors[sourceID]) - doc = { - 'id': sourceID, - 'name': sourceID, - 'commits': repo_commits[sourceID], - 'authors': asize, - 'links': lsize, - 'size': max(5, (1 - abs(math.log10(asize / max_authors))) * 45), - 'tooltip': "%u connections, %u contributors, %u commits" % (lsize, asize, repo_commits[sourceID]) - } - nodes.append(doc) - existing_repos.append(sourceID) - - for k, s in repo_links.items(): - size = s - fr, to = k.split('@') - if fr in existing_repos and to in existing_repos: - doc = { - 'source': fr, - 'target': to, - 'value': max(1, (size/max_shared) * 8), - 'name': "%s ↔ %s" % (fr, to), - 'tooltip': "%u committers in common" % size - } - links.append(doc) - - JSON_OUT = { - 'maxLinks': max_links, - 'maxShared': max_shared, - 'widgetType': { - 'chartType': 'link' # Recommendation for the UI - }, - 'links': links, - 'nodes': nodes, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/retention.py b/api/pages/code/retention.py deleted file mode 100644 index 6e108441..00000000 --- a/api/pages/code/retention.py +++ /dev/null @@ -1,252 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/retention -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows retention metrics for a set of repos over a given period of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows retention metrics for a set of repos over a given period of time -# -######################################################################## - - - - - -""" -This is the code contributor retention factor renderer for Kibble -""" - -import json -import time -import re -import datetime - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - hl = indata.get('span', 12) # By default, we define a contributor as active if having committer in the past year - tnow = datetime.date.today() - nm = tnow.month - (tnow.month % 3) - ny = tnow.year - cy = ny - ts = [] - - if nm < 1: - nm += 12 - ny = ny - 1 - - peopleSeen = {} - activePeople = {} - allPeople = {} - FoundSomething = False - - ny = 1970 - while ny < cy or (ny == cy and (nm+3) <= tnow.month): - d = datetime.date(ny, nm, 1) - t = time.mktime(d.timetuple()) - nm += 3 - if nm > 12: - nm -= 12 - ny = ny + 1 - if ny == cy and nm > tnow.month: - break - d = datetime.date(ny, nm, 1) - tf = time.mktime(d.timetuple()) - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': t, - 'to': tf - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - - globcount = res['count'] - if globcount == 0 and not FoundSomething: - continue - FoundSomething = True - - # Get top 1000 committers this period - query['aggs'] = { - 'by_committer': { - 'terms': { - 'field': 'committer_email', - 'size': 25000 - } - }, - 'by_author': { - 'terms': { - 'field': 'author_email', - 'size': 25000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - - retained = 0 - added = 0 - lost = 0 - - thisPeriod = [] - for bucket in res['aggregations']['by_author']['buckets']: - who = bucket['key'] - thisPeriod.append(who) - if who not in peopleSeen: - peopleSeen[who] = tf - added += 1 - activePeople[who] = tf - if who not in allPeople: - allPeople[who] = tf - - prune = [] - for k, v in activePeople.items(): - if v < (t - (hl*30.45*86400)): - prune.append(k) - lost += 1 - - for who in prune: - del activePeople[who] - del peopleSeen[who] - retained = len(activePeople) - added - - ts.append({ - 'date': tf, - 'People who (re)joined': added, - 'People who quit': lost, - 'People retained': retained, - 'Active people': added + retained - }) - - groups = [ - ['More than 5 years', (5*365*86400)+1], - ['2 - 5 years', (2*365*86400)+1], - ['1 - 2 years', (365*86400)], - ['Less than a year', 1] - ] - - counts = {} - totExp = 0 - for person, age in activePeople.items(): - totExp += time.time() - allPeople[person] - for el in sorted(groups, key = lambda x: x[1], reverse = True): - if allPeople[person] <= time.time() - el[1]: - counts[el[0]] = counts.get(el[0], 0) + 1 - break - avgyr = (totExp / (86400*365)) / max(len(activePeople),1) - - ts = 
sorted(ts, key = lambda x: x['date']) - avgm = "" - yr = int(avgyr) - ym = round((avgyr-yr)*12) - if yr >= 1: - avgm += "%u year%s" % (yr, "s" if yr != 1 else "") - if ym > 0: - avgm += "%s%u month%s" % (", " if yr > 0 else "", ym, "s" if ym != 1 else "") - JSON_OUT = { - 'text': "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." % (hl, avgm), - 'timeseries': ts, - 'counts': counts, - 'averageYears': avgyr, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/sloc.py b/api/pages/code/sloc.py deleted file mode 100644 index 29b54c55..00000000 --- a/api/pages/code/sloc.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/sloc -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of lines of code for one or more sources -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of lines of code for one or more sources -# -######################################################################## - - - - - -""" -This is the SLoC renderer for Kibble -""" - -import json - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - # Fetch all sources for default org - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - { - 'terms': { - 'type': ['git', 'svn', 'github'] - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="source", - size = 5000, - body = query - ) - - languages = {} - years = 0 - for hit in res['hits']['hits']: - doc = hit['_source'] - if 'sloc' in doc: - sloc = doc['sloc'] - years += sloc['years'] - for k, v in sloc['languages'].items(): - languages[k] = languages.get(k, {'code': 0, 'comment': 0, 'blank': 0}) - languages[k]['code'] += v.get('code', 0) - languages[k]['comment'] += v.get('comment', 0) - languages[k]['blank'] += v.get('blank', 0) - - - JSON_OUT = { - 'languages': languages, - 'okay': True, - 'years': years - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/top-commits.py b/api/pages/code/top-commits.py deleted file mode 100644 index ce75268e..00000000 --- a/api/pages/code/top-commits.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/code/top-commits -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by commit volume -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by commit volume -# -######################################################################## - - - - - -""" -This is the TopN repos by commits list renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - - # Get top 25 committers this period - query['aggs'] = { - 'by_repo': { - 'terms': { - 'field': 'sourceURL', - 'size': 5000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - - toprepos = [] - for bucket in res['aggregations']['by_repo']['buckets']: - repo = re.sub(r".+/([^/]+?)(?:\.git)?$", r"\1", bucket['key']) - count = bucket['doc_count'] - - toprepos.append([repo, count]) - - toprepos = sorted(toprepos, key = lambda x: x[1], reverse = True) - top = toprepos[0:24] - if len(toprepos) > 25: - count = 0 - for repo in toprepos[25:]: - count += repo[1] - top.append(["Other repos", count]) - - tophash = {} - for v in top: - tophash[v[0]] = v[1] - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/top-sloc.py b/api/pages/code/top-sloc.py deleted file mode 100644 index db0c8592..00000000 --- a/api/pages/code/top-sloc.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/top-sloc -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by lines of code -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 repos by lines of code -# -######################################################################## - - - - - -""" -This is the TopN repos by SLoC list renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'terms': - { - 'type': ['git', 'svn', 'github'] - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="source", - size = 5000, - body = query - ) - - toprepos = [] - for doc in res['hits']['hits']: - repo = doc['_source'] - url = re.sub(r".+/([^/]+?)(?:\.git)?$", r"\1", repo['sourceURL']) - if 'sloc' in repo: - count = repo['sloc'].get('loc', 0) - if not count: - count = 0 - toprepos.append([url, count]) - - toprepos = sorted(toprepos, key = lambda x: int(x[1]), reverse = True) - top = toprepos[0:24] - if len(toprepos) > 25: - count = 0 - for repo in toprepos[25:]: - count += repo[1] - top.append(["Other repos", count]) - - tophash = {} - for v in top: - tophash[v[0]] = v[1] - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/code/trends.py b/api/pages/code/trends.py deleted file mode 100644 index 69d9b130..00000000 --- a/api/pages/code/trends.py +++ /dev/null @@ -1,362 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license 
agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/code/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of repos over a given period of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of repos over a given period of time -# -######################################################################## - - - - - -""" -This is the SLoC renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! 
- if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'committer_email': indata.get('email')}}, {'term': {'author_email': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' 
in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - # Get number of commits, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits = res['count'] - - - # Get number of committers, this period - query['aggs'] = { - 'commits': { - 'cardinality': { - 'field': 'committer_email' - } - }, - 'authors': { - 'cardinality': { - 'field': 'author_email' - } - } - - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - no_committers = res['aggregations']['commits']['value'] - no_authors = res['aggregations']['authors']['value'] - - - # Get number of insertions, this period - query['aggs'] = { - 'changes': { - 'sum': { - 'field': 'insertions' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - insertions = res['aggregations']['changes']['value'] - - # Get number of deletions, this period - query['aggs'] = { - 'changes': { - 'sum': { - 'field': 'deletions' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - deletions = res['aggregations']['changes']['value'] - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Path filter? - if indata.get('pathfilter'): - pf = indata.get('pathfilter') - if '!' in pf: - pf = pf.replace('!', '') - query['query']['bool']['must_not'] = query['query']['bool'].get('must_not', []) - query['query']['bool']['must_not'].append({'regexp': {'files_changed': pf}}) - else: - query['query']['bool']['must'].append({'regexp': {'files_changed': pf}}) - - - # Get number of commits, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits_before = res['count'] - - # Get number of committers, this period - query['aggs'] = { - 'commits': { - 'cardinality': { - 'field': 'committer_email' - } - }, - 'authors': { - 'cardinality': { - 'field': 'author_email' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - no_committers_before = res['aggregations']['commits']['value'] - no_authors_before = res['aggregations']['authors']['value'] - - # Get number of insertions, this period - query['aggs'] = { - 'changes': { - 'sum': { - 'field': 'insertions' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - insertions_before = res['aggregations']['changes']['value'] - - # Get number of deletions, this period - query['aggs'] = { - 'changes': { - 'sum': { - 'field': 'deletions' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - deletions_before = res['aggregations']['changes']['value'] - - - - trends = { - "committers": { - 'before': no_committers_before, - 'after': no_committers, - 'title': "Committers this period" - }, - "authors": { - 'before': no_authors_before, - 'after': 
no_authors, - 'title': "Authors this period" - }, - 'commits': { - 'before': no_commits_before, - 'after': no_commits, - 'title': "Commits this period" - }, - 'changes': { - 'before': insertions_before + deletions_before, - 'after': insertions + deletions, - 'title': "Lines changed this period" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) - -""" -commits = { - before = pcommits, - after = commits, - title = "Commits" - }, - [role.."s"] = { - before = pcommitters, - after = committers, - title = role:gsub("^(%S)", string.upper).."s", - }, - lines = { - before = pdeletions + pinsertions, - after = deletions + insertions, - title = "Lines changed" - } - """ - \ No newline at end of file diff --git a/api/pages/filters.py b/api/pages/filters.py deleted file mode 100644 index d2960517..00000000 --- a/api/pages/filters.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -This is the source list handler for Kibble -""" - -import json -import re -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! 
- if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - # Fetch all sources for default org - dOrg = session.user['defaultOrganisation'] or "apache" - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="view", - size = 5000, - body = { - 'query': { - 'term': { - 'owner': session.user['email'] - } - } - } - ) - - sources = [] - for hit in res['hits']['hits']: - doc = hit['_source'] - if indata.get('quick'): - xdoc = { - 'sourceID': doc['sourceID'], - 'type': doc['type'], - 'sourceURL': doc['sourceURL'] - } - sources.append(xdoc) - else: - sources.append(doc) - - JSON_OUT = { - 'views': sources, - 'okay': True, - 'organisation': dOrg - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/actors.py b/api/pages/forum/actors.py deleted file mode 100644 index 345f59ae..00000000 --- a/api/pages/forum/actors.py +++ /dev/null @@ -1,244 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/forum/actors -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. of people opening/closing issues over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. of people opening topics or replying to them. -# -######################################################################## - - - - - -""" -This is the forum actors stats page for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}] - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - }, - 'aggs': { - 'by_user': { - 'cardinality': { - 'field': 'creator' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - - timeseries = {} - - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - ccount = bucket['by_user']['value'] - timeseries[ts] = { - 'date': ts, - 'topic responders': ccount, - 'topic creators': 0 - } - - - #################################################################### - #################################################################### - dOrg = 
session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'creator': indata.get('email')}}] - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - }, - 'aggs': { - 'by_user': { - 'cardinality': { - 'field': 'creator' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 0, - body = query - ) - - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - ccount = bucket['by_user']['value'] - if ts in timeseries: - timeseries[ts]['topic creators'] = ccount - else: - timeseries[ts] = { - 'date': ts, - 'topic creators': 0, - 'topic responders': ccount - } - - ts = [] - for x, el in timeseries.items(): - ts.append(el) - - JSON_OUT = { - 'timeseries': ts, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/creators.py b/api/pages/forum/creators.py deleted file mode 100644 index dc6a6c6a..00000000 --- a/api/pages/forum/creators.py +++ /dev/null @@ -1,181 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/forum/creators -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue openers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of forum topic creators -# -######################################################################## - - - - - -""" -This is the TopN issue openers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - xtitle = None - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'creator': indata.get('email')}}) - xtitle = "People opening issues solved by %s" % indata.get('email') - - # Get top 25 committers this period - query['aggs'] = { - 'committers': { - 'terms': { - 'field': 'creator', - 'size': 25 - }, - 'aggs': { - - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['committers']['buckets']: - email = bucket['key'] - count = bucket['doc_count'] - sha = email - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = email - ) - person = pres['_source'] - person['name'] = person.get('name', 'unknown') - people[email] = person - 
people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - JSON_OUT = { - 'topN': { - 'denoter': 'topics created', - 'items': topN, - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar', - 'title': xtitle - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/issues.py b/api/pages/forum/issues.py deleted file mode 100644 index a485bfac..00000000 --- a/api/pages/forum/issues.py +++ /dev/null @@ -1,258 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/forum/issues -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of issues opened/closed over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of forum topics opened/responded-to over time -# -######################################################################## - - - - - -""" -This is the forum timeseries renderer for Kibble -""" - -import json -import time -import hashlib - -# This creates an empty timeseries object with -# all categories initialized as 0 opened, 0 closed. -def makeTS(dist): - ts = {} - for k in dist: - ts[k + ' topics'] = 0 - ts[k + ' replies'] = 0 - return ts - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - # By default, we lump generic forums and question/answer (like SO, askbot) together as one - distinct = { - 'forum': ['discourse', 'stackoverflow', 'askbot'] - } - - # If requested, we split them into two - if indata.get('distinguish', False): - distinct = { - 'forum': ['discourse'], - 'question bank': ['stackoverflow', 'askbot'] - } - - timeseries = {} - - # For each category and the issue types that go along with that, - # grab opened and closed over time. - for iType, iValues in distinct.items(): - #################################################################### - # ISSUES OPENED # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { - 'terms': { - 'type': iValues - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'creator': indata.get('email')}}) - - # Get number of opened ones, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 0, - body = query - ) - - for bucket in res['aggregations']['commits']['buckets']: - ts = int(bucket['key'] / 1000) - count = bucket['doc_count'] - timeseries[ts] = timeseries.get(ts, makeTS(distinct)) - timeseries[ts][iType + ' topics'] = timeseries[ts].get(iType + ' topics', 0) + count - - - #################################################################### - # ISSUES CLOSED # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { - 'terms': { - 'type': iValues - } - } - ] - } - } - } - if viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'creator': indata.get('email')}}) - - # Get number of closed ones, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - - for bucket in res['aggregations']['commits']['buckets']: - ts = int(bucket['key'] / 1000) 
- count = bucket['doc_count'] - timeseries[ts] = timeseries.get(ts, makeTS(distinct)) - timeseries[ts][iType + ' replies'] = timeseries[ts].get(iType + ' replies', 0) + count - - ts = [] - for k, v in timeseries.items(): - v['date'] = k - ts.append(v) - - - JSON_OUT = { - 'widgetType': { - 'chartType': 'line', # Recommendation for the UI - 'nofill': True - }, - 'timeseries': ts, - 'interval': interval, - 'okay': True, - 'distinguishable': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/responders.py b/api/pages/forum/responders.py deleted file mode 100644 index 6c12ca2a..00000000 --- a/api/pages/forum/responders.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/forum/responders -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue closers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue closers -# -######################################################################## - - - - - -""" -This is the TopN forum posters list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - xtitle = None - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'creator': indata.get('email')}}) - xTitle = "People closing %s's issues" % indata.get('email') - - # Get top 25 committers this period - query['aggs'] = { - 'committers': { - 'terms': { - 'field': 'creator', - 'size': 25 - }, - 'aggs': { - - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['committers']['buckets']: - email = bucket['key'] - count = bucket['doc_count'] - sha = email - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = email - ) - person = pres['_source'] - person['name'] = person.get('name', 'unknown') - people[email] = person - 
people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - JSON_OUT = { - 'topN': { - 'denoter': 'replies posted', - 'items': topN, - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar', - 'title': xtitle - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/top-count.py b/api/pages/forum/top-count.py deleted file mode 100644 index 58d345c1..00000000 --- a/api/pages/forum/top-count.py +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/forum/top-count -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 issue trackers by issues -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 forums by interactions -# -######################################################################## - - - - - -""" -This is the TopN repos by commits list renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [ - {'term': {'creator': indata.get('email')}} - ] - - - # Get top 25 committers this period - query['aggs'] = { - 'by_repo': { - 'terms': { - 'field': 'sourceID', - 'size': 5000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - - toprepos = [] - for bucket in res['aggregations']['by_repo']['buckets']: - ID = bucket['key'] - if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID): - it = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id = ID)['_source'] - repo = re.sub(r".+/([^/]+)$", r"\1", it['sourceURL']) - count = bucket['doc_count'] - toprepos.append([repo, count]) - - toprepos = sorted(toprepos, key = lambda x: x[1], reverse = True) - top = toprepos[0:24] - if len(toprepos) > 25: - count = 0 - for repo in toprepos[25:]: - count += repo[1] - 
top.append(["Other forums", count]) - - tophash = {} - for v in top: - tophash[v[0]] = v[1] - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/top.py b/api/pages/forum/top.py deleted file mode 100644 index 51d4c8d6..00000000 --- a/api/pages/forum/top.py +++ /dev/null @@ -1,159 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/forum/top -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/TopList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N issues by interactions -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/TopList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N topics by interactions -# -######################################################################## - - - - - -""" -This is the issue actors stats page for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - }, - 'sort': { - 'posts': 'desc' - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'creator': indata.get('email')}}] - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 25, - body = query - ) - top = [] - for bucket in res['hits']['hits']: - doc = bucket['_source'] - doc['source'] = doc.get('url', '#') - doc['name'] = doc.get('type', 'unknown') - doc['subject'] = doc.get('title') - doc['count'] = doc.get('posts', 0) - top.append(doc) - - - JSON_OUT = { - 'topN': { - 'denoter': 'interactions', - 'icon': 'comment', - 'items': top - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'line' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/forum/trends.py b/api/pages/forum/trends.py deleted file mode 100644 index 6012890e..00000000 --- a/api/pages/forum/trends.py +++ /dev/null 
@@ -1,350 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/forum/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of issue trackers over a given period of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of forums over a given period of time -# -######################################################################## - - - 
- - -""" -This is the forum trends renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="forum_topic", - body = query - ) - no_issues_created = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'opener': { - 'cardinality': { - 'field': 'creator' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 0, - body = query - ) - no_creators = res['aggregations']['opener']['value'] - - - # REPLIERS - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="forum_post", - body = query - ) - no_issues_closed = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'closer': { - 'cardinality': { - 'field': 'creator' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - no_closers = res['aggregations']['closer']['value'] - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - - # Get number of issues, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="forum_topic", - body = query - ) - no_issues_created_before = res['count'] - - # Get number of committers, this period - query['aggs'] = { - 'opener': { - 'cardinality': { - 'field': 'creator' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_topic", - size = 0, - body = query - ) - no_creators_before = res['aggregations']['opener']['value'] - - - - # REPLIERS - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="forum_post", - body = query - ) - no_issues_closed_before = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'closer': { - 'cardinality': { - 'field': "creator" - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="forum_post", - size = 0, - body = query - ) - no_closers_before = res['aggregations']['closer']['value'] - - trends = { - "created": { - 'before': no_issues_created_before, - 'after': no_issues_created, - 'title': "Topics started this period" - }, - "authors": { - 'before': no_creators_before, - 'after': no_creators, - 'title': "People starting topics this period" - }, - "closed": { - 'before': no_issues_closed_before, - 'after': no_issues_closed, - 'title': "Replies this period" - }, 
- "closers": { - 'before': no_closers_before, - 'after': no_closers, - 'title': "People replying this period" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/actors.py b/api/pages/issue/actors.py deleted file mode 100644 index 37a5124a..00000000 --- a/api/pages/issue/actors.py +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/actors -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. 
of people opening/closing issues over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. of people opening/closing issues over time -# -######################################################################## - - - - - -""" -This is the issue actors stats page for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'closedDate', - 'interval': interval - }, - 'aggs': { - 'by_user': { - 'cardinality': { - 'field': 'issueCloser' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - timeseries = {} - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - ccount = bucket['by_user']['value'] - timeseries[ts] = { - 'date': ts, - 'closers': ccount, - 'openers': 0 - } - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - }, - 'aggs': { - 'by_user': { - 'cardinality': { - 'field': 'issueCreator' - } - } - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - ccount = bucket['by_user']['value'] - if ts in timeseries: - timeseries[ts]['openers'] = ccount - else: - timeseries[ts] = { - 'date': ts, - 'closers': 0, - 'openers': ccount - } - - ts = [] - for x, el in timeseries.items(): - ts.append(el) - - JSON_OUT = { - 'timeseries': ts, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/age.py b/api/pages/issue/age.py deleted file mode 100644 index 4ab1f617..00000000 --- a/api/pages/issue/age.py +++ /dev/null @@ -1,158 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/age -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. of open tickets by age -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of no. of open tickets by age -# -######################################################################## - - - - - -""" -This is the issue actors stats page for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - interval = indata.get('interval', 'month') - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - { - 'term': { - 'status': 'open' - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get timeseries for this period - query['aggs'] = { - 'per_interval': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - } - } - } - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - timeseries = [] - opened = 0 - for bucket in res['aggregations']['per_interval']['buckets']: - ts = int(bucket['key'] / 1000) - opened += bucket['doc_count'] - timeseries.append( { - 'date': ts, - 'open': opened - }) - - - - JSON_OUT = { - 'timeseries': timeseries, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'line' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/closers.py b/api/pages/issue/closers.py deleted file mode 100644 index 6515130e..00000000 --- a/api/pages/issue/closers.py +++ /dev/null @@ -1,182 +0,0 @@ 
-#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/closers -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue closers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue closers -# -######################################################################## - - - - - -""" -This is the TopN issue closers list renderer for Kibble -""" - 
-import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - xtitle = None - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'issueCreator': indata.get('email')}}) - xTitle = "People closing %s's issues" % indata.get('email') - - # Get top 25 committers this period - query['aggs'] = { - 'committers': { - 'terms': { - 'field': 'issueCloser', - 'size': 25 - }, - 'aggs': { - - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['committers']['buckets']: - email = bucket['key'] - count = bucket['doc_count'] - sha = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('utf-8') ).hexdigest() - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = sha - ) - person = pres['_source'] - person['name'] = person.get('name', 'unknown') - people[email] = person - people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - JSON_OUT = { - 'topN': { - 'denoter': 'issues closed', - 'items': topN, - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar', - 'title': xtitle - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/issues.py b/api/pages/issue/issues.py deleted file mode 100644 index 623eaa7e..00000000 --- a/api/pages/issue/issues.py +++ /dev/null @@ -1,258 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/issues -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of issues opened/closed over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of issues opened/closed over time -# -######################################################################## - - - - - -""" -This is the issue timeseries renderer for Kibble -""" - -import json -import time -import hashlib - -# This creates an empty timeseries object with -# all categories initialized as 0 
opened, 0 closed. -def makeTS(dist): - ts = {} - for k in dist: - ts[k + ' opened'] = 0 - ts[k + ' closed'] = 0 - return ts - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - # By default, we lump PRs and issues into the same category - distinct = { - 'issues': ['issue', 'pullrequest'] - } - - # If requested, we split them into two - if indata.get('distinguish', False): - distinct = { - 'issues': ['issue'], - 'pull requests': ['pullrequest'] - } - - timeseries = {} - - # For each category and the issue types that go along with that, - # grab opened and closed over time. - for iType, iValues in distinct.items(): - #################################################################### - # ISSUES OPENED # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { - 'terms': { - 'issuetype': iValues - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'issueCreator': indata.get('email')}}) - - # Get number of opened ones, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'createdDate', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - for bucket in res['aggregations']['commits']['buckets']: - ts = int(bucket['key'] / 1000) - count = bucket['doc_count'] - timeseries[ts] = timeseries.get(ts, makeTS(distinct)) - timeseries[ts][iType + ' opened'] = timeseries[ts].get(iType + ' opened', 0) + count - - - #################################################################### - # ISSUES CLOSED # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { - 'terms': { - 'issuetype': iValues - } - } - ] - } - } - } - if viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'issueCloser': indata.get('email')}}) - - # Get number of closed ones, this period - query['aggs'] = { - 'commits': { - 'date_histogram': { - 'field': 'closedDate', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - for bucket in res['aggregations']['commits']['buckets']: - ts = int(bucket['key'] / 
1000) - count = bucket['doc_count'] - timeseries[ts] = timeseries.get(ts, makeTS(distinct)) - timeseries[ts][iType + ' closed'] = timeseries[ts].get(iType + ' closed', 0) + count - - ts = [] - for k, v in timeseries.items(): - v['date'] = k - ts.append(v) - - - JSON_OUT = { - 'widgetType': { - 'chartType': 'line', # Recommendation for the UI - 'nofill': True - }, - 'timeseries': ts, - 'interval': interval, - 'okay': True, - 'distinguishable': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/openers.py b/api/pages/issue/openers.py deleted file mode 100644 index 321e5d0f..00000000 --- a/api/pages/issue/openers.py +++ /dev/null @@ -1,181 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/issue/openers -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue openers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of issue openers -# -######################################################################## - - - - - -""" -This is the TopN issue openers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - xtitle = None - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'issueCloser': indata.get('email')}}) - xtitle = "People opening issues solved by %s" % indata.get('email') - - # Get top 25 committers this period - query['aggs'] = { - 'committers': { - 'terms': { - 'field': 'issueCreator', - 'size': 25 - }, - 'aggs': { - - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['committers']['buckets']: - email = bucket['key'] - count = bucket['doc_count'] - sha = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('utf-8') ).hexdigest() - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = sha - ) - person = pres['_source'] - person['name'] = 
person.get('name', 'unknown') - people[email] = person - people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - JSON_OUT = { - 'topN': { - 'denoter': 'issues opened', - 'items': topN, - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'bar', - 'title': xtitle - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/pony-timeseries.py b/api/pages/issue/pony-timeseries.py deleted file mode 100644 index 2bf096d5..00000000 --- a/api/pages/issue/pony-timeseries.py +++ /dev/null @@ -1,218 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/issue/pony-timeseries -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# -######################################################################## - - - - - -""" -This is the pony factor renderer for Kibble -""" - -import json -import time -import re -import datetime -import dateutil.relativedelta - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - hl = indata.get('span', 24) - tnow = datetime.date.today() - nm = tnow.month - (tnow.month % 3) - ny = tnow.year - ts = [] - - if nm < 1: - nm += 12 - ny = ny - 1 - - while ny > 1970: - d = datetime.date(ny, nm, 1) - t = time.mktime(d.timetuple()) - d = d - dateutil.relativedelta.relativedelta(months=hl) - tf = time.mktime(d.timetuple()) - nm -= 3 - if nm < 1: - nm += 12 - ny = ny - 1 - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': tf, - 'to': t - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - - globcount = res['count'] - if globcount == 0: - break - - # Get top 25 committers this period - query['aggs'] = { - 'by_creator': { - 'terms': { - 'field': 'issueCreator', - 'size': 1000 - } - }, - 'by_closer': { - 'terms': { - 'field': 'issueCloser', - 'size': 1000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - cpf = {} - - # PF for openers - pf_opener = 0 - pf_opener_count = 0 - for bucket in res['aggregations']['by_creator']['buckets']: - count = bucket['doc_count'] - pf_opener += 1 - pf_opener_count += count - if '@' in bucket['key']: - mldom = bucket['key'].lower().split('@')[-1] - cpf[mldom] = True - if pf_opener_count > int(globcount/2): - break - - # PF for closer - pf_closer = 0 - pf_closer_count = 0 - for bucket in res['aggregations']['by_closer']['buckets']: - count = bucket['doc_count'] - pf_closer += 1 - pf_closer_count += count - if '@' in bucket['key']: - mldom = bucket['key'].lower().split('@')[-1] - cpf[mldom] = True - if pf_closer_count > int(globcount/2): - break - ts.append({ - 'date': t, - 'Pony Factor (openers)': pf_opener, - 'Pony Factor (closers)': pf_closer, - 'Meta-Pony Factor': len(cpf) - }) - - ts = sorted(ts, key = lambda x: x['date']) - - JSON_OUT = { - 'text': "This shows Pony Factors as calculated over a %u month timespan. Openers measures the people submitting the bulk of the issues, closers mesaures the people closing (resolving) the issues, and meta-pony is an estimation of how many organisations/companies are involved." 
% hl, - 'timeseries': ts, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/relationships.py b/api/pages/issue/relationships.py deleted file mode 100644 index f660832f..00000000 --- a/api/pages/issue/relationships.py +++ /dev/null @@ -1,311 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/issue/relationships -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between issue trackers -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between issue trackers -# -######################################################################## - - - - - -""" -This is the issue tracker relationship list renderer for Kibble -""" - -import json -import time -import hashlib -import copy -import re -import math - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'day') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of commits, this period, per repo - query['aggs'] = { - 'per_repo': { - 'terms': { - 'field': 'sourceID', - 'size': 10000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - repos = {} - repo_commits = {} - authorlinks = {} - max_commits = 0 - max_links = 0 - max_shared = 0 - max_authors = 0 - - # For each repo, count commits and gather data on authors - for doc in res['aggregations']['per_repo']['buckets']: - sourceID = doc['key'] - commits = doc['doc_count'] - - # Gather the unique authors/committers - query['aggs'] = { - 'per_closer': { - 'terms': { - 'field': 'issueCloser', - 'size': 10000 - } - }, - 'per_creator': { - 'terms': { - 'field': 'issueCreator', - 'size': 10000 - } - } - } - xquery = copy.deepcopy(query) - xquery['query']['bool']['must'].append({'term': {'sourceID': sourceID}}) - xres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = xquery - ) - authors = [] - for person in xres['aggregations']['per_closer']['buckets']: - authors.append(person['key']) - for person in xres['aggregations']['per_creator']['buckets']: - authors.append(person['key']) - if commits > max_commits: - max_commits = commits - repos[sourceID] = authors - repo_commits[sourceID] = commits - - # Now, figure out which repos share the same contributors - repo_links = {} - repo_notoriety = {} - repodatas = {} - repo_authors = {} - minLinks = indata.get('links', 1) - - # Grab data of all sources - for ID, repo in repos.items(): - mylinks = {} - 
if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID): - continue - repodatas[ID] = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id = ID) - - for ID, repo in repos.items(): - mylinks = {} - if not ID in repodatas: - continue - repodata = repodatas[ID] - oID = ID - if indata.get('collapse'): - m = re.search(indata.get('collapse'), repodata['_source']['sourceURL']) - if m: - ID = m.group(1) - else: - ID = re.sub(r"^.+/", "", repodata['_source']['sourceURL']) - for xID, xrepo in repos.items(): - if xID in repodatas: - xrepodata = repodatas[xID] - if indata.get('collapse'): - m = re.search(indata.get('collapse'), xrepodata['_source']['sourceURL']) - if m: - xID = m.group(1) - else: - xID = re.sub(r"^.+/", "", xrepodata['_source']['sourceURL']) - if xID != ID: - xlinks = [] - for author in xrepo: - if author in repo: - xlinks.append(author) - lname = "%s@%s" % (ID, xID) # Link name - rname = "%s@%s" % (xID, ID) # Reverse link name - if len(xlinks) >= minLinks and not rname in repo_links: - mylinks[xID] = len(xlinks) - repo_links[lname] = repo_links.get(lname, 0) + len(xlinks) # How many contributors in common between project A and B? - if repo_links[lname] > max_shared: - max_shared = repo_links[lname] - if ID not in repo_notoriety: - repo_notoriety[ID] = set() - repo_notoriety[ID].update(mylinks.keys()) # How many projects is this repo connected to? - - if ID not in repo_authors: - repo_authors[ID] = set() - repo_authors[ID].update(repo) # How many projects is this repo connected to? - - if ID != oID: - repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] - if repo_commits[ID] > max_commits: - max_commits = repo_commits[ID] # Used for calculating max link thickness - if len(repo_notoriety[ID]) > max_links: - max_links = len(repo_notoriety[ID]) - if len(repo_authors[ID]) > max_authors: - max_authors = len(repo_authors[ID]) # Used for calculating max sphere size in charts - - # Now, pull it all together! 
- nodes = [] - links = [] - existing_repos = [] - for sourceID in repo_notoriety.keys(): - lsize = 0 - for k in repo_links.keys(): - fr, to = k.split('@') - if fr == sourceID or to == sourceID: - lsize += 1 - asize = len(repo_authors[sourceID]) - doc = { - 'id': sourceID, - 'name': sourceID, - 'issues': repo_commits[sourceID], - 'authors': asize, - 'links': lsize, - 'size': max(5, (1 - abs(math.log10(asize / max_authors))) * 45), - 'tooltip': "%u connections, %u contributors, %u issues" % (lsize, asize, repo_commits[sourceID]) - } - nodes.append(doc) - existing_repos.append(sourceID) - - for k, s in repo_links.items(): - size = s - fr, to = k.split('@') - if fr in existing_repos and to in existing_repos: - doc = { - 'source': fr, - 'target': to, - 'value': max(1, (size/max_shared) * 8), - 'name': "%s ↔ %s" % (fr, to), - 'tooltip': "%u contributors in common" % size - } - links.append(doc) - - JSON_OUT = { - 'maxLinks': max_links, - 'maxShared': max_shared, - 'widgetType': { - 'chartType': 'link' # Recommendation for the UI - }, - 'links': links, - 'nodes': nodes, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/retention.py b/api/pages/issue/retention.py deleted file mode 100644 index 22e021e3..00000000 --- a/api/pages/issue/retention.py +++ /dev/null @@ -1,265 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/retention -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows retention metrics for a set of issue trackers over a given period -# of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Factor' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows retention metrics for a set of issue trackers over a given period -# of time -# -######################################################################## - - - - - -""" -This is the code contributor retention factor renderer for Kibble -""" - -import json -import time -import re -import datetime - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - hl = indata.get('span', 12) # By default, we define a contributor as active if having committer in the past year - tnow = datetime.date.today() - nm = tnow.month - (tnow.month % 3) - ny = tnow.year - cy = ny - ts = [] - - if nm < 1: - nm += 12 - ny = ny - 1 - - peopleSeen = {} - activePeople = {} - allPeople = {} - FoundSomething = False - - ny = 1970 - while ny < cy or (ny == cy and (nm+3) <= tnow.month): - d = datetime.date(ny, nm, 1) - t = time.mktime(d.timetuple()) - nm += 3 - if nm > 12: - nm -= 12 - ny = ny + 1 - if ny == cy and nm > tnow.month: - break - d = datetime.date(ny, nm, 1) - tf = time.mktime(d.timetuple()) - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': t, - 'to': tf - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - - globcount = res['count'] - if globcount == 0 and FoundSomething == False: - continue - FoundSomething = True - - # Get top 1000 committers this period - query['aggs'] = { - 'by_o': { - 'terms': { - 'field': 'issueCloser', - 'size': 50000 - } - }, - 'by_c': { - 'terms': { - 'field': 'issueCreator', - 'size': 50000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - - retained = 0 - added = 0 - lost = 0 - - thisPeriod = [] - for bucket in res['aggregations']['by_o']['buckets']: - who = bucket['key'] - thisPeriod.append(who) - if who not in peopleSeen: - peopleSeen[who] = tf - added += 1 - activePeople[who] = tf - if who not in allPeople: - allPeople[who] = tf - - for bucket in res['aggregations']['by_c']['buckets']: - who = bucket['key'] - thisPeriod.append(who) - if who not in peopleSeen: - peopleSeen[who] = tf - added += 1 - if who not in activePeople: - activePeople[who] = tf - if who not in allPeople: - allPeople[who] = tf - - prune = [] - for k, v in activePeople.items(): - if v < (t - (hl*30.45*86400)): - prune.append(k) - lost += 1 - - for who in prune: - del activePeople[who] - del peopleSeen[who] - retained = len(activePeople) - added - ts.append({ - 'date': tf, - 'People who (re)joined': added, - 'People who quit': lost, - 'People retained': retained, - 'Active people': added + retained - }) - - groups = [ - ['More than 5 years', (5*365*86400)+1], - ['2 - 5 years', (2*365*86400)+1], - ['1 - 2 years', (365*86400)], - ['Less than a year', 1] - ] - - counts = {} - totExp = 0 - for person, age in activePeople.items(): - totExp += time.time() - 
allPeople[person] - for el in sorted(groups, key = lambda x: x[1], reverse = True): - if allPeople[person] <= time.time() - el[1]: - counts[el[0]] = counts.get(el[0], 0) + 1 - break - avgyr = (totExp / (86400*365)) / max(len(activePeople),1) - - ts = sorted(ts, key = lambda x: x['date']) - - avgm = "" - yr = int(avgyr) - ym = round((avgyr-yr)*12) - if yr >= 1: - avgm += "%u year%s" % (yr, "s" if yr != 1 else "") - if ym > 0: - avgm += "%s%u month%s" % (", " if yr > 0 else "", ym, "s" if ym != 1 else "") - JSON_OUT = { - 'text': "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." % (hl, avgm), - 'timeseries': ts, - 'counts': counts, - 'averageYears': avgyr, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/top-count.py b/api/pages/issue/top-count.py deleted file mode 100644 index f17f721b..00000000 --- a/api/pages/issue/top-count.py +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/issue/top-count -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 issue trackers by issues -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows top 25 issue trackers by issues -# -######################################################################## - - - - - -""" -This is the TopN repos by commits list renderer for Kibble -""" - -import json -import time -import re - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [ - {'term': {'issueCreator': indata.get('email')}}, - {'term': {'issueCloser': indata.get('email')}} - ] - query['query']['bool']['minimum_should_match'] = 1 - - - # Get top 25 committers this period - query['aggs'] = { - 'by_repo': { - 'terms': { - 'field': 'sourceID', - 'size': 5000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - - toprepos = [] - for bucket in res['aggregations']['by_repo']['buckets']: - ID = bucket['key'] - if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID): - it = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id = ID)['_source'] - repo = re.sub(r".+/([^/]+)$", r"\1", it['sourceURL']) - count = bucket['doc_count'] - toprepos.append([repo, count]) - - toprepos = sorted(toprepos, key = lambda x: x[1], reverse = True) - top = toprepos[0:24] - 
if len(toprepos) > 25: - count = 0 - for repo in toprepos[25:]: - count += repo[1] - top.append(["Other trackers", count]) - - tophash = {} - for v in top: - tophash[v[0]] = v[1] - - JSON_OUT = { - 'counts': tophash, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/top.py b/api/pages/issue/top.py deleted file mode 100644 index 33cde721..00000000 --- a/api/pages/issue/top.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/issue/top -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/TopList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N issues by interactions -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/TopList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N issues by interactions -# -######################################################################## - - - - - -""" -This is the issue actors stats page for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - }, - 'sort': { - 'comments': 'desc' - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 25, - body = query - ) - top = [] - for bucket in res['hits']['hits']: - doc = bucket['_source'] - doc['source'] = doc.get('url', '#') - doc['name'] = doc.get('key', 'unknown') - doc['subject'] = doc.get('title') - doc['count'] = doc.get('comments', 0) - top.append(doc) - - - JSON_OUT = { - 'topN': { - 'denoter': 'interactions', - 'icon': 'bug', - 'items': top - }, - 'okay': True, - 'responseTime': time.time() - now, - 'widgetType': { - 'chartType': 'line' - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/issue/trends.py 
b/api/pages/issue/trends.py deleted file mode 100644 index 7387d88f..00000000 --- a/api/pages/issue/trends.py +++ /dev/null @@ -1,357 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/issue/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a set of issue trackers over a given period of time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows trend data for a 
set of issue trackers over a given period of time -# -######################################################################## - - - - - -""" -This is the Issue trends renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_created = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'opener': { - 'cardinality': { - 'field': 'issueCreator' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - no_creators = res['aggregations']['opener']['value'] - - - # CLOSERS - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_closed = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'closer': { - 'cardinality': { - 'field': 'issueCloser' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - no_closers = res['aggregations']['closer']['value'] - - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'created': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - if viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of issues, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_created_before = res['count'] - - # Get number of committers, this period - query['aggs'] = { - 'opener': { - 'cardinality': { - 'field': 'issueCreator' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - 
no_creators_before = res['aggregations']['opener']['value'] - - - - # CLOSERS - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'closed': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - if viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'issueCreator': indata.get('email')}}, {'term': {'issueCloser': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of issues created, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="issue", - body = query - ) - no_issues_closed_before = res['count'] - - - # Get number of open/close, this period - query['aggs'] = { - 'closer': { - 'cardinality': { - 'field': 'issueCloser' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="issue", - size = 0, - body = query - ) - no_closers_before = res['aggregations']['closer']['value'] - - - trends = { - "created": { - 'before': no_issues_created_before, - 'after': no_issues_created, - 'title': "Issues opened this period" - }, - "authors": { - 'before': no_creators_before, - 'after': no_creators, - 'title': "People opening issues this period" - }, - "closed": { - 'before': no_issues_closed_before, - 'after': no_issues_closed, - 'title': "Issues closed this period" - }, - "closers": { - 'before': no_closers_before, - 'after': no_closers, - 'title': "People closing issues this period" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/keyphrases.py b/api/pages/mail/keyphrases.py deleted file mode 100644 index 4f8936e1..00000000 --- a/api/pages/mail/keyphrases.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) 
under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/keyphrases -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/PhraseList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the common key phrases in use on one or more mailing lists -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/PhraseList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the common key phrases in use on one or more mailing lists -# -######################################################################## - - - - - -""" -This is the common key phrases renderer for Kibble -""" - -import json -import time -import hashlib - 
-def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - }, - 'aggs': { - 'kpe': { - 'terms': { - 'field': 'kpe.keyword', - 'size': 50 - } - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - topN = [] - for bucket in res['aggregations']['kpe']['buckets']: - topN.append( { - 'phrase': bucket['key'], - 'count': bucket['doc_count'] - }) - - JSON_OUT = { - 'widgetType': { - 'chartType': 'bar' - }, - 'phrases': topN, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/map.py b/api/pages/mail/map.py deleted file mode 100644 index 3c446cc8..00000000 --- a/api/pages/mail/map.py +++ /dev/null @@ -1,313 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/mail/map -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of email author reply mappings -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of email author reply mappings -# -######################################################################## - - - - - -""" -This is the committer relationship list renderer for Kibble -""" - -import json -import time -import hashlib -import copy -import re -import math - -badBots = r"(JIRA|Hudson|jira|jenkins|GitHub|git@|dev@|bugzilla|gerrit)" - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - span = dateTo - dateFrom - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('search'): - query['query']['bool']['must'].append({'regexp': {'subject': indata.get('search')}}) - - if indata.get('email'): - query['query']['bool']['minimum_should_match'] = 1 - query['query']['bool']['should'] = [ - {'term': {'replyto.keyword': indata.get('email')}}, - {'term': {'sender': indata.get('email')}}, - ] - - # Get number of commits, this period, per repo - query['aggs'] = { - 'per_ml': { - 'terms': { - 'field': 'replyto.keyword' if not indata.get('author') else 'sender', - 'size': 150 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - repos = {} - repo_commits = {} - authorlinks = {} - max_emails = 0 - max_links = 0 - max_shared = 0 - max_authors = 0 - minLinks = indata.get('links', 1) - - if indata.get('email'): - del query['query']['bool']['should'] - del query['query']['bool']['minimum_should_match'] - - # For 
each repo, count commits and gather data on authors - for doc in res['aggregations']['per_ml']['buckets']: - sourceID = doc['key'] - emails = doc['doc_count'] - if re.search(badBots, sourceID): # No bots - continue - if emails > (span/86400)*4: # More than 4/day and we consider you a bot! - continue - - - # Gather the unique authors/committers - query['aggs'] = { - 'per_ml': { - 'terms': { - 'field': 'sender' if not indata.get('author') else 'replyto.keyword', - 'size': 5000 - } - } - } - xquery = copy.deepcopy(query) - - xquery['query']['bool']['must'].append({'term': {'replyto.keyword' if not indata.get('author') else 'sender': sourceID}}) - xres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = xquery - ) - authors = [] - for person in xres['aggregations']['per_ml']['buckets']: - pk = person['key'] - authors.append(pk) - if emails > max_emails: - max_emails = emails - repos[sourceID] = authors - repo_commits[sourceID] = emails - - # Now, figure out which repos share the same contributors - repo_links = {} - repo_notoriety = {} - repodatas = {} - repo_authors = {} - - # Grab data of all sources - for ID, repo in repos.items(): - mylinks = {} - hID = hashlib.sha1( ("%s%s" % (dOrg, ID)).encode('ascii', errors='replace')).hexdigest() - if not session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id = hID): - continue - repodatas[ID] = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id = hID) - - for ID, repo in repos.items(): - mylinks = {} - if not ID in repodatas: - continue - repodata = repodatas[ID] - oID = ID - if indata.get('collapse'): - m = re.search(indata.get('collapse'), repodata['_source']['email']) - if m: - ID = m.group(1) - xlinks = [] - for xID, xrepo in repos.items(): - if xID in repodatas: - xrepodata = repodatas[xID] - if indata.get('collapse'): - m = re.search(indata.get('collapse'), xrepodata['_source']['email']) - if m: - xID = m.group(1) - if xID != ID: - - if ID in xrepo: - 
xlinks.append(xID) - lname = "%s||%s" % (ID, xID) # Link name - rname = "%s||%s" % (xID, ID) # Reverse link name - if len(xlinks) > 0 and rname not in repo_links and len(xlinks) >= minLinks: - mylinks[ID] = mylinks.get(ID, 0) + 1 - repo_links[lname] = repo_links.get(lname, 0) + len(xlinks) # How many contributors in common between project A and B? - if repo_links[lname] > max_shared: - max_shared = repo_links[lname] - elif rname in repo_links: - repo_links[rname] = repo_links.get(rname, 0) + len(xlinks) - if ID not in repo_notoriety: - repo_notoriety[ID] = set() - repo_notoriety[ID].update(mylinks.keys()) # How many projects is this repo connected to? - - if ID not in repo_authors: - repo_authors[ID] = set() - repo_authors[ID].update(repo) # How many projects is this repo connected to? - - if ID != oID: - repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] - if repo_commits[ID] > max_emails: - max_emails = repo_commits[ID] # Used for calculating max link thickness - if len(repo_notoriety[ID]) > max_links: - max_links = len(repo_notoriety[ID]) - if len(repo_authors[ID]) > max_authors: - max_authors = len(repo_authors[ID]) # Used for calculating max sphere size in charts - - # Now, pull it all together! 
- nodes = [] - links = [] - existing_repos = [] - for sourceID, ns in repo_notoriety.items(): - lsize = 0 - for k in repo_links.keys(): - fr, to = k.split('||') - if fr == sourceID or to == sourceID: - lsize += 1 - asize = len(repo_authors[sourceID]) - doc = { - 'id': sourceID, - 'gravatar': hashlib.md5(sourceID.lower().encode('utf-8')).hexdigest(), - 'name': repodatas[sourceID]['_source'].get('name', sourceID), - 'replies': repo_commits[sourceID], - 'authors': asize, - 'links': lsize, - 'size': max(5, (1 - abs(math.log10(repo_commits[sourceID] / max_emails))) * 45), - 'tooltip': "%u connections, %u fellows, %u replies to" % (lsize, asize, repo_commits[sourceID]) - } - nodes.append(doc) - existing_repos.append(sourceID) - - for k, s in repo_links.items(): - size = s - fr, to = k.split('||') - if fr in existing_repos and to in existing_repos: - doc = { - 'source': fr, - 'target': to, - 'value': max(1, (size/max_shared) * 5), - 'name': "%s ↔ %s" % (fr, to), - 'tooltip': "%u topics exchanged" % size - } - links.append(doc) - - JSON_OUT = { - 'maxLinks': max_links, - 'maxShared': max_shared, - 'widgetType': { - 'chartType': 'link' # Recommendation for the UI - }, - 'links': links, - 'nodes': nodes, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/mood-timeseries.py b/api/pages/mail/mood-timeseries.py deleted file mode 100644 index be7e3b28..00000000 --- a/api/pages/mail/mood-timeseries.py +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/mood-timeseries -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of the (analyzed) mood in emails as a timeseries -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of the (analyzed) mood in emails as a timeseries -# -######################################################################## - - - - - -""" -This is the email mood timeseries renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - interval = indata.get('interval', 'week') - - # Define moods we know of - moods_good = set(['trust', 'joy', 'confident', 'positive']) - moods_bad = set(['sadness', 'anger', 'disgust', 'fear', 'negative']) - moods_neutral = set(['anticipation', 'surprise', 'tentative', 'analytical', 'neutral']) - all_moods = set(moods_good | moods_bad | moods_neutral) - - # Fetch all sources for default org - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { 'exists': { - 'field': 'mood' - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - emls = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - )['count'] - - query['aggs'] = { - 'history': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - } - } - } - - # Add aggregations for moods - for mood in all_moods: - query['aggs']['history']['aggs'][mood] = { - 'sum': { - 'field': "mood.%s" % mood - } - } - - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - timeseries = [] - - - for tz in res['aggregations']['history']['buckets']: - moods = {} - emls = tz['doc_count'] - for mood in all_moods: - moods[mood] = int (100 * tz.get(mood, {'value':0})['value'] / max(1, emls)) - moods['date'] = int(tz['key']/1000) - timeseries.append(moods) - - JSON_OUT = { - 'timeseries': timeseries, - 'okay': True - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/mood.py b/api/pages/mail/mood.py deleted file mode 100644 index a1beb46b..00000000 --- a/api/pages/mail/mood.py +++ /dev/null @@ -1,247 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/mood -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of the (analyzed) mood in emails -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of the (analyzed) mood in emails -# -######################################################################## - - - - - -""" -This is the email mood renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - # Define moods we know of - moods_good = set(['trust', 'joy', 'confident', 'positive']) - moods_bad = set(['sadness', 'anger', 'disgust', 'fear', 'negative']) - moods_neutral = set(['anticipation', 'surprise', 'tentative', 'analytical', 'neutral']) - all_moods = set(moods_good | moods_bad | moods_neutral) - - # Start off with a query for the entire org (we want to compare) - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - }, - { 'exists': { - 'field': 'mood' - } - } - ] - } - } - } - - # Count all emails, for averaging scores - gemls = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - )['count'] - - # Add aggregations for moods - query['aggs'] = { - - } - for mood in all_moods: - query['aggs'][mood] = { - 'sum': { - 'field': "mood.%s" % mood - } - } - - - global_mood_compiled = {} - mood_compiled = {} - txt = "This chart shows the ten potential mood types as they average on the emails in this period. A score of 100 means a sentiment is highly visible in most emails." - gtxt = "This shows the overall estimated mood as a gauge from terrible to good." - # If we're comparing against all lists, first do a global query - # and compile moods overall - if indata.get('relative'): - txt = "This chart shows the ten potential mood types on the selected lists as they compare against all mailing lists in the database. 
A score of 100 here means the sentiment conforms to averages across all lists." - gtxt = "This shows the overall estimated mood compared to all lists, as a gauge from terrible to good." - global_moods = {} - - gres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - for mood, el in gres['aggregations'].items(): - # If a mood is not present (iow sum is 0), remove it from the equation by setting to -1 - if el['value'] == 0: - el['value'] == -1 - global_moods[mood] = el['value'] - for k, v in global_moods.items(): - if v >= 0: - global_mood_compiled[k] = int( (v / max(1,gemls)) * 100) - - # Now, if we have a view (or not distinguishing), ... - ss = False - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - ss = True - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - ss = True - - # If we have a view enabled (and distinguish), compile local view against global view - # Else, just copy global as local - if ss or not indata.get('relative'): - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - del query['aggs'] # we have to remove these to do a count() - emls = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - )['count'] - - moods = {} - years = 0 - - for mood, el in res['aggregations'].items(): - if el['value'] == 0: - el['value'] == -1 - moods[mood] = el['value'] - for k, v in moods.items(): - if v > 0: - mood_compiled[k] = int(100 * int( ( v / max(1,emls)) * 100) / max(1, global_mood_compiled.get(k, 100))) - else: - mood_compiled = global_mood_compiled - - # If relative mode and a field is missing, assume 100 (norm) - if indata.get('relative'): - for M in all_moods: - if mood_compiled.get(M, 0) == 0: - mood_compiled[M] = 100 - - # Compile an overall happiness level - MAX = max(max(mood_compiled.values()),1) - X = 100 if 
indata.get('relative') else 0 - bads = X - for B in moods_bad: - if mood_compiled.get(B) and mood_compiled[B] > X: - bads += mood_compiled[B] - - happ = 50 - - goods = X - for B in moods_good: - if mood_compiled.get(B) and mood_compiled[B] > X: - goods += mood_compiled[B] - MAX = max(MAX, bads, goods) - if bads > 0: - happ -= (50*bads/MAX) - if goods > 0: - happ += (50*goods/MAX) - swingometer = max(0, min(100, happ)) - - # JSON out! - JSON_OUT = { - 'relativeMode': True, - 'text': txt, - 'counts': mood_compiled, - 'okay': True, - 'gauge': { - 'key': 'Happiness', - 'value': swingometer, - 'text': gtxt - } - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/pony-timeseries.py b/api/pages/mail/pony-timeseries.py deleted file mode 100644 index fefd7762..00000000 --- a/api/pages/mail/pony-timeseries.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/mail/pony-timeseries -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows timeseries of Pony Factor over time -# -######################################################################## - - - - - -""" -This is the pony factor renderer for Kibble -""" - -import json -import time -import re -import datetime -import dateutil.relativedelta - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - hl = indata.get('span', 24) - tnow = datetime.date.today() - nm = tnow.month - (tnow.month % 3) - ny = tnow.year - ts = [] - - if nm < 1: - nm += 12 - ny = ny - 1 - - while ny > 1970: - d = datetime.date(ny, nm, 1) - t = time.mktime(d.timetuple()) - d = d - dateutil.relativedelta.relativedelta(months=hl) - tf = time.mktime(d.timetuple()) - nm -= 3 - if nm < 1: - nm += 12 - ny = ny - 1 - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': tf, - 'to': t - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ], - 'must_not': [ - { - 'match': { - 'sourceURL': 'commits*' - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - ) - - globcount = res['count'] - if globcount == 0: - break - - # Get top 25 committers this period - query['aggs'] = { - 'by_sender': { - 'terms': { - 'field': 'sender', - 'size': 2500 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - - # PF for authors - pf_author = 0 - pf_author_count = 0 - cpf = {} - for bucket in res['aggregations']['by_sender']['buckets']: - count = bucket['doc_count'] - # Assume anyone sending > 10 emails per day is a bot (or a commit list)! - if count > (10*365*hl): - globcount -= count - continue - pf_author += 1 - pf_author_count += count - if '@' in bucket['key']: - mldom = bucket['key'].lower().split('@')[-1] - cpf[mldom] = True - if pf_author_count > int(globcount/2): - break - ts.append({ - 'date': t, - 'Pony Factor (authors)': pf_author, - 'Meta-Pony Factor': len(cpf) - }) - - ts = sorted(ts, key = lambda x: x['date']) - - JSON_OUT = { - 'text': "This shows Pony Factors as calculated over a %u month timespan. Authorship is a measure of the people it takes to make up the bulk of email traffic, and meta-pony is an estimation of how many organisations/companies are involved." % hl, - 'timeseries': ts, - 'okay': True, - 'responseTime': time.time() - now, - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/relationships.py b/api/pages/mail/relationships.py deleted file mode 100644 index 16a0cdb0..00000000 --- a/api/pages/mail/relationships.py +++ /dev/null @@ -1,293 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/relationships -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between mailing lists -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a breakdown of contributor relationships between mailing lists -# -######################################################################## - - - - - -""" -This is the committer relationship list renderer for Kibble -""" - -import json -import time -import hashlib -import copy -import re -import math - -def 
run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'sender': indata.get('email')}}) - - # Get number of commits, this period, per repo - query['aggs'] = { - 'per_ml': { - 'terms': { - 'field': 'sourceID', - 'size': 10000 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - repos = {} - repo_commits = {} - authorlinks = {} - max_emails = 0 - max_links = 0 - max_shared = 0 - max_authors = 0 - minLinks = indata.get('links', 1) - - # For each repo, count commits and gather data on authors - for doc in res['aggregations']['per_ml']['buckets']: - sourceID = doc['key'] - emails = doc['doc_count'] - - # Gather the unique authors/committers - query['aggs'] = { - 'per_ml': { - 'terms': { - 'field': 'sender', - 'size': 10000 - } - } - } - xquery = copy.deepcopy(query) - xquery['query']['bool']['must'].append({'term': {'sourceID': sourceID}}) - xres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = xquery - ) - authors = [] - for person in xres['aggregations']['per_ml']['buckets']: - authors.append(person['key']) - if emails > max_emails: - max_emails = emails - repos[sourceID] = authors - repo_commits[sourceID] = emails - - # Now, figure out which repos share the same contributors - repo_links = {} - repo_notoriety = {} - repodatas = {} - repo_authors = {} - - # Grab data of all sources - for ID, repo in repos.items(): - mylinks = {} - if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = ID): - continue - repodatas[ID] = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id = ID) - - for ID, repo in repos.items(): - mylinks = {} - if not ID in repodatas: - continue - repodata = repodatas[ID] - oID = ID - 
if indata.get('collapse'): - m = re.search(indata.get('collapse'), repodata['_source']['sourceURL']) - if m: - ID = m.group(1) - else: - ID = re.sub(r"^.+/(?:list\.html\?)?", "", repodata['_source']['sourceURL']) - for xID, xrepo in repos.items(): - if xID in repodatas: - xrepodata = repodatas[xID] - if indata.get('collapse'): - m = re.search(indata.get('collapse'), xrepodata['_source']['sourceURL']) - if m: - xID = m.group(1) - else: - xID = re.sub(r"^.+/(?:list\.html\?)?", "", xrepodata['_source']['sourceURL']) - if xID != ID: - xlinks = [] - for author in xrepo: - if author in repo: - xlinks.append(author) - lname = "%s||%s" % (ID, xID) # Link name - rname = "%s||%s" % (xID, ID) # Reverse link name - if len(xlinks) >= minLinks and not rname in repo_links: - mylinks[xID] = len(xlinks) - repo_links[lname] = repo_links.get(lname, 0) + len(xlinks) # How many contributors in common between project A and B? - if repo_links[lname] > max_shared: - max_shared = repo_links[lname] - if ID not in repo_notoriety: - repo_notoriety[ID] = set() - repo_notoriety[ID].update(mylinks.keys()) # How many projects is this repo connected to? - - if ID not in repo_authors: - repo_authors[ID] = set() - repo_authors[ID].update(repo) # How many projects is this repo connected to? - - if ID != oID: - repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] - if repo_commits[ID] > max_emails: - max_emails = repo_commits[ID] # Used for calculating max link thickness - if len(repo_notoriety[ID]) > max_links: - max_links = len(repo_notoriety[ID]) - if len(repo_authors[ID]) > max_authors: - max_authors = len(repo_authors[ID]) # Used for calculating max sphere size in charts - - # Now, pull it all together! 
- nodes = [] - links = [] - existing_repos = [] - for sourceID in repo_notoriety.keys(): - lsize = 0 - for k in repo_links.keys(): - fr, to = k.split('||') - if fr == sourceID or to == sourceID: - lsize += 1 - asize = len(repo_authors[sourceID]) - doc = { - 'id': sourceID, - 'name': sourceID, - 'emails': repo_commits[sourceID], - 'authors': asize, - 'links': lsize, - 'size': max(5, (1 - abs(math.log10(asize / max_authors))) * 45), - 'tooltip': "%u connections, %u contributors, %u emails" % (lsize, asize, repo_commits[sourceID]) - } - nodes.append(doc) - existing_repos.append(sourceID) - - for k, s in repo_links.items(): - size = s - fr, to = k.split('||') - if fr in existing_repos and to in existing_repos: - doc = { - 'source': fr, - 'target': to, - 'value': max(1, (size/max_shared) * 8), - 'name': "%s ↔ %s" % (fr, to), - 'tooltip': "%u contributors in common" % size - } - links.append(doc) - - JSON_OUT = { - 'maxLinks': max_links, - 'maxShared': max_shared, - 'widgetType': { - 'chartType': 'link' # Recommendation for the UI - }, - 'links': links, - 'nodes': nodes, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/timeseries-single.py b/api/pages/mail/timeseries-single.py deleted file mode 100644 index 855539cc..00000000 --- a/api/pages/mail/timeseries-single.py +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/timeseries-single -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# -######################################################################## - - - - - -""" -This is the email-only timeseries renderer for Kibble -unlike timeseries.py, this only shows mail sent, not topics or authors. -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'sender': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of committers, this period - query['aggs'] = { - 'timeseries': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['timeseries']['buckets']: - ts = int(bucket['key'] / 1000) - timeseries.append({ - 'date': ts, - 'emails': bucket['doc_count'] - }) - - JSON_OUT = { - 'widgetType': { - 'chartType': 'bar' # Recommendation for the UI - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/timeseries.py 
b/api/pages/mail/timeseries.py deleted file mode 100644 index 7b446f42..00000000 --- a/api/pages/mail/timeseries.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/timeseries -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Timeseries' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows email sent over time -# 
-######################################################################## - - - - - -""" -This is the email timeseries renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - which = 'committer_email' - role = 'committer' - if indata.get('author', False): - which = 'author_email' - role = 'author' - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d 00:00:00", time.gmtime(dateFrom)), - 'to': time.strftime("%Y/%m/%d 23:59:59", time.gmtime(dateTo)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['should'] = [{'term': {'sender': indata.get('email')}}] - query['query']['bool']['minimum_should_match'] = 1 - - # Get number of committers, this period - query['aggs'] = { - 'timeseries': { - 'date_histogram': { - 'field': 'date', - 'interval': interval - }, - 'aggs': { - 'email': { - 'sum': { - 'field': 'emails' - } - }, - 'topics': { - 'sum': { - 'field': 'topics' - } - }, - 'authors': { - 'sum': { - 'field': 'authors' - } - } - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="mailstats", - size = 0, - body = query - ) - - timeseries = [] - for bucket in res['aggregations']['timeseries']['buckets']: - ts = int(bucket['key'] / 1000) - timeseries.append({ - 'date': ts, - 'emails': bucket['email']['value'], - 'topics': bucket['topics']['value'], - 'authors': bucket['authors']['value'] - }) - - JSON_OUT = { - 'widgetType': { - 'chartType': 'bar' # Recommendation for the UI - }, - 'timeseries': timeseries, - 'interval': interval, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/top-authors.py b/api/pages/mail/top-authors.py deleted file mode 100644 index 52da07df..00000000 --- a/api/pages/mail/top-authors.py +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/top-authors -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of email authors -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of email authors -# -######################################################################## - - - - - -""" -This is the TopN committers list renderer for Kibble -""" - -import json -import time -import hashlib -import re - -ROBITS = r"(git|jira|jenkins|gerrit)@" - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Get top 25 committers this period - query['aggs'] = { - 'authors': { - 'terms': { - 'field': 'sender', - 'size': 30 - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - people = {} - for bucket in res['aggregations']['authors']['buckets']: - email = bucket['key'] - # By default, we want to see humans, not bots on this list! 
- if re.match(ROBITS, email): - continue - count = bucket['doc_count'] - sha = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('utf-8') ).hexdigest() - if session.DB.ES.exists(index=session.DB.dbname,doc_type="person",id = sha): - pres = session.DB.ES.get( - index=session.DB.dbname, - doc_type="person", - id = sha - ) - person = pres['_source'] - person['name'] = person.get('name', 'unknown') - people[email] = person - people[email]['gravatar'] = hashlib.md5(person.get('email', 'unknown').encode('utf-8')).hexdigest() - people[email]['count'] = count - - topN = [] - for email, person in people.items(): - topN.append(person) - topN = sorted(topN, key = lambda x: x['count'], reverse = True) - - JSON_OUT = { - 'topN': { - 'denoter': 'emails', - 'items': topN - }, - 'sorted': people, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/top-topics.py b/api/pages/mail/top-topics.py deleted file mode 100644 index 9acda6cf..00000000 --- a/api/pages/mail/top-topics.py +++ /dev/null @@ -1,153 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/mail/top-topics -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of email authors -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/CommitterList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows the top N of email authors -# -######################################################################## - - - - - -""" -This is the TopN committers list renderer for Kibble -""" - -import json -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - - interval = indata.get('interval', 'month') - - - #################################################################### - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - }, - 'sort': [{ - 'emails': 'desc' - }] - } - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="mailtop", - size = 25, - body = query - ) - - topN = [] - for bucket in res['hits']['hits']: - topN.append( { - 'source': bucket['_source']['sourceURL'], - 'name': bucket['_source']['subject'], - 'count': bucket['_source']['emails'] - }) - - JSON_OUT = { - 'topN': { - 'denoter': 'emails', - 'items': topN, - 'icon': 'envelope' - }, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/trends.py b/api/pages/mail/trends.py deleted file mode 100644 index ac0d5186..00000000 --- a/api/pages/mail/trends.py +++ /dev/null @@ -1,335 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/mail/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick email trend summary of the past 6 months for your org -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick email trend summary of the past 6 months for your org -# -######################################################################## - - - - - -""" -This is the Email trends renderer for Kibble -""" - -import json -import time -import datetime - -def run(API, environ, indata, session): - - # We need to be logged 
in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - dOrg = session.user['defaultOrganisation'] or "apache" - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateFrom)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateTo)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'sender': indata.get('email')}}) - - - # Get number of threads and emails, this period - query['aggs'] = { - 'topics': { - 'sum': { - 'field': 'topics' - } - }, - 'emails': { - 'sum': { - 'field': 'emails' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="mailstats", - size = 0, - body = query - ) - no_topics = res['aggregations']['topics']['value'] - no_emails = res['aggregations']['emails']['value'] - - - # Authors - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateFrom)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateTo)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'sender': indata.get('email')}}) - - # Get number of authors, this period - query['aggs'] = { - 'authors': { - 'cardinality': { - 'field': 'sender' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - no_authors = res['aggregations']['authors']['value'] - - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateYonder)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateFrom-1)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'sender': indata.get('email')}}) - - - # Get number of threads and emails, this period - query['aggs'] = { - 'topics': { - 'sum': { - 'field': 'topics' - } - }, - 'emails': { - 'sum': { - 'field': 'emails' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="mailstats", - size = 0, - body = query - ) - no_topics_before = res['aggregations']['topics']['value'] - no_emails_before = res['aggregations']['emails']['value'] - - - # Authors - - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'date': { - 'from': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateYonder)), - 'to': time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(dateFrom-1)) - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - # Source-specific or view-specific?? 
- if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - if indata.get('email'): - query['query']['bool']['must'].append({'term': {'sender': indata.get('email')}}) - - # Get number of authors, this period - query['aggs'] = { - 'authors': { - 'cardinality': { - 'field': 'sender' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - no_authors_before = res['aggregations']['authors']['value'] - - - - - trends = { - "authors": { - 'before': no_authors_before, - 'after': no_authors, - 'title': "People sending email this period" - }, - "topics": { - 'before': no_topics_before, - 'after': no_topics, - 'title': "Topics discussed this period" - }, - "email": { - 'before': no_emails_before, - 'after': no_emails, - 'title': "Emails sent this period" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/org/contributors.py b/api/pages/org/contributors.py deleted file mode 100644 index 9210dfff..00000000 --- a/api/pages/org/contributors.py +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/org/contributors -######################################################################## -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/contributorList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows contributors for the entire org or matching filters. -# -######################################################################## - - - - - -""" -This is the contributor list renderer for Kibble -""" - -import json -import time -import hashlib - -cached_people = {} # Store people we know, so we don't have to fetch them again. - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - # Fetch all contributors for the org - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - - # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) - elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - - # Date specific? - dateTo = indata.get('to', int(time.time())) - dateFrom = indata.get('from', dateTo - (86400*30*6)) # Default to a 6 month span - query['query']['bool']['must'].append( - {'range': - { - 'ts': { - 'from': dateFrom, - 'to': dateTo - } - } - } - ) - emails = [] - contribs = {} - - for field in ['sender', 'author_email', 'issueCreator', 'issueCloser']: - N = 0 - while N < 5: - query['aggs'] = { - 'by_id': { - 'terms': { - 'field': field, - 'size': 10000, - 'include': { - 'partition': N, - 'num_partitions': 5 - }, - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="*,-*_code_commit,-*_file_history", - size = 0, - body = query - ) - # Break if we've found nothing more - #if len(res['aggregations']['by_id']['buckets']) == 0: - #break - # otherwise, add 'em to the pile - for k in res['aggregations']['by_id']['buckets']: - if k['key'] not in emails: - emails.append(k['key']) - contribs[k['key']] = contribs.get(k['key'], 0) + k['doc_count'] - N += 1 - - people = [] - for email in emails: - pid = hashlib.sha1( ("%s%s" % (dOrg, email)).encode('ascii', errors='replace')).hexdigest() - person = None - if pid in cached_people: - person = cached_people[pid] - else: - try: - doc = session.DB.ES.get(index=session.DB.dbname, 
doc_type = 'person', id = pid) - cached_people[pid] = { - 'name': doc['_source']['name'], - 'email': doc['_source']['email'], - 'gravatar': hashlib.md5( email.encode('ascii', errors = 'replace')).hexdigest() - } - person = cached_people[pid] - except: - pass # Couldn't find 'em, booo - if person: - person['contributions'] = contribs.get(email, 0) - people.append(person) - - JSON_OUT = { - 'people': people, - 'okay': True - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/org/members.py b/api/pages/org/members.py deleted file mode 100644 index 3b58852c..00000000 --- a/api/pages/org/members.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/org/members -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/OrgMembers' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Lists the members of an organisation -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# description: Nothing... -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# type: array -# items: -# $ref: '#/components/schemas/OrgMembers' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Lists the members of an organisation -# put: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/UserAccountEdit' -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Invite a person to an organisation -# delete: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/UserAccountEdit' -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - 
cookieAuth: [] -# summary: Remove a person from an organisation -# -######################################################################## - - - - - -""" -This is the Org list renderer for Kibble -""" - -import json -import time -import hashlib - -def canInvite(session): - """ Determine if the user can edit sources in this org """ - if session.user['userlevel'] == 'admin': - return True - - dOrg = session.user['defaultOrganisation'] or "apache" - if session.DB.ES.exists(index=session.DB.dbname, doc_type="organisation", id= dOrg): - xorg = session.DB.ES.get(index=session.DB.dbname, doc_type="organisation", id= dOrg)['_source'] - if session.user['email'] in xorg['admins']: - return True - - -def run(API, environ, indata, session): - now = time.time() - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint!") - - method = environ['REQUEST_METHOD'] - - ################################################# - # Inviting a new member? 
# - ################################################# - if method == "PUT": - if canInvite(session): - newmember = indata.get('email') - isadmin = indata.get('admin', False) - orgid = session.user['defaultOrganisation'] or "apache" - # Make sure the org exists - if not session.DB.ES.exists(index=session.DB.dbname, doc_type='organisation', id = orgid): - raise API.exception(403, "No such organisation!") - - # make sure the user account exists - if not session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = newmember): - raise API.exception(403, "No such user!") - - # Modify user account - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = newmember) - if orgid not in doc['_source']['organisations']: # No duplicates, please - doc['_source']['organisations'].append(orgid) - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = newmember, body = doc['_source']) - - - # Get org doc from ES - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='organisation', id = orgid) - if isadmin: - if newmember not in doc['_source']['admins']: - doc['_source']['admins'].append(newmember) - # Override old doc - session.DB.ES.index(index=session.DB.dbname, doc_type='organisation', id = orgid, body = doc['_source']) - time.sleep(1) # Bleh!! - - # If an admin, and not us, and reinvited, we purge the admin bit - elif newmember in doc['_source']['admins']: - if newmember == session.user['email']: - raise API.exception(403, "You can't remove yourself from an organisation.") - doc['_source']['admins'].remove(newmember) - # Override old doc - session.DB.ES.index(index=session.DB.dbname, doc_type='organisation', id = orgid, body = doc['_source']) - time.sleep(1) # Bleh!! 
- yield json.dumps({"okay": True, "message": "Member invited!!"}) - - return - else: - raise API.exception(403, "Only administrators or organisation owners can invite new members.") - - ################################################# - # DELETE: Remove a member # - ################################################# - if method == "DELETE": - if canInvite(session): - memberid = indata.get('email') - isadmin = indata.get('admin', False) - orgid = session.user['defaultOrganisation'] or "apache" - - # We can't remove ourselves! - if memberid == session.user['email']: - raise API.exception(403, "You can't remove yourself from an organisation.") - - # Make sure the org exists - if not session.DB.ES.exists(index=session.DB.dbname, doc_type='organisation', id = orgid): - raise API.exception(403, "No such organisation!") - - # make sure the user account exists - if not session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = memberid): - raise API.exception(403, "No such user!") - - # Modify user account - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = memberid) - if orgid in doc['_source']['organisations']: # No duplicates, please - doc['_source']['organisations'].remove(orgid) - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = memberid, body = doc['_source']) - - # Check is user is admin and remove if so - # Get org doc from ES - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='organisation', id = orgid) - if memberid in doc['_source']['admins']: - doc['_source']['admins'].remove(memberid) - # Override old doc - session.DB.ES.index(index=session.DB.dbname, doc_type='organisation', id = orgid, body = doc['_source']) - time.sleep(1) # Bleh!! 
- - yield json.dumps({"okay": True, "message": "Member removed!"}) - return - else: - raise API.exception(403, "Only administrators or organisation owners can invite new members.") - - - ################################################# - # GET/POST: Display members # - ################################################# - if method in ["GET", "POST"]: - orgid = session.user['defaultOrganisation'] or "apache" - if not session.DB.ES.exists(index=session.DB.dbname, doc_type='organisation', id = orgid): - raise API.exception(403, "No such organisation!") - - # Only admins should be able to view this! - if not canInvite(session): - raise API.exception(403, "Only organisation owners can view this list.") - - # Find everyone affiliated with this org - query = { - 'query': { - 'bool': { - 'must': [ - { - 'term': { - 'organisations': orgid - } - } - ] - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="useraccount", - size = 5000, # TO-DO: make this a scroll?? - body = query - ) - members = [] - for doc in res['hits']['hits']: - members.append(doc['_id']) - - # Get org doc from ES - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='organisation', id = orgid) - JSON_OUT = { - 'members': members, - 'admins': doc['_source']['admins'], - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/org/trends.py b/api/pages/org/trends.py deleted file mode 100644 index d0188a41..00000000 --- a/api/pages/org/trends.py +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/org/trends -######################################################################## -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Trend' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick trend summary of the past 6 months for your org -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Sloc' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Shows a quick trend summary of the past 6 months for your org -# -######################################################################## - - - - - -""" -This is the org trend renderer for Kibble -""" - -import json -import time - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - now = time.time() - - # First, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - if session.DB.ES.exists(index=session.DB.dbname, doc_type="view", id = indata['view']): - view = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id = indata['view']) - viewList = view['_source']['sourceList'] - - dateTo = int(time.time()) - dateFrom = dateTo - (86400*30*3) # Default to a quarter - if dateFrom < 0: - dateFrom = 0 - dateYonder = dateFrom - (dateTo - dateFrom) - - - - #################################################################### - # We start by doing all the queries for THIS period. # - # Then we reset the query, and change date to yonder-->from # - # and rerun the same queries. # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "kibbledemo" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateFrom, - 'to': dateTo - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - - # Get number of commits, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits = res['count'] - - - # Get number of committers, this period - query['aggs'] = { - 'authors': { - 'cardinality': { - 'field': 'author_email' - } - } - - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - no_authors = res['aggregations']['authors']['value'] - - - #################################################################### - # Change to PRIOR SPAN # - #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" - query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'tsday': { - 'from': dateYonder, - 'to': dateFrom-1 - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - - # Get number of 
commits, this period - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="code_commit", - body = query - ) - no_commits_before = res['count'] - - # Get number of committers, this period - query['aggs'] = { - 'authors': { - 'cardinality': { - 'field': 'author_email' - } - } - } - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="code_commit", - size = 0, - body = query - ) - no_authors_before = res['aggregations']['authors']['value'] - - - trends = { - "authors": { - 'before': no_authors_before, - 'after': no_authors, - 'title': "Contributors this quarter" - }, - 'commits': { - 'before': no_commits_before, - 'after': no_commits, - 'title': "Commits this quarter" - } - } - - JSON_OUT = { - 'trends': trends, - 'okay': True, - 'responseTime': time.time() - now - } - yield json.dumps(JSON_OUT) diff --git a/api/pages/sources.py b/api/pages/sources.py deleted file mode 100644 index 0a46756b..00000000 --- a/api/pages/sources.py +++ /dev/null @@ -1,283 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-######################################################################## -# OPENAPI-URI: /api/sources -######################################################################## -# delete: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/SourceID' -# description: Source ID info -# required: true -# security: -# - cookieAuth: [] -# summary: Delete an existing source -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/SourceList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Fetches a list of all sources for this organisation -# patch: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Source' -# description: New source data to set -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Edit an existing source -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/SourceList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Fetches a list of all sources for this organisation -# put: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/SourceListAdd' -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: 
'#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Add a new source -# -######################################################################## - - - - - -""" -This is the source list handler for Kibble -""" - -import json -import re -import time -import hashlib -import yaml - -def canModifySource(session): - """ Determine if the user can edit sources in this org """ - - dOrg = session.user['defaultOrganisation'] or "apache" - if session.DB.ES.exists(index=session.DB.dbname, doc_type="organisation", id= dOrg): - xorg = session.DB.ES.get(index=session.DB.dbname, doc_type="organisation", id= dOrg)['_source'] - if session.user['email'] in xorg['admins']: - return True - if session.user['userlevel'] == 'admin': - return True - return False - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - method = environ['REQUEST_METHOD'] - dOrg = session.user['defaultOrganisation'] - - if method in ['GET', 'POST']: - # Fetch organisation data - - # Make sure we have a default/current org set - if 'defaultOrganisation' not in session.user or not session.user['defaultOrganisation']: - raise API.exception(400, "You must specify an organisation as default/current in order to add sources.") - - if session.DB.ES.exists(index=session.DB.dbname, doc_type="organisation", id= dOrg): - org = session.DB.ES.get(index=session.DB.dbname, doc_type="organisation", id= dOrg)['_source'] - del org['admins'] - else: - raise API.exception(404, "No such organisation, '%s'" % (dOrg or "(None)")) - - sourceTypes = indata.get('types', []) - # Fetch all sources for default org - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="source", - size = 5000, - body = { - 'query': { - 'term': { - 'organisation': dOrg - } - } - } - ) - - # Secondly, fetch the view if we have such a thing enabled - viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter') and indata.get('quick'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - sources = [] - for hit in res['hits']['hits']: - doc = hit['_source'] - if viewList and not doc['sourceID'] in viewList: - continue - if sourceTypes and not doc['type'] in sourceTypes: - continue - if indata.get('quick'): - xdoc = { - 'sourceID': doc['sourceID'], - 'type': doc['type'], - 'sourceURL': doc['sourceURL'] - } - sources.append(xdoc) - else: - # Creds should be anonymous here - if 'creds' in doc: - del doc['creds'] - sources.append(doc) - - JSON_OUT = { - 'sources': sources, - 'okay': True, - 'organisation': org - } - yield json.dumps(JSON_OUT) - return - - # Add one or more sources - if method == "PUT": - if canModifySource(session): - new = 0 - old = 0 - stypes = yaml.load(open("yaml/sourcetypes.yaml")) - for source in indata.get('sources', []): - 
sourceURL = source['sourceURL'] - sourceType = source['type'] - creds = {} - if not sourceType in stypes: - raise API.exception(400, "Attempt to add unknown source type!") - if 'optauth' in stypes[sourceType]: - for el in stypes[sourceType]['optauth']: - if el in source and len(source[el]) > 0: - creds[el] = source[el] - sourceID = hashlib.sha224( ("%s-%s" % (sourceType, sourceURL)).encode('utf-8') ).hexdigest() - - # Make sure we have a default/current org set - if 'defaultOrganisation' not in session.user or not session.user['defaultOrganisation']: - raise API.exception(400, "You must first specify an organisation as default/current in order to add sources.") - - doc = { - 'organisation': dOrg, - 'sourceURL': sourceURL, - 'sourceID': sourceID, - 'type': sourceType, - 'creds': creds, - 'steps': {} - } - if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = sourceID): - old += 1 - else: - new += 1 - session.DB.ES.index(index=session.DB.dbname, doc_type="source", id = sourceID, body = doc) - yield json.dumps({ - "message": "Sources added/updated", - "added": new, - "updated": old - }) - else: - raise API.exception(403, "You don't have permission to add sources to this organisation.") - - # Delete a source - if method == "DELETE": - if canModifySource(session): - sourceID = indata.get('id') - if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id = sourceID): - # Delete all data pertainig to this source - # For ES >= 6.x, use a glob for removing from all indices - if session.DB.ESversion > 5: - session.DB.ES.delete_by_query(index=session.DB.dbname+'_*', body = {'query': {'match': {'sourceID': sourceID}}}) - else: - # For ES <= 5.x, just remove from the main index - session.DB.ES.delete_by_query(index=session.DB.dbname, body = {'query': {'match': {'sourceID': sourceID}}}) - yield json.dumps({'message': "Source deleted"}) - else: - raise API.exception(404, "No such source item") - else: - raise API.exception(403, "You don't have 
permission to delete this source.") - - # Edit a source - if method == "PATCH": - pass diff --git a/api/pages/verify.py b/api/pages/verify.py deleted file mode 100644 index 0b4d7071..00000000 --- a/api/pages/verify.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/verify/{email}/{vcode} -######################################################################## -# get: -# summary: Verify an account -# parameters: -# - name: email -# in: path -# description: Email address of account -# required: true -# schema: -# type: string -# - name: vcode -# in: path -# description: Verification code -# required: true -# schema: -# type: string -# responses: -# '200': -# description: 200 Response -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# default: -# description: unexpected error -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# -######################################################################## - - - - - -""" -This is the user account verifier for Kibble. 
-""" - - -def run(API, environ, indata, session): - - # Get vocde, make sure it's 40 chars - vcode = indata.get('vcode') - if len(vcode) != 40: - raise API.exception(400, "Invalid verification code!") - - # Find the account with this vcode - email = indata.get('email') - if len(email) < 7: - raise API.exception(400, "Invalid email address presented.") - - if session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = email): - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = email) - # Do the codes match?? - if doc['_source']['vcode'] == vcode: - doc['_source']['verified'] = True - # Save account as verified - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = email, body = doc['_source']) - yield("Your account has been verified, you can now log in!") - else: - raise API.exception(404, "Invalid verification code presented!") - else: - raise API.exception(404, "Invalid verification code presented!") # Don't give away if such a user exists, pssst - \ No newline at end of file diff --git a/api/pages/views.py b/api/pages/views.py deleted file mode 100644 index 5898e110..00000000 --- a/api/pages/views.py +++ /dev/null @@ -1,306 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -######################################################################## -# OPENAPI-URI: /api/views -######################################################################## -# delete: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/editView' -# description: View to delete -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Delete a new view -# get: -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ViewList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Fetches a list of all views (filters) for this user -# patch: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/editView' -# description: New source data to set -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Edit an existing source -# post: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/defaultWidgetArgs' -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ViewList' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: 
'#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Fetches a list of all views (filters) for this user -# put: -# requestBody: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/editView' -# description: New view data to add -# required: true -# responses: -# '200': -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/ActionCompleted' -# description: 200 Response -# default: -# content: -# application/json: -# schema: -# $ref: '#/components/schemas/Error' -# description: unexpected error -# security: -# - cookieAuth: [] -# summary: Add a new view -# -######################################################################## - - - - - -""" -This is the views (filters) list handler for Kibble -""" - -import json -import re -import time -import hashlib - -def run(API, environ, indata, session): - - # We need to be logged in for this! - if not session.user: - raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - method = environ['REQUEST_METHOD'] - dOrg = session.user['defaultOrganisation'] or "apache" - - # Are we adding a view? - if method == 'PUT': - viewID = hashlib.sha224( ("%s-%s-%s" % (time.time(), session.user['email'], dOrg) ).encode('utf-8') ).hexdigest() - sources = indata.get('sources', []) - name = indata.get('name', "unknown view") - public = indata.get('public', False) - if public: - if not (session.user['userlevel'] == 'admin' or dOrg in session.user['ownerships']): - raise API.exception(403, "Only owners of an organisation may create public views.") - doc = { - 'id': viewID, - 'email': session.user['email'], - 'organisation': dOrg, - 'sourceList': sources, - 'name': name, - 'created': int(time.time()), - 'publicView': public - } - session.DB.ES.index(index=session.DB.dbname, doc_type="view", id = viewID, body = doc) - yield json.dumps({'okay': True, 'message': "View created"}) - - # Are we editing (patching) a view? 
- if method == 'PATCH': - viewID = indata.get('id') - if viewID and session.DB.ES.exists(index=session.DB.dbname, doc_type="view", id = viewID): - doc = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id = viewID) - if session.user['userlevel'] == 'admin' or doc['_source']['email'] == session.user['email']: - sources = indata.get('sources', []) - doc['_source']['sourceList'] = sources - session.DB.ES.index(index=session.DB.dbname, doc_type="view", id = viewID, body = doc['_source']) - yield json.dumps({'okay': True, 'message': "View updated"}) - else: - raise API.exception(403, "You don't own this view, and cannot edit it.") - else: - raise API.exception(404, "We couldn't find a view with this ID.") - - # Removing a view? - if method == 'DELETE': - viewID = indata.get('id') - if viewID and session.DB.ES.exists(index=session.DB.dbname, doc_type="view", id = viewID): - doc = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id = viewID) - if session.user['userlevel'] == 'admin' or doc['_source']['email'] == session.user['email']: - session.DB.ES.delete(index=session.DB.dbname, doc_type="view", id = viewID) - yield json.dumps({'okay': True, 'message': "View deleted"}) - else: - raise API.exception(403, "You don't own this view, and cannot delete it.") - else: - raise API.exception(404, "We couldn't find a view with this ID.") - - - if method in ['GET', 'POST']: - # Fetch all views for default org - - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="view", - size = 5000, - body = { - 'query': { - 'term': { - 'email': session.user['email'] - } - } - } - ) - - - # Are we looking at someone elses view? 
- if indata.get('view'): - viewID = indata.get('view') - if session.DB.ES.exists(index=session.DB.dbname, doc_type="view", id = viewID): - blob = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id = viewID) - if blob['_source']['email'] != session.user['email'] and not blob['_source']['publicView']: - blob['_source']['name'] += " (shared by " + blob['_source']['email'] + ")" - res['hits']['hits'].append(blob) - sources = [] - - # Include public views?? - if not indata.get('sources', False): - pres = session.DB.ES.search( - index=session.DB.dbname, - doc_type="view", - size = 5000, - body = { - 'query': { - 'bool': { - 'must': [ - {'term': - { - 'publicView': True - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } - } - ) - for hit in pres['hits']['hits']: - if hit['_source']['email'] != session.user['email']: - hit['_source']['name'] += " (shared view)" - res['hits']['hits'].append(hit) - - for hit in res['hits']['hits']: - doc = hit['_source'] - if doc['organisation'] != dOrg: - continue - if indata.get('quick'): - xdoc = { - 'id': doc['id'], - 'name': doc['name'], - 'organisation': doc['organisation'] - } - sources.append(xdoc) - else: - sources.append(doc) - - allsources = [] - if indata.get('sources', False): - res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="source", - size = 5000, - body = { - 'query': { - 'term': { - 'organisation': dOrg - } - } - } - ) - for zdoc in res['hits']['hits']: - doc = zdoc['_source'] - xdoc = { - 'sourceID': doc['sourceID'], - 'type': doc['type'], - 'sourceURL': doc['sourceURL'] - } - allsources.append(xdoc) - - JSON_OUT = { - 'views': sources, - 'sources': allsources, - 'okay': True, - 'organisation': dOrg - } - yield json.dumps(JSON_OUT) diff --git a/api/plugins/database.py b/api/plugins/database.py deleted file mode 100644 index 80b94dd1..00000000 --- a/api/plugins/database.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache 
Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -This is the ES library for Apache Kibble. -It stores the elasticsearch handler and config options. -""" - - -# Main imports -import cgi -import re -#import aaa -import elasticsearch - -class KibbleESWrapper(object): - """ - Class for rewriting old-style queries to the new ones, - where doc_type is an integral part of the DB name - """ - def __init__(self, ES): - self.ES = ES - - def get(self, index, doc_type, id): - return self.ES.get(index = index+'_'+doc_type, doc_type = '_doc', id = id) - def exists(self, index, doc_type, id): - return self.ES.exists(index = index+'_'+doc_type, doc_type = '_doc', id = id) - def delete(self, index, doc_type, id): - return self.ES.delete(index = index+'_'+doc_type, doc_type = '_doc', id = id) - def index(self, index, doc_type, id, body): - return self.ES.index(index = index+'_'+doc_type, doc_type = '_doc', id = id, body = body) - def update(self, index, doc_type, id, body): - return self.ES.update(index = index+'_'+doc_type, doc_type = '_doc', id = id, body = body) - def scroll(self, scroll_id, scroll): - return self.ES.scroll(scroll_id = scroll_id, scroll = scroll) - def delete_by_query(self, **kwargs): - return self.ES.delete_by_query(**kwargs) - def search(self, index, 
doc_type, size = 100, scroll = None, _source_include = None, body = None): - return self.ES.search( - index = index+'_'+doc_type, - doc_type = '_doc', - size = size, - scroll = scroll, - _source_include = _source_include, - body = body - ) - def count(self, index, doc_type = '*', body = None): - return self.ES.count( - index = index+'_'+doc_type, - doc_type = '_doc', - body = body - ) - -class KibbleESWrapperSeven(object): - """ - Class for rewriting old-style queries to the >= 7.x ones, - where doc_type is an integral part of the DB name and NO DOC_TYPE! - """ - def __init__(self, ES): - self.ES = ES - - def get(self, index, doc_type, id): - return self.ES.get(index = index+'_'+doc_type, id = id) - def exists(self, index, doc_type, id): - return self.ES.exists(index = index+'_'+doc_type, id = id) - def delete(self, index, doc_type, id): - return self.ES.delete(index = index+'_'+doc_type, id = id) - def index(self, index, doc_type, id, body): - return self.ES.index(index = index+'_'+doc_type, id = id, body = body) - def update(self, index, doc_type, id, body): - return self.ES.update(index = index+'_'+doc_type, id = id, body = body) - def scroll(self, scroll_id, scroll): - return self.ES.scroll(scroll_id = scroll_id, scroll = scroll) - def delete_by_query(self, **kwargs): - return self.ES.delete_by_query(**kwargs) - def search(self, index, doc_type, size = 100, scroll = None, _source_include = None, body = None): - return self.ES.search( - index = index+'_'+doc_type, - size = size, - scroll = scroll, - _source_includes = _source_include, - body = body - ) - def count(self, index, doc_type = '*', body = None): - return self.ES.count( - index = index+'_'+doc_type, - body = body - ) - - -class KibbleDatabase(object): - def __init__(self, config): - self.config = config - self.dbname = config['elasticsearch']['dbname'] - self.ES = elasticsearch.Elasticsearch([{ - 'host': config['elasticsearch']['host'], - 'port': int(config['elasticsearch']['port']), - 'use_ssl': 
config['elasticsearch']['ssl'], - 'verify_certs': False, - 'url_prefix': config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '', - 'http_auth': config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None - }], - max_retries=5, - retry_on_timeout=True - ) - - # IMPORTANT BIT: Figure out if this is ES < 6.x, 6.x or >= 7.x. - # If so, we're using the new ES DB mappings, and need to adjust ALL - # ES calls to match this. - self.ESversion = int(self.ES.info()['version']['number'].split('.')[0]) - if self.ESversion >= 7: - self.ES = KibbleESWrapperSeven(self.ES) - elif self.ESVersion >= 6: - self.ES = KibbleESWrapper(self.ES) - diff --git a/api/plugins/openapi.py b/api/plugins/openapi.py deleted file mode 100644 index ba6153a5..00000000 --- a/api/plugins/openapi.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -This is the OpenAPI validator library. 
-Validates input using the OpenAPI specification version 3 from -https://github.com/OAI/OpenAPI-Specification (a simplified version, ahem) -""" - -import yaml -import json -import functools -import operator -import re - -class OpenAPIException(Exception): - def __init__(self, message): - self.message = message - -# Python type names to JSON type names -py2JSON = { - 'int': 'integer', - 'float': 'float', - 'str': 'string', - 'list': 'array', - 'dict': 'object', - 'bool': 'boolean' -} - -mcolors = { - 'PUT': '#fca130', - 'DELETE': '#f93e3e', - 'GET': '#61affe', - 'POST': '#49cc5c', - 'PATCH': '#d5a37e' -} - -class OpenAPI(): - def __init__(self, APIFile): - """ Instantiates an OpenAPI validator given a YAML specification""" - if APIFile.endswith(".json") or APIFile.endswith(".js"): - self.API = json.load(open(APIFile)) - else: - self.API = yaml.load(open(APIFile)) - - def validateType(self, field, value, ftype): - """ Validate a single field value against an expected type """ - - # Get type of value, convert to JSON name of type. - pyType = type(value).__name__ - jsonType = py2JSON[pyType] if pyType in py2JSON else pyType - - # Check if type matches - if ftype != jsonType: - raise OpenAPIException("OpenAPI mismatch: Field '%s' was expected to be %s, but was really %s!" % (field, ftype, jsonType)) - - def validateSchema(self, pdef, formdata, schema = None): - """ Validate (sub)parameters against OpenAPI specs """ - - # allOf: list of schemas to validate against - if 'allOf' in pdef: - for subdef in pdef['allOf']: - self.validateSchema(subdef, formdata) - - where = "JSON body" - # Symbolic link?? 
- if 'schema' in pdef: - schema = pdef['schema']['$ref'] - if '$ref' in pdef: - schema = pdef['$ref'] - if schema: - # #/foo/bar/baz --> dict['foo']['bar']['baz'] - pdef = functools.reduce(operator.getitem, schema.split('/')[1:], self.API) - where = "item matching schema %s" % schema - - # Check that all required fields are present - if 'required' in pdef: - for field in pdef['required']: - if not field in formdata: - raise OpenAPIException("OpenAPI mismatch: Missing input field '%s' in %s!" % (field, where)) - - # Now check for valid format of input data - for field in formdata: - if 'properties' not in pdef or field not in pdef['properties'] : - raise OpenAPIException("Unknown input field '%s' in %s!" % (field, where)) - if 'type' not in pdef['properties'][field]: - raise OpenAPIException("OpenAPI mismatch: Field '%s' was found in api.yaml, but no format was specified in specs!" % field) - ftype = pdef['properties'][field]['type'] - self.validateType(field, formdata[field], ftype) - - # Validate sub-arrays - if ftype == 'array' and 'items' in pdef['properties'][field]: - for item in formdata[field]: - if '$ref' in pdef['properties'][field]['items']: - self.validateSchema(pdef['properties'][field]['items'], item) - else: - self.validateType(field, formdata[field], pdef['properties'][field]['items']['type']) - - # Validate sub-hashes - if ftype == 'hash' and 'schema' in pdef['properties'][field]: - self.validateSchema(pdef['properties'][field], formdata[field]) - def validateParameters(self, defs, formdata): - # - pass - - def validate(self, method = "GET", path = "/foo", formdata = None): - """ Validate the request method and input data against the OpenAPI specification """ - - # Make sure we're not dealing with a dynamic URL. - # If we find /foo/{key}, we fold that into the form data - # and process as if it's a json input field for now. 
- if not self.API['paths'].get(path): - for xpath in self.API['paths']: - pathRE = re.sub(r"\{(.+?)\}", r"(?P<\1>[^/]+)", xpath) - m = re.match(pathRE, path) - if m: - for k,v in m.groupdict().items(): - formdata[k] = v - path = xpath - break - - if self.API['paths'].get(path): - defs = self.API['paths'].get(path) - method = method.lower() - if method in defs: - mdefs = defs[method] - if formdata and 'parameters' in mdefs: - self.validateParameters(mdefs['parameters'], formdata) - elif formdata and 'requestBody' not in mdefs: - raise OpenAPIException("OpenAPI mismatch: JSON data is now allowed for this request type") - elif formdata and 'requestBody' in mdefs and 'content' in mdefs['requestBody']: - - # SHORTCUT: We only care about JSON input for Kibble! Disregard other types - if not 'application/json' in mdefs['requestBody']['content']: - raise OpenAPIException ("OpenAPI mismatch: API endpoint accepts input, but no application/json definitions found in api.yaml!") - jdefs = mdefs['requestBody']['content']['application/json'] - - # Check that required params are here - self.validateSchema(jdefs, formdata) - - else: - raise OpenAPIException ("OpenAPI mismatch: Method %s is not registered for this API" % method) - else: - raise OpenAPIException("OpenAPI mismatch: Unknown API path '%s'!" 
% path) - - def dumpExamples(self, pdef, array = False): - schema = None - if 'schema' in pdef: - if 'type' in pdef['schema'] and pdef['schema']['type'] == 'array': - array = True - schema = pdef['schema']['items']['$ref'] - else: - schema = pdef['schema']['$ref'] - if '$ref' in pdef: - schema = pdef['$ref'] - if schema: - # #/foo/bar/baz --> dict['foo']['bar']['baz'] - pdef = functools.reduce(operator.getitem, schema.split('/')[1:], self.API) - js = {} - desc = {} - if 'properties' in pdef: - for k, v in pdef['properties'].items(): - if 'description' in v: - desc[k] = [v['type'], v['description']] - if 'example' in v: - js[k] = v['example'] - elif 'items' in v: - if v['type'] == 'array': - js[k], foo = self.dumpExamples(v['items'], True) - else: - js[k], foo = self.dumpExamples(v['items']) - return [js if not array else [js], desc] - - def toHTML(self): - """ Blurps out the specs in a pretty HTML blob """ - print(""" - - - - - -""") - li = "

Overview:

" - print(li) - for path, spec in sorted(self.API['paths'].items()): - for method, mspec in sorted(spec.items()): - method = method.upper() - summary = mspec.get('summary', 'No summary available') - resp = "" - inp = "" - inpvars = "" - linkname = "%s%s" % (method.lower(), path.replace('/', '-')) - if 'responses' in mspec: - for code, cresp in sorted(mspec['responses'].items()): - for ctype, pdef in cresp['content'].items(): - xjs, desc = self.dumpExamples(pdef) - js = json.dumps(xjs, indent = 4) - resp += "
%s:\n%s
\n
\n" % (code, js) - - if 'requestBody' in mspec: - for ctype, pdef in mspec['requestBody']['content'].items(): - xjs, desc = self.dumpExamples(pdef) - if desc: - for k, v in desc.items(): - inpvars += "%s: (%s) %s
\n" % (k, v[0], v[1]) - js = json.dumps(xjs, indent = 4) - inp += "

Input examples:

%s:\n%s
\n
" % (ctype, js) - - if inpvars: - inpvars = "
%s
\n
" % inpvars - - - print(""" -
-
- - -
%s
- - - %s -
- %s
-
-

JSON parameters:

- %s -
- %s -
-
-

Response examples:

-
%s
-
-
-
- """ % (linkname, mcolors[method], mcolors[method], mcolors[method], method, path, summary, "block" if inp else "none", inpvars, inp, resp)) - #print("%s %s: %s" % (method.upper(), path, mspec['summary'])) - print("") \ No newline at end of file diff --git a/api/plugins/session.py b/api/plugins/session.py deleted file mode 100644 index 68614e13..00000000 --- a/api/plugins/session.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -This is the session library for Apache Kibble. 
-It handles setting/getting cookies and user prefs -""" - - -# Main imports -import cgi -import re -import sys -import traceback -import http.cookies -import uuid -import elasticsearch -import time - -class KibbleSession(object): - - def getView(self, viewID): - if self.DB.ES.exists(index=self.DB.dbname, doc_type="view", id = viewID): - view = self.DB.ES.get(index=self.DB.dbname, doc_type="view", id = viewID) - return view['_source']['sourceList'] - return [] - - def subFilter(self, subfilter, view = []): - if len(subfilter) == 0: - return view - dOrg = self.user['defaultOrganisation'] or "apache" - res = self.DB.ES.search( - index=self.DB.dbname, - doc_type="source", - size = 10000, - _source_include = ['sourceURL', 'sourceID'], - body = { - 'query': { - 'bool': { - 'must': [ - {'term': { - 'organisation': dOrg - } - }] - } - - } - } - ) - sources = [] - for doc in res['hits']['hits']: - sid = doc['_source']['sourceID'] - m = re.search(subfilter, doc['_source']['sourceURL'], re.IGNORECASE) - if m and ((not view) or (sid in view)): - sources.append(sid) - if not sources: - sources = ['x'] # blank return to not show eeeeverything - return sources - - def subType(self, stype, view = []): - if len(stype) == 0: - return view - if type(stype) is str: - stype = [stype] - dOrg = self.user['defaultOrganisation'] or "apache" - res = self.DB.ES.search( - index=self.DB.dbname, - doc_type="source", - size = 10000, - _source_include = ['sourceURL', 'sourceID', 'type'], - body = { - 'query': { - 'bool': { - 'must': [ - {'term': { - 'organisation': dOrg - } - }, - {'terms': { - 'type': stype - } - } - ] - } - - } - } - ) - sources = [] - for doc in res['hits']['hits']: - sid = doc['_source']['sourceID'] - m = doc['_source']['type'] in stype - if m and ((not view) or (sid in view)): - sources.append(sid) - if not sources: - sources = ['x'] # blank return to not show eeeeverything - return sources - - def logout(self): - """Log out user and wipe cookie""" - if self.user and 
self.cookie: - cookies = http.cookies.SimpleCookie() - cookies['kibble_session'] = "null" - self.headers.append(('Set-Cookie', cookies['kibble_session'].OutputString())) - try: - self.DB.ES.delete(index=self.DB.dbname, doc_type='uisession', id = self.cookie) - self.cookie = None - self.user = None - except: - pass - def newCookie(self): - cookie = uuid.uuid4() - cookies = http.cookies.SimpleCookie() - cookies['kibble_session'] = cookie - cookies['kibble_session']['expires'] = 86400 * 365 # Expire one year from now - self.headers.append(('Set-Cookie', cookies['kibble_session'].OutputString())) - def __init__(self, DB, environ, config): - """ - Loads the current user session or initiates a new session if - none was found. - """ - self.config = config - self.user = None - self.DB = DB - self.headers = [('Content-Type', 'application/json; charset=utf-8')] - self.cookie = None - - # Construct the URL we're visiting - self.url = "%s://%s" % (environ['wsgi.url_scheme'], environ.get('HTTP_HOST', environ.get('SERVER_NAME'))) - self.url += environ.get('SCRIPT_NAME', '/') - - # Get Kibble cookie - cookie = None - cookies = None - if 'HTTP_KIBBLE_TOKEN' in environ: - token = environ.get('HTTP_KIBBLE_TOKEN') - if re.match(r"^[-a-f0-9]+$", token): # Validate token, must follow UUID4 specs - res = self.DB.ES.search(index=self.DB.dbname, doc_type='useraccount', body = {"query": { "match": { "token": token}}}) - if res['hits']['hits']: - self.user = res['hits']['hits'][0]['_source'] - self.newCookie() - else: - if 'HTTP_COOKIE' in environ: - cookies = http.cookies.SimpleCookie(environ['HTTP_COOKIE']) - if cookies and 'kibble_session' in cookies: - cookie = cookies['kibble_session'].value - try: - if re.match(r"^[-a-f0-9]+$", cookie): # Validate cookie, must follow UUID4 specs - doc = None - sdoc = self.DB.ES.get(index=self.DB.dbname, doc_type='uisession', id = cookie) - if sdoc and 'cid' in sdoc['_source']: - doc = self.DB.ES.get(index=self.DB.dbname, doc_type='useraccount', id = 
sdoc['_source']['cid']) - if doc and '_source' in doc: - # Make sure this cookie has been used in the past 7 days, else nullify it. - # Further more, run an update of the session if >1 hour ago since last update. - age = time.time() - sdoc['_source']['timestamp'] - if age > (7*86400): - self.DB.ES.delete(index=self.DB.dbname, doc_type='uisession', id = cookie) - sdoc['_source'] = None # Wipe it! - doc = None - elif age > 3600: - sdoc['_source']['timestamp'] = int(time.time()) # Update timestamp in session DB - self.DB.ES.update(index=self.DB.dbname, doc_type='uisession', id = cookie, body = {'doc':sdoc['_source']}) - if doc: - self.user = doc['_source'] - else: - cookie = None - except Exception as err: - print(err) - if not cookie: - self.newCookie() - self.cookie = cookie - \ No newline at end of file diff --git a/docker-compose-dev.yaml b/docker-compose-dev.yaml new file mode 100644 index 00000000..e4cbb0d4 --- /dev/null +++ b/docker-compose-dev.yaml @@ -0,0 +1,86 @@ +version: '3' + +networks: + kibble: + driver: bridge + +services: + # Helper service to setup the Apache Kibble es node + setup: + image: &img apache/kibble + build: + context: . 
+ dockerfile: Dockerfile.dev + command: bash -c "kibble setup --autoadmin --skiponexist" + volumes: + - .:/kibble/ + depends_on: + - elasticsearch + networks: + - kibble + + # Apache Kibble API server + kibble: + image: *img + command: bash -c "gunicorn --reload -w 1 -b 0.0.0.0:8001 kibble.api.handler:application" + expose: + - 8001 + ports: + - 8001:8001 + volumes: + - .:/kibble/ + depends_on: + - elasticsearch + networks: + - kibble + + # Apache Kibble web ui server + ui: + image: nginx:latest + volumes: + - ./nginx-dev.conf:/etc/nginx/nginx.conf + - ./ui/:/kibble/ui/ + ports: + - 8000:8000 + depends_on: + - kibble + networks: + - kibble + + # Elasticsearch node required as a database for Apache Kibble + elasticsearch: + image: elasticsearch:7.9.2 + ports: + - 9200:9200 + - 9300:9300 + environment: + node.name: es01 + discovery.seed_hosts: es02 + cluster.initial_master_nodes: es01 + cluster.name: kibble + ES_JAVA_OPTS: -Xms256m -Xmx256m + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: + - "kibble-es-data:/usr/share/elasticsearch/data" + networks: + - kibble + + # Kibana to view and manage Elasticsearch + kibana: + image: kibana:7.9.3 + ports: + - 5601:5601 + depends_on: + - elasticsearch + environment: + ELASTICSEARCH_URL: http://elasticsearch:9200 + ELASTICSEARCH_HOSTS: http://elasticsearch:9200 + networks: + - kibble + +volumes: + # named volumes can be managed easier using docker-compose + kibble-es-data: diff --git a/docs/Makefile b/docs/Makefile index dd5b2a84..bacc19da 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -17,4 +17,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/source/_static/images/kibble-architecture.puml b/docs/source/_static/images/kibble-architecture.puml index 4a924faa..7b21b209 100644 --- a/docs/source/_static/images/kibble-architecture.puml +++ b/docs/source/_static/images/kibble-architecture.puml @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + @startuml actor user database elasticsearch diff --git a/docs/source/conf.py b/docs/source/conf.py index bc9edb23..35f0f221 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,4 +1,20 @@ -# -*- coding: utf-8 -*- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # # Apache Kibble documentation build configuration file, created by # sphinx-quickstart on Thu Jan 11 06:05:51 2018. @@ -30,34 +46,33 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.todo', - 'sphinx.ext.imgmath'] +extensions = ["sphinx.ext.todo", "sphinx.ext.imgmath"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Apache Kibble' -copyright = u'2018, The Apache Kibble Community' -author = u'The Apache Kibble Community' +project = u"Apache Kibble" +copyright = u"2018, The Apache Kibble Community" +author = u"The Apache Kibble Community" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = u'0.1' +version = u"0.1" # The full version, including alpha/beta/rc tags. -release = u'0.1' +release = u"0.1" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
@@ -72,7 +87,7 @@ exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -83,8 +98,8 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' -html_logo = '_static/images/kibble-logo.png' +html_theme = "sphinx_rtd_theme" +html_logo = "_static/images/kibble-logo.png" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -95,7 +110,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -103,9 +118,9 @@ # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', + "**": [ + "relations.html", # needs 'show_related': True theme option to display + "searchbox.html", ] } @@ -113,7 +128,7 @@ # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. -htmlhelp_basename = 'ApacheKibbledoc' +htmlhelp_basename = "ApacheKibbledoc" # -- Options for LaTeX output --------------------------------------------- @@ -122,15 +137,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. 
# # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -140,8 +152,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'ApacheKibble.tex', u'Apache Kibble Documentation', - u'The Apache Kibble Community', 'manual'), + ( + master_doc, + "ApacheKibble.tex", + u"Apache Kibble Documentation", + u"The Apache Kibble Community", + "manual", + ) ] @@ -149,10 +166,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'apachekibble', u'Apache Kibble Documentation', - [author], 1) -] +man_pages = [(master_doc, "apachekibble", u"Apache Kibble Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- @@ -161,10 +175,13 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'ApacheKibble', u'Apache Kibble Documentation', - author, 'ApacheKibble', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "ApacheKibble", + u"Apache Kibble Documentation", + author, + "ApacheKibble", + "One line description of project.", + "Miscellaneous", + ) ] - - - diff --git a/docs/source/index.rst b/docs/source/index.rst index 11c8879e..3d39a2f4 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,3 +1,20 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + .. Apache Kibble documentation master file, created by sphinx-quickstart on Thu Jan 11 06:05:51 2018. You can adapt this file completely to your liking, but it should at least diff --git a/docs/source/managing.rst b/docs/source/managing.rst index dd577519..2020808f 100644 --- a/docs/source/managing.rst +++ b/docs/source/managing.rst @@ -1,3 +1,20 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + Managing Apache Kibble ====================== @@ -90,7 +107,7 @@ BugZilla JIRA This is a JIRA project. Most JIRA instances will require the login credentials of an anonymous account in order to perform API calls. - + Twitter This is a Twitter account. Currently not much done there. WIP. diff --git a/docs/source/setup.rst b/docs/source/setup.rst index ad821704..04c7ab39 100644 --- a/docs/source/setup.rst +++ b/docs/source/setup.rst @@ -1,3 +1,20 @@ + .. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + Setting up Apache Kibble ======================== @@ -20,7 +37,7 @@ The Kibble Server (kibble) The Kibble Scanner Applications (kibble-scanners) This is a collection of scanning applications each designed to work with a specific type of resource (a git repo, a mailing list, a JIRA - instance etc) and push copmpiled data objects to the Kibble Server. + instance etc) and push compiled data objects to the Kibble Server. Some resources only have one scanner plugin, while others may have multiple plugins capable of dealing with specific aspects of a resource. @@ -83,7 +100,7 @@ Source Code Location *Apache Kibble does not currently have any releases.* *You are however welcome to try out the development version.* -For the time being, we recommend that you use the ``master`` branch for +For the time being, we recommend that you use the ``main`` branch for testing Kibble. This applies to both scanners and the server. The Kibble Server can be found via our source repository at @@ -108,11 +125,7 @@ following components installed and set up: existing databases, but not for new setups). Does not have to be on the same machine, but it may help speed up processing. 
- A web server of your choice (Apache HTTP Server, NGINX, lighttp etc) -- Python 3.4 or newer with the following libraries installed: -- - elasticsearch -- - certifi -- - pyyaml -- - bcrypt +- Python 3.4 or newer with installed libraries from `setup/requirements.txt` - Gunicorn for Python 3.x (often called gunicorn3) or mod_wsgi ########################################### @@ -125,10 +138,9 @@ Assuming you wish to install kibble in /var/www/kibble, you would set it up by issuing the following: - ``git clone https://github.com/apache/kibble.git /var/www/kibble`` -- ``cd /var/www/kibble/setup`` -- ``pip3 install -r requirements.txt`` -- ``python3 setup.py`` -- Enter the configuration parameters the setup process asks for +- ``cd /var/www/kibble`` +- ``pip install -r setup/requirements.txt`` +- ``python setup/setup.py`` This will set up the database, the configuration file, and create your initial administrator account for the UI. You can later on do additional diff --git a/docs/source/usecases.rst b/docs/source/usecases.rst index da941b96..da65192e 100644 --- a/docs/source/usecases.rst +++ b/docs/source/usecases.rst @@ -1,3 +1,20 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ Use Cases ======================== @@ -10,10 +27,10 @@ Add an Organisation ********************** This use case describes the process of adding an organisation -Actors: +Actors: User -Precondition: +Precondition: User is logged in Flow of Events: @@ -23,11 +40,11 @@ Flow of Events: 4. The system will verify the information. 5. The system will add the new organisation. 6. The system will then display the new organisation along with any existing organisations. - + Exception Scenario: The user does not enter an organisation name or description. -Post Conditions: +Post Conditions: The user creates the organisation or leaves the page. @@ -36,10 +53,10 @@ Add a View ********************** This use case describes the process of adding a view to an organisation -Actors: +Actors: User -Precondition: +Precondition: User is logged in and has an organisation created Flow of Events: @@ -51,23 +68,23 @@ Flow of Events: 6. The system will add the new view. 7. The system will then display the new view along with any existing views. 8. The user with then be able to edit or delete the view. - + Exception Scenario: The user does not enter a view name. -Post Conditions: +Post Conditions: The user creates the source or leaves the page. - + ********************** Add a Source ********************** This use case describes the process of adding a source to an organisation -Actors: +Actors: User -Precondition: +Precondition: User is logged in and has an organisation created Flow of Events: @@ -79,23 +96,23 @@ Flow of Events: 6. The system will add the new source. 7. The system will then display the new source along with any existing sources. 8. The user with then have to run the kibble scanner to process the new source. - + Exception Scenario: The user does not enter a source URL/ID. -Post Conditions: +Post Conditions: The user creates the source or leaves the page. 
- + ********************** Add a User ********************** This use case describes the process of adding a user to an organisation -Actors: +Actors: User -Precondition: +Precondition: User is logged in and has an organisation created Flow of Events: @@ -104,11 +121,9 @@ Flow of Events: 3. The user will enter the email address of a user. 4. The system will verify the information. 5. The system will add the user to the organisation's membership. - + Exception Scenario: The user enters a user that does not exist. -Post Conditions: +Post Conditions: The user invites a member or leaves the page. - - diff --git a/kibble.ini b/kibble.ini new file mode 100644 index 00000000..8e4f042a --- /dev/null +++ b/kibble.ini @@ -0,0 +1,63 @@ +[accounts] +allowSignup = True +verify = True + +[api] +# Kibble elasticsearch database revision +database = 2 +# Version f the API +version = 0.1.0 + +[broker] +enabled = false +url = https://localhost/api/ +username = kibble +password = kibble4life + +[scanner] +# scratchdir: Location for storing file objects like git repos etc +# This should be permanent to speed up scans of large repositories +# on consecutive scans, but may be ephemeral like /tmp +scratchdir = /tmp +# If you are load balancing the scans, you should specify +# how many nodes are working, and which one you are, +# using the format: $nodeNo/$totalNodes. If there are 4 nodes, +# each node will gat 1/4th of all jobs to work on. 
+balance = + +[git] +# Comma-separated branch names +wanted_branches = + +# Watson/BlueMix configuration for sentiment analysis, if applicable +[watson] +username = +password = +api = https://gateway-location.watsonplatform.net/tone-analyzer/api + +# Azure Text Analysis API configuration, if applicable +[azure] +apikey = +location = west-us + +# picoAPI Text Analysis configuration +[picoapi] +key = + + +[elasticsearch] +# Elasticsearch database name +dbname = kibble +# Connection uri used to determine host and port of elasticsearch instance +conn_uri = http://elasticsearch:9200 +# Number of shards in es cluster +shards = 5 +# Number of replicase in es cluster +replicas = 1 +ssl = False +uri = +auth = + +[mail] +mailhost = localhost:25 +sender = Kibble diff --git a/kibble/__init__.py b/kibble/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/__main__.py b/kibble/__main__.py new file mode 100644 index 00000000..8e266a89 --- /dev/null +++ b/kibble/__main__.py @@ -0,0 +1,160 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import List + +import click + +from kibble.cli import setup_command +from kibble.cli.make_account_command import make_account_cmd +from kibble.cli.scanner_command import scan_cmd +from kibble.configuration import conf +from kibble.version import version as kibble_version + + +@click.group() +def cli(): + """A simple command line tool for kibble""" + + +@cli.command("version", short_help="displays the current kibble version") +def version(): + print(kibble_version) + + +@cli.command("setup", short_help="starts the setup process for kibble") +@click.option( + "-u", + "--uri", + default=conf.get("elasticsearch", "conn_uri"), + help="connection uri for ElasticSearch", +) +@click.option( + "-d", + "--dbname", + default=conf.get("elasticsearch", "dbname"), + help="elasticsearch database prefix", +) +@click.option( + "-s", + "--shards", + default=conf.get("elasticsearch", "shards"), + help="number of ES shards", +) +@click.option( + "-r", + "--replicas", + default=conf.get("elasticsearch", "replicas"), + help="number of replicas for ES", +) +@click.option("-a", "--autoadmin", default=False, help="generate generic admin account") +@click.option("-k", "--skiponexist", default=True, help="skip DB creation if DBs exist") +def setup( + uri: str, + dbname: str, + shards: str, + replicas: 
str, + autoadmin: bool, + skiponexist: bool, +): + setup_command.do_setup( + uri=uri, + dbname=dbname, + shards=shards, + replicas=replicas, + autoadmin=autoadmin, + skiponexist=skiponexist, + ) + + +@cli.command("make_account", short_help="creates new kibble user account") +@click.option( + "-u", "--username", help="username (email) of account to create", required=True +) +@click.option("-p", "--password", help="password to set for account", required=True) +@click.option("-A", "--admin", default=False, help="make account global admin") +@click.option( + "-a", "--orgadmin", default=False, help="make account owner of orgs invited to" +) +@click.option("-o", "--org", default=None, help="invite to this organisation") +def make_account( + username: str, + password: str, + admin: bool = False, + orgadmin: bool = False, + org: str = None, +): + make_account_cmd( + username=username, password=password, admin=admin, adminorg=orgadmin, org=org + ) + + +@cli.command("scan", short_help="starts scanning process") +@click.option( + "-t", + "--type", + "scanners", + help="Specific type of scanner to run (default is run all scanners). Can be used multiple times.", + multiple=True, +) +@click.option( + "-e", + "--exclude", + help="Specific type of scanner(s) to exclude. Can be used multiple times.", + multiple=True, +) +@click.option( + "-o", + "--org", + help="The organisation to gather stats for. If left out, all organisations will be scanned.", +) +@click.option( + "-a", + "--age", + help="Minimum age in hours before performing a new scan on an already processed source. 
" + "--age 12 will not process any source that was processed less than 12 hours ago, but " + "will process new sources.", +) +@click.option("-s", "--source", help="Specific source (wildcard) to run scans on.") +@click.option( + "-v", + "--view", + help="Specific source view to scan (default is scan all sources).", +) +def run_scan( + scanners: List[str] = None, + exclude: List[str] = None, + org: str = None, + age: int = None, + source: str = None, + view: str = None, +): + scan_cmd( + scanners=scanners, + exclude=exclude, + org=org, + age=age, + source=source, + view=view, + ) + + +def main(): + cli() + + +if __name__ == "__main__": + main() diff --git a/kibble/api/__init__.py b/kibble/api/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/api/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/api/handler.py b/kibble/api/handler.py new file mode 100644 index 00000000..54e77b0b --- /dev/null +++ b/kibble/api/handler.py @@ -0,0 +1,182 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the main WSGI handler file for Apache Kibble. +It compiles a list of valid URLs from the 'pages' library folder, +and if a URL matches it runs the specific submodule's run() function. It +also handles CGI parsing and exceptions in the applications. +""" +import json +import os +import re +import sys +import traceback + +from kibble.api.plugins import openapi +from kibble.api.plugins.database import KibbleDatabase +from kibble.api.plugins.session import KibbleSession +from kibble.configuration import conf +from kibble.settings import YAML_DIRECTORY + +# Compile valid API URLs from the pages library +# Allow backwards compatibility by also accepting .lua URLs + +urls = [] +if __name__ != "__main__": + from kibble.api.pages import handlers + + for page, handler in handlers.items(): + urls.append((r"^(/api/%s)(/.+)?$" % page, handler.run)) + + +# Instantiate database connections +DB = None + +# Load Open API specifications +openapi_yaml = os.path.join(YAML_DIRECTORY, "openapi.yaml") +KibbleOpenAPI = openapi.OpenAPI(openapi_yaml) + + +class KibbleHTTPError(Exception): + def __init__(self, code, message): + super().__init__() + self.code = code + self.message = message + + +class KibbleAPIWrapper: + """ + Middleware wrapper for exceptions in the application + """ + + def __init__(self, path, func): + self.func = func + self.API = KibbleOpenAPI + self.path = path + self.exception = 
KibbleHTTPError + + def __call__(self, environ, start_response, session): + """Run the function, return response OR return stacktrace""" + try: + # Read JSON client data if any + try: + request_size = int(environ.get("CONTENT_LENGTH", 0)) + except ValueError: + request_size = 0 + requestBody = environ["wsgi.input"].read(request_size) + formdata = {} + if requestBody and len(requestBody) > 0: + try: + formdata = json.loads(requestBody.decode("utf-8")) + except json.JSONDecodeError as err: + start_response( + "400 Invalid request", [("Content-Type", "application/json")] + ) + yield json.dumps({"code": 400, "reason": "Invalid JSON: %s" % err}) + return + + # Validate URL against OpenAPI specs + try: + self.API.validate(environ["REQUEST_METHOD"], self.path, formdata) + except openapi.OpenAPIException as err: + start_response( + "400 Invalid request", [("Content-Type", "application/json")] + ) + yield json.dumps({"code": 400, "reason": err.message}) + return + + # Call page with env, SR and form data + try: + response = self.func(self, environ, formdata, session) + if response: + for bucket in response: + yield bucket + except KibbleHTTPError as err: + errHeaders = { + 403: "403 Authentication failed", + 404: "404 Resource not found", + 500: "500 Internal Server Error", + 501: "501 Gateway error", + } + errHeader = ( + errHeaders[err.code] + if err.code in errHeaders + else "400 Bad request" + ) + start_response(errHeader, [("Content-Type", "application/json")]) + yield json.dumps( + {"code": err.code, "reason": err.message}, indent=4 + ) + "\n" + return + + except: # pylint: disable=bare-except + err_type, err_value, tb = sys.exc_info() + traceback_output = ["API traceback:"] + traceback_output += traceback.format_tb(tb) + traceback_output.append("%s: %s" % (err_type.__name__, err_value)) + # We don't know if response has been given yet, try giving one, fail gracefully. 
+ try: + start_response( + "500 Internal Server Error", [("Content-Type", "application/json")] + ) + except: # pylint: disable=bare-except + pass + yield json.dumps({"code": "500", "reason": "\n".join(traceback_output)}) + + +def fourohfour(environ, start_response): + """A very simple 404 handler""" + start_response("404 Not Found", [("Content-Type", "application/json")]) + yield json.dumps({"code": 404, "reason": "API endpoint not found"}, indent=4) + "\n" + + +def application(environ, start_response): + """ + This is the main handler. Every API call goes through here. + Checks against the pages library, and if submod found, runs + it and returns the output. + """ + db = KibbleDatabase(conf) + path = environ.get("PATH_INFO", "") + for regex, function in urls: + m = re.match(regex, path) + if m: + callback = KibbleAPIWrapper(path, function) + session = KibbleSession(db, environ, conf) + a = 0 + for bucket in callback(environ, start_response, session): + if a == 0: + session.headers.append(bucket) + try: + start_response("200 Okay", session.headers) + except: # pylint: disable=bare-except + pass + a += 1 + # WSGI prefers byte strings, so convert if regular py3 string + if isinstance(bucket, str): + yield bytes(bucket, encoding="utf-8") + elif isinstance(bucket, bytes): + yield bucket + return + + for bucket in fourohfour(environ, start_response): + yield bytes(bucket, encoding="utf-8") + + +if __name__ == "__main__": + KibbleOpenAPI.toHTML() diff --git a/kibble/api/pages/__init__.py b/kibble/api/pages/__init__.py new file mode 100644 index 00000000..76f12359 --- /dev/null +++ b/kibble/api/pages/__init__.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Kibble API scripts library: + + oauth: oauth manager + +""" + +import importlib +import os + +# Define all the submodules we have + +rootpath = os.path.join(os.path.dirname(os.path.realpath(__file__))) +print("Reading pages from %s" % rootpath) + +# Import each submodule into a hash called 'handlers' +handlers = {} + + +def loadPage(path): + for el in os.listdir(path): + filepath = os.path.join(path, el) + if el.find("__") == -1: + if os.path.isdir(filepath): + loadPage(filepath) + else: + p = filepath.replace(rootpath, "")[1:].replace("/", ".")[:-3] + xp = p.replace(".", "/") + print("Loading endpoint pages.%s as %s" % (p, xp)) + handlers[xp] = importlib.import_module(f"kibble.api.pages.{p}") + + +loadPage(rootpath) diff --git a/api/pages/account.py b/kibble/api/pages/account.py similarity index 52% rename from api/pages/account.py rename to kibble/api/pages/account.py index 48577735..c1eef28f 100644 --- a/api/pages/account.py +++ b/kibble/api/pages/account.py @@ -1,19 +1,20 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + ######################################################################## # OPENAPI-URI: /api/account ######################################################################## @@ -85,136 +86,150 @@ # $ref: '#/components/schemas/Error' # description: unexpected error # summary: Create a new account -# +# ######################################################################## - - - """ This is the user account handler for Kibble. adds, removes and edits accounts. 
""" +import email.message +import hashlib import json import re -import time -import bcrypt -import hashlib import smtplib -import email.message + +import bcrypt def sendCode(session, addr, code): msg = email.message.EmailMessage() - msg['To'] = addr - msg['From'] = session.config['mail']['sender'] - msg['Subject'] = "Please verify your account" - msg.set_content("""\ + msg["To"] = addr + msg["From"] = session.config["mail"]["sender"] + msg["Subject"] = "Please verify your account" + msg.set_content( + """\ Hi there! Please verify your account by visiting: %s/api/verify/%s/%s With regards, Apache Kibble. -""" % (session.url, addr, code) +""" + % (session.url, addr, code) + ) + s = smtplib.SMTP( + "%s:%s" + % (session.config["mail"]["mailhost"], session.config["mail"]["mailport"]) ) - s = smtplib.SMTP("%s:%s" % (session.config['mail']['mailhost'], session.config['mail']['mailport'])) s.send_message(msg) s.quit() + def run(API, environ, indata, session): - - method = environ['REQUEST_METHOD'] + + method = environ["REQUEST_METHOD"] # Add a new account?? if method == "PUT": - u = indata['email'] - p = indata['password'] - d = indata['displayname'] - + u = indata["email"] + p = indata["password"] + d = indata["displayname"] + # Are new accounts allowed? (admin can always make accounts, of course) - if not session.config['accounts'].get('allowSignup', False): - if not (session.user and session.user['level'] == 'admin'): - raise API.exception(403, "New account requests have been administratively disabled.") - + if not session.config["accounts"].get("allowSignup", False): + if not (session.user and session.user["level"] == "admin"): + raise API.exception( + 403, "New account requests have been administratively disabled." 
+ ) + # Check if we already have that username in use - if session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = u): + if session.DB.ES.exists(index=session.DB.dbname, doc_type="useraccount", id=u): raise API.exception(403, "Username already in use") - + # We require a username, displayName password of at least 3 chars each if len(p) < 3 or len(u) < 3 or len(d) < 3: - raise API.exception(400, "Username, display-name and password must each be at elast 3 characters long.") - + raise API.exception( + 400, + "Username, display-name and password must each be at elast 3 characters long.", + ) + # We loosely check that the email is an email if not re.match(r"^\S+@\S+\.\S+$", u): raise API.exception(400, "Invalid email address presented.") - + # Okay, let's make an account...I guess salt = bcrypt.gensalt() - pwd = bcrypt.hashpw(p.encode('utf-8'), salt).decode('ascii') - + pwd = bcrypt.hashpw(p.encode("utf-8"), salt).decode("ascii") + # Verification code, if needed vsalt = bcrypt.gensalt() vcode = hashlib.sha1(vsalt).hexdigest() - + # Auto-verify unless verification is enabled. # This is so previously unverified accounts don'thave to verify # if we later turn verification on. verified = True - if session.config['accounts'].get('verify'): + if session.config["accounts"].get("verify"): verified = False - sendCode(session, u, vcode) # Send verification email + sendCode(session, u, vcode) # Send verification email # If verification email fails, skip account creation. - + doc = { - 'email': u, # Username (email) - 'password': pwd, # Hashed password - 'displayName': d, # Display Name - 'organisations': [], # Orgs user belongs to (default is none) - 'ownerships': [], # Orgs user owns (default is none) - 'defaultOrganisation': None, # Default org for user - 'verified': verified, # Account verified via email? 
- 'vcode': vcode, # Verification code - 'userlevel': "user" # User level (user/admin) + "email": u, # Username (email) + "password": pwd, # Hashed password + "displayName": d, # Display Name + "organisations": [], # Orgs user belongs to (default is none) + "ownerships": [], # Orgs user owns (default is none) + "defaultOrganisation": None, # Default org for user + "verified": verified, # Account verified via email? + "vcode": vcode, # Verification code + "userlevel": "user", # User level (user/admin) } - - + # If we have auto-invite on, check if there are orgs to invite to - if 'autoInvite' in session.config['accounts']: - dom = u.split('@')[-1].lower() - for ai in session.config['accounts']['autoInvite']: - if ai['domain'] == dom: - doc['organisations'].append(ai['organisation']) - - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = u, body = doc) + if "autoInvite" in session.config["accounts"]: + dom = u.split("@")[-1].lower() + for ai in session.config["accounts"]["autoInvite"]: + if ai["domain"] == dom: + doc["organisations"].append(ai["organisation"]) + + session.DB.ES.index( + index=session.DB.dbname, doc_type="useraccount", id=u, body=doc + ) yield json.dumps({"message": "Account created!", "verified": verified}) return - + # We need to be logged in for the rest of this! if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - - + # Patch (edit) an account if method == "PATCH": - userid = session.user['email'] - if indata.get('email') and session.user['userlevel'] == "admin": - userid = indata.get('email') - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = userid) - udoc = doc['_source'] - if indata.get('defaultOrganisation'): + userid = session.user["email"] + if indata.get("email") and session.user["userlevel"] == "admin": + userid = indata.get("email") + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="useraccount", id=userid + ) + udoc = doc["_source"] + if indata.get("defaultOrganisation"): # Make sure user is a member or admin here.. - if session.user['userlevel'] == "admin" or indata.get('defaultOrganisation') in udoc['organisations']: - udoc['defaultOrganisation'] = indata.get('defaultOrganisation') + if ( + session.user["userlevel"] == "admin" + or indata.get("defaultOrganisation") in udoc["organisations"] + ): + udoc["defaultOrganisation"] = indata.get("defaultOrganisation") # Changing pasword? - if indata.get('password'): - p = indata.get('password') + if indata.get("password"): + p = indata.get("password") salt = bcrypt.gensalt() - pwd = bcrypt.hashpw(p.encode('utf-8'), salt).decode('ascii') + pwd = bcrypt.hashpw(p.encode("utf-8"), salt).decode("ascii") # Update user doc - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = userid, body = udoc) + session.DB.ES.index( + index=session.DB.dbname, doc_type="useraccount", id=userid, body=udoc + ) yield json.dumps({"message": "Account updated!"}) return - \ No newline at end of file diff --git a/kibble/api/pages/bio/bio.py b/kibble/api/pages/bio/bio.py new file mode 100644 index 00000000..9e7a60b5 --- /dev/null +++ b/kibble/api/pages/bio/bio.py @@ -0,0 +1,180 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/bio/bio +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Biography' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows some facts about a contributor +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Biography' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows some facts about a contributor +# +######################################################################## + + +""" +This is the contributor trends renderer for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! 
+ if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dOrg = session.user["defaultOrganisation"] or "apache" + + pid = hashlib.sha1( + ("%s%s" % (dOrg, indata.get("email", "???"))).encode("ascii", errors="replace") + ).hexdigest() + person = {} + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=pid): + person = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id=pid)[ + "_source" + ] + else: + raise API.exception(404, "No such biography!") + + query = { + "query": {"bool": {"must": [{"term": {"organisation": dOrg}}]}}, + "size": 1, + "sort": [{"ts": "asc"}], + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + codeKey = "committer_email" + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + {"term": {"sender": indata.get("email")}}, + {"term": {codeKey: indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # FIRST EMAIL + res = session.DB.ES.search(index=session.DB.dbname, doc_type="email", body=query) + firstEmail = None + if res["hits"]["hits"]: + firstEmail = res["hits"]["hits"][0]["_source"]["ts"] + + # FIRST COMMIT + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + firstCommit = None + if res["hits"]["hits"]: + firstCommit = res["hits"]["hits"][0]["_source"]["ts"] + + # FIRST AUTHORSHIP + 
query["query"]["bool"]["should"][3] = { + "term": {"author_email": indata.get("email")} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + firstAuthor = None + if res["hits"]["hits"]: + firstAuthor = res["hits"]["hits"][0]["_source"]["ts"] + + # COUNT EMAIL, CODE, LINES CHANGED + del query["sort"] + del query["size"] + no_emails = session.DB.ES.count( + index=session.DB.dbname, doc_type="email", body=query + )["count"] + + no_commits = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + )["count"] + + JSON_OUT = { + "found": True, + "bio": { + "organisation": dOrg, + "name": person["name"], + "email": person["email"], + "id": pid, + "gravatar": hashlib.md5( + person["email"].lower().encode("utf-8") + ).hexdigest(), + "firstEmail": firstEmail, + "firstCommit": firstCommit, + "firstAuthor": firstAuthor, + "tags": person.get("tags", []), + "alts": person.get("alts", []), + "emails": no_emails, + "commits": no_commits, + }, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/bio/newtimers.py b/kibble/api/pages/bio/newtimers.py new file mode 100644 index 00000000..cdf063bf --- /dev/null +++ b/kibble/api/pages/bio/newtimers.py @@ -0,0 +1,278 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/bio/newtimers +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Biography' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows some facts about a contributor +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Biography' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows some facts about a contributor +# +######################################################################## + + +""" +This is the newtimers list renderer for Kibble +""" + +import hashlib +import json +import time + + +def find_earlier(session, query, when, who, which, where, doctype, dOrg): + """Find earlier document pertaining to this user. 
return True if found""" + if "aggs" in query: + del query["aggs"] + + range_query = {"range": {which: {"from": 0, "to": time.time()}}} + + query["query"]["bool"]["must"] = [ + range_query, + {"term": {"organisation": dOrg}}, + {"term": {where: who}}, + ] + query["size"] = 1 + query["sort"] = [{which: "asc"}] + + res = session.DB.ES.search(index=session.DB.dbname, doc_type=doctype, body=query) + if res["hits"]["hits"]: + doc = res["hits"]["hits"][0]["_source"] + if doc[which] >= when: + return [doc[which], doc] + return [-1, None] + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dOrg = session.user["defaultOrganisation"] or "apache" + + # Keep track of all contributors, and newcomers + contributors = [] + newcomers = {} + + #################################################################### + # Start by grabbing all contributors this period via terms agg # + #################################################################### + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + ############################ + # CODE NEWTIMERS # + ############################ + rangeKey = "ts" + rangeQuery = {"range": {rangeKey: {"from": dateFrom, "to": dateTo}}} + + query = { + "query": {"bool": {"must": [rangeQuery, {"term": {"organisation": dOrg}}]}} + } + + query["aggs"] = { + "by_committer": {"terms": {"field": "committer_email", "size": 500}}, + "by_author": {"terms": {"field": "author_email", "size": 500}}, + } + + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + + code_contributors = [] + for bucket in res["aggregations"]["by_committer"]["buckets"]: + email = bucket["key"] + if email not in code_contributors: + code_contributors.append(email) + + for bucket in res["aggregations"]["by_author"]["buckets"]: + email = bucket["key"] + if email not in code_contributors: + code_contributors.append(email) + + # Now, for each contributor, find if they have done anything before + for email in code_contributors: + ea = find_earlier( + session, query, dateFrom, email, "ts", "author_email", "code_commit", dOrg + ) + ec = find_earlier( + session, + query, + dateFrom, + email, + "ts", + "committer_email", + "code_commit", + dOrg, + ) + if ea[0] != -1 and ec[0] != -1: + earliest = ea + if earliest[0] == -1 or (earliest[0] > ec[0] and ec[0] != -1): + earliest = ec + newcomers[email] = {"code": earliest} + + ############################ + # ISSUE NEWTIMERS # + ############################ + rangeKey = "created" + rangeQuery = {"range": {rangeKey: {"from": dateFrom, "to": dateTo}}} + + query = { + "query": {"bool": {"must": [rangeQuery, {"term": {"organisation": dOrg}}]}} + } + + query["aggs"] = { + "by_creator": {"terms": {"field": "issueCreator", "size": 500}}, + "by_closer": {"terms": {"field": "issueCloser", "size": 500}}, + } + + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search(index=session.DB.dbname, doc_type="issue", body=query) + + issue_contributors = [] + for bucket in res["aggregations"]["by_creator"]["buckets"]: + email = bucket["key"] + if email not in issue_contributors: + issue_contributors.append(email) + + for bucket in res["aggregations"]["by_closer"]["buckets"]: + email = bucket["key"] + if email not in issue_contributors: + issue_contributors.append(email) + + # Now, for each contributor, find if they have done anything before + for email in issue_contributors: + ecr = find_earlier( + session, query, dateFrom, email, "created", "issueCreator", "issue", dOrg + ) + ecl = find_earlier( + session, query, dateFrom, email, "closed", "issueCloser", "issue", dOrg + ) + if ecr[0] != -1 and ecl[0] != -1: + earliest = ecr + if earliest[0] == -1 or (earliest[0] > ecl[0] and ecl[0] != -1): + earliest = ecl + newcomers[email] = newcomers.get(email, {}) + newcomers[email]["issue"] = earliest + + email_contributors = [] + + ################################ + # For each newtimer, get a bio # + ################################ + + for email in newcomers: + pid = hashlib.sha1( + ("%s%s" % (dOrg, email)).encode("ascii", errors="replace") + ).hexdigest() + person = {} + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=pid): + person = session.DB.ES.get( + index=session.DB.dbname, doc_type="person", id=pid + )["_source"] + person["md5"] = hashlib.md5( + person["email"].encode("utf-8") + ).hexdigest() # gravatar needed for UI! 
+ newcomers[email]["bio"] = person + + newcomers_code = [] + newcomers_issues = [] + newcomers_email = [] + + # Count newcomers in each category (TODO: put this elsewhere earlier) + for email, entry in newcomers.items(): + if "code" in entry: + newcomers_code.append(email) + if "issue" in entry: + newcomers_issues.append(email) + if "email" in entry: + newcomers_email.append(email) + + JSON_OUT = { + "okay": True, + "stats": { + "code": {"newcomers": newcomers_code, "seen": len(code_contributors)}, + "issues": {"newcomers": newcomers_issues, "seen": len(issue_contributors)}, + "email": {"newcomers": newcomers_email, "seen": len(email_contributors)}, + }, + "bios": newcomers, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT, indent=2) diff --git a/kibble/api/pages/bio/trends.py b/kibble/api/pages/bio/trends.py new file mode 100644 index 00000000..d8dffa66 --- /dev/null +++ b/kibble/api/pages/bio/trends.py @@ -0,0 +1,225 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +####################################################################### +# OPENAPI-URI: /api/bio/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick trend summary of the past 6 months for a contributor +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick trend summary of the past 6 months for a contributor +# +######################################################################## + + +""" +This is the contributor trends renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + dOrg = session.user["defaultOrganisation"] or "apache" + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + + rangeKey = "created" + rangeQuery = {"range": {rangeKey: {"from": dateFrom, "to": dateTo}}} + # ISSUES OPENED + query = { + "query": {"bool": {"must": [rangeQuery, {"term": {"organisation": dOrg}}]}} + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + codeKey = "committer_email" if not indata.get("author") else "author_email" + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + {"term": {"sender": indata.get("email")}}, + {"term": {codeKey: indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # ISSUES CREATED + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_created = res["count"] + + # ISSUES CLOSED + rangeKey = "closed" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateFrom, "to": dateTo}} + } + + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_closed = res["count"] + + # EMAIL SENT + rangeKey = "ts" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateFrom, "to": dateTo}} + } + + res = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query) + no_email_sent = res["count"] + + # COMMITS MADE + rangeKey = "ts" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateFrom, "to": dateTo}} + } + + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + no_commits = res["count"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + + # ISSUES OPENED + rangeKey = "created" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateYonder, "to": dateFrom - 1}} + } + + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_created_before = res["count"] + + # ISSUES CLOSED + rangeKey = 
"closed" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateYonder, "to": dateFrom - 1}} + } + + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_closed_before = res["count"] + + # EMAIL SENT + rangeKey = "ts" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateYonder, "to": dateFrom - 1}} + } + + res = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query) + no_email_sent_before = res["count"] + + # CODE COMMITS + rangeKey = "ts" + query["query"]["bool"]["must"][0] = { + "range": {rangeKey: {"from": dateYonder, "to": dateFrom - 1}} + } + + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + no_commits_before = res["count"] + + trends = { + "created": { + "before": no_issues_created_before, + "after": no_issues_created, + "title": "Issues opened this period", + }, + "closed": { + "before": no_issues_closed_before, + "after": no_issues_closed, + "title": "Issues closed this period", + }, + "email": { + "before": no_email_sent_before, + "after": no_email_sent, + "title": "Emails sent this period", + }, + "code": { + "before": no_commits_before, + "after": no_commits, + "title": "Commits this period", + }, + } + + JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/ci/queue.py b/kibble/api/pages/ci/queue.py new file mode 100644 index 00000000..a7332646 --- /dev/null +++ b/kibble/api/pages/ci/queue.py @@ -0,0 +1,176 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
"""
This is the CI queue timeseries renderer for Kibble
"""

import json
import time


def run(API, environ, indata, session):
    """Render CI queue size / running builds / average wait as a timeseries.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``to``, ``from`` and ``interval`` keys.
    :param session: current user session; carries the ES connection and
        view helpers.
    :yields: one JSON document with ``timeseries`` and widget metadata.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    # We only want build sources, so we can sum up later.
    viewList = session.subType(["jenkins", "travis", "buildbot"], viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    interval = indata.get("interval", "month")

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    query = {
        "query": {
            "bool": {
                "must": [
                    {"range": {"time": {"from": dateFrom, "to": dateTo}}},
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        viewList = [indata.get("source")]

    # Placeholder sourceID clause; it is overwritten per source below, so
    # the initial value is never actually sent to ES.
    query["query"]["bool"]["must"].append({"term": {"sourceID": None}})

    timeseries = []
    for source in viewList:
        # Swap in the sourceID for this iteration (the clause we appended
        # last, so [-1] rather than a hard-coded index).
        query["query"]["bool"]["must"][-1] = {"term": {"sourceID": source}}

        # Get queue stats
        query["aggs"] = {
            "timeseries": {
                "date_histogram": {"field": "date", "interval": interval},
                "aggs": {
                    "size": {"avg": {"field": "size"}},
                    "blocked": {"avg": {"field": "blocked"}},
                    "building": {"avg": {"field": "building"}},
                    "stuck": {"avg": {"field": "stuck"}},
                    "wait": {"avg": {"field": "avgwait"}},
                },
            }
        }
        res = session.DB.ES.search(
            index=session.DB.dbname, doc_type="ci_queue", size=0, body=query
        )

        for bucket in res["aggregations"]["timeseries"]["buckets"]:
            # ES histogram keys are epoch milliseconds.
            ts = int(bucket["key"] / 1000)
            # avg aggregations yield null for empty buckets; coerce to 0.
            bucket["wait"]["value"] = bucket["wait"].get("value", 0) or 0
            if bucket["doc_count"] == 0:
                continue

            # Sum values for the same timestamp across all build sources.
            found = False
            for t in timeseries:
                if t["date"] == ts:
                    found = True
                    t["queue size"] += bucket["size"]["value"]
                    t["builds running"] += bucket["building"]["value"]
                    t["average wait (hours)"] += bucket["wait"]["value"]
                    t["builders"] += 1
            if not found:
                timeseries.append(
                    {
                        "date": ts,
                        "queue size": bucket["size"]["value"],
                        "builds running": bucket["building"]["value"],
                        "average wait (hours)": bucket["wait"]["value"],
                        "builders": 1,
                    }
                )

    for t in timeseries:
        # Seconds -> hours with one decimal: /360 then /10.0 == /3600,
        # truncated to a tenth of an hour.
        t["average wait (hours)"] = int(t["average wait (hours)"] / 360) / 10.0
        # builders was only a working counter, not part of the payload.
        del t["builders"]

    JSON_OUT = {
        "widgetType": {
            "chartType": "line",  # Recommendation for the UI
            "nofill": True,
        },
        "timeseries": timeseries,
        "interval": interval,
        "okay": True,
        "responseTime": time.time() - now,
    }
    yield json.dumps(JSON_OUT)
"""
This is the CI queue status (blocked/stuck) timeseries renderer for Kibble
"""

import json
import time


def run(API, environ, indata, session):
    """Render blocked/stuck CI builds as a timeseries.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``to``, ``from`` and ``interval`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document with ``timeseries`` and widget metadata.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    interval = indata.get("interval", "month")

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    query = {
        "query": {
            "bool": {
                "must": [
                    {"range": {"time": {"from": dateFrom, "to": dateTo}}},
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})

    # Get queue stats. Only blocked/stuck feed the output below; size and
    # wait are requested as well but currently unused.
    query["aggs"] = {
        "timeseries": {
            "date_histogram": {"field": "date", "interval": interval},
            "aggs": {
                "size": {"avg": {"field": "size"}},
                "blocked": {"avg": {"field": "blocked"}},
                "stuck": {"avg": {"field": "stuck"}},
                "wait": {"avg": {"field": "avgwait"}},
            },
        }
    }
    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="ci_queue", size=0, body=query
    )

    timeseries = []
    for bucket in res["aggregations"]["timeseries"]["buckets"]:
        if bucket["doc_count"] == 0:
            continue
        # ES histogram keys are epoch milliseconds.
        ts = int(bucket["key"] / 1000)
        timeseries.append(
            {
                "date": ts,
                "builds blocked": bucket["blocked"]["value"],
                "builds stuck": bucket["stuck"]["value"],
            }
        )

    JSON_OUT = {
        "widgetType": {"chartType": "bar"},  # Recommendation for the UI
        "timeseries": timeseries,
        "interval": interval,
        "okay": True,
        "responseTime": time.time() - now,
    }
    yield json.dumps(JSON_OUT)
"""
This is the TopN CI jobs by total build count renderer for Kibble
"""

import json
import time


def run(API, environ, indata, session):
    """Render the jobs with the most builds in the period as a counts map.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``to`` and ``from`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document mapping "job (ci)" labels to build counts.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    # ci_build stores its date as a formatted string, hence strftime here.
    query = {
        "query": {
            "bool": {
                "must": [
                    {
                        "range": {
                            "date": {
                                "from": time.strftime(
                                    "%Y/%m/%d %H:%M:%S", time.gmtime(dateFrom)
                                ),
                                "to": time.strftime(
                                    "%Y/%m/%d %H:%M:%S", time.gmtime(dateTo)
                                ),
                            }
                        }
                    },
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})

    # One bucket per job URL; sub-aggs pull the display name, the CI system
    # and the summed build duration.
    query["aggs"] = {
        "by_job": {
            "terms": {"field": "jobURL.keyword", "size": 5000},
            "aggs": {
                "duration": {"sum": {"field": "duration"}},
                "ci": {"terms": {"field": "ci.keyword", "size": 1}},
                "name": {"terms": {"field": "job.keyword", "size": 1}},
            },
        }
    }

    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="ci_build", size=0, body=query
    )

    # Collect [builds, duration, jobname, ci] tuples per job.
    jobs = []
    for doc in res["aggregations"]["by_job"]["buckets"]:
        builds = doc["doc_count"]
        duration = doc["duration"]["value"]
        ci = doc["ci"]["buckets"][0]["key"]
        jobname = doc["name"]["buckets"][0]["key"]
        jobs.append([builds, duration, jobname, ci])

    # Sort by build count, most first.
    topjobs = sorted(jobs, key=lambda x: int(x[0]), reverse=True)
    tophash = {}
    for v in topjobs:
        tophash["%s (%s)" % (v[2], v[3])] = v[0]

    JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now}
    yield json.dumps(JSON_OUT)
"""
This is the TopN CI jobs by total build time renderer for Kibble
"""

import json
import time


def run(API, environ, indata, session):
    """Render the 24 jobs with the most total build time, plus a rollup.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``to`` and ``from`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document mapping "job (ci)" labels to hours spent.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    # ci_build stores its date as a formatted string, hence strftime here.
    query = {
        "query": {
            "bool": {
                "must": [
                    {
                        "range": {
                            "date": {
                                "from": time.strftime(
                                    "%Y/%m/%d %H:%M:%S", time.gmtime(dateFrom)
                                ),
                                "to": time.strftime(
                                    "%Y/%m/%d %H:%M:%S", time.gmtime(dateTo)
                                ),
                            }
                        }
                    },
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})

    # One bucket per job URL; sub-aggs pull the display name, the CI system
    # and the summed build duration.
    query["aggs"] = {
        "by_job": {
            "terms": {"field": "jobURL.keyword", "size": 5000},
            "aggs": {
                "duration": {"sum": {"field": "duration"}},
                "ci": {"terms": {"field": "ci.keyword", "size": 1}},
                "name": {"terms": {"field": "job.keyword", "size": 1}},
            },
        }
    }

    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="ci_build", size=0, body=query
    )

    # Collect [builds, duration, jobname, ci] tuples per job.
    jobs = []
    for doc in res["aggregations"]["by_job"]["buckets"]:
        builds = doc["doc_count"]
        duration = doc["duration"]["value"]
        ci = doc["ci"]["buckets"][0]["key"]
        jobname = doc["name"]["buckets"][0]["key"]
        jobs.append([builds, duration, jobname, ci])

    # Sort by total duration, keep the top 24 and roll everything past them
    # into a single "Other jobs" entry. The guard must be > 24 (not > 25),
    # or a run with exactly 25 jobs would silently drop the 25th.
    topjobs = sorted(jobs, key=lambda x: int(x[1]), reverse=True)
    top = topjobs[0:24]
    if len(topjobs) > 24:
        count = 0
        for repo in topjobs[24:]:
            count += repo[1]
        top.append([1, count, "Other jobs", "??"])

    # duration is in milliseconds; /360000 then /10 -> hours, one decimal.
    tophash = {}
    for v in top:
        tophash["%s (%s)" % (v[2], v[3])] = int((v[1] / 360000)) / 10

    JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now}
    yield json.dumps(JSON_OUT)
"""
This is the code changes timeseries renderer for Kibble
"""

import json
import time


def run(API, environ, indata, session):
    """Render insertions/deletions per interval as a timeseries.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``email``, ``pathfilter``, ``to``, ``from`` and
        ``interval`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document with ``timeseries`` and widget metadata.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    interval = indata.get("interval", "day")

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    query = {
        "query": {
            "bool": {
                "must": [
                    {"range": {"tsday": {"from": dateFrom, "to": dateTo}}},
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})
    # Match the person as either committer or author.
    if indata.get("email"):
        query["query"]["bool"]["should"] = [
            {"term": {"committer_email": indata.get("email")}},
            {"term": {"author_email": indata.get("email")}},
        ]
        query["query"]["bool"]["minimum_should_match"] = 1

    # Path filter? A "!" anywhere in the filter negates it (all "!" are
    # stripped before the regexp is applied).
    if indata.get("pathfilter"):
        pf = indata.get("pathfilter")
        if "!" in pf:
            pf = pf.replace("!", "")
            query["query"]["bool"]["must_not"] = query["query"]["bool"].get(
                "must_not", []
            )
            query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}})
        else:
            query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}})

    # Get timeseries for this period
    query["aggs"] = {
        "per_interval": {
            "date_histogram": {"field": "date", "interval": interval},
            "aggs": {
                "insertions": {"sum": {"field": "insertions"}},
                "deletions": {"sum": {"field": "deletions"}},
            },
        }
    }

    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="code_commit", size=0, body=query
    )

    timeseries = []
    for bucket in res["aggregations"]["per_interval"]["buckets"]:
        # ES histogram keys are epoch milliseconds.
        ts = int(bucket["key"] / 1000)
        icount = bucket["insertions"]["value"]
        dcount = bucket["deletions"]["value"]
        timeseries.append({"date": ts, "insertions": icount, "deletions": dcount})

    JSON_OUT = {
        "timeseries": timeseries,
        "interval": interval,
        "okay": True,
        "responseTime": time.time() - now,
        "widgetType": {"chartType": "area"},
    }
    yield json.dumps(JSON_OUT)
"""
This is the commit count timeseries renderer for Kibble
"""


import json
import time


def run(API, environ, indata, session):
    """Render the number of commits per interval as a timeseries.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``email``, ``pathfilter``, ``to``, ``from`` and
        ``interval`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document with ``timeseries`` and widget metadata.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    interval = indata.get("interval", "day")

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    query = {
        "query": {
            "bool": {
                "must": [
                    {"range": {"tsday": {"from": dateFrom, "to": dateTo}}},
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})
    # Match the person as either committer or author.
    if indata.get("email"):
        query["query"]["bool"]["should"] = [
            {"term": {"committer_email": indata.get("email")}},
            {"term": {"author_email": indata.get("email")}},
        ]
        query["query"]["bool"]["minimum_should_match"] = 1

    # Path filter? A "!" anywhere in the filter negates it (all "!" are
    # stripped before the regexp is applied).
    if indata.get("pathfilter"):
        pf = indata.get("pathfilter")
        if "!" in pf:
            pf = pf.replace("!", "")
            query["query"]["bool"]["must_not"] = query["query"]["bool"].get(
                "must_not", []
            )
            query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}})
        else:
            query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}})

    # Get number of commits per interval, this period
    query["aggs"] = {
        "commits": {"date_histogram": {"field": "date", "interval": interval}}
    }
    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="code_commit", size=0, body=query
    )

    timeseries = []
    for bucket in res["aggregations"]["commits"]["buckets"]:
        # ES histogram keys are epoch milliseconds.
        ts = int(bucket["key"] / 1000)
        count = bucket["doc_count"]
        timeseries.append({"date": ts, "commits": count})

    JSON_OUT = {
        "widgetType": {"chartType": "bar"},  # Recommendation for the UI
        "timeseries": timeseries,
        "interval": interval,
        "okay": True,
        "responseTime": time.time() - now,
    }
    yield json.dumps(JSON_OUT)
"""
This is the TopN committers list renderer for Kibble
"""

import hashlib
import json
import time


def run(API, environ, indata, session):
    """Render the top 25 committers (or authors) plus a people timeseries.

    :param API: Kibble API plugin handle, used for raising HTTP errors.
    :param environ: WSGI environment (unused here).
    :param indata: request payload; honours ``view``, ``subfilter``,
        ``source``, ``email``, ``pathfilter``, ``author``, ``to``,
        ``from`` and ``interval`` keys.
    :param session: current user session; carries the ES connection.
    :yields: one JSON document with ``topN``, ``timeseries`` and widget
        metadata.
    :raises API.exception: 403 if the caller is not logged in.
    """

    # We need to be logged in for this!
    if not session.user:
        raise API.exception(403, "You must be logged in to use this API endpoint!")

    now = time.time()

    # First, fetch the view if we have such a thing enabled
    viewList = []
    if indata.get("view"):
        viewList = session.getView(indata.get("view"))
    if indata.get("subfilter"):
        viewList = session.subFilter(indata.get("subfilter"), view=viewList)

    dateTo = indata.get("to", int(time.time()))
    # Default to a 6 month span
    dateFrom = indata.get("from", dateTo - (86400 * 30 * 6))

    # Rank by committer by default; by author if requested.
    which = "committer_email"
    if indata.get("author", False):
        which = "author_email"

    interval = indata.get("interval", "month")

    ####################################################################
    dOrg = session.user["defaultOrganisation"] or "apache"
    query = {
        "query": {
            "bool": {
                "must": [
                    {"range": {"tsday": {"from": dateFrom, "to": dateTo}}},
                    {"term": {"organisation": dOrg}},
                ]
            }
        }
    }
    # Source-specific or view-specific??
    if indata.get("source"):
        query["query"]["bool"]["must"].append(
            {"term": {"sourceID": indata.get("source")}}
        )
    elif viewList:
        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})
    # Match the person as either committer or author.
    if indata.get("email"):
        query["query"]["bool"]["should"] = [
            {"term": {"committer_email": indata.get("email")}},
            {"term": {"author_email": indata.get("email")}},
        ]
        query["query"]["bool"]["minimum_should_match"] = 1

    # Path filter? A "!" anywhere in the filter negates it (all "!" are
    # stripped before the regexp is applied).
    if indata.get("pathfilter"):
        pf = indata.get("pathfilter")
        if "!" in pf:
            pf = pf.replace("!", "")
            query["query"]["bool"]["must_not"] = query["query"]["bool"].get(
                "must_not", []
            )
            query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}})
        else:
            query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}})

    # Get top 25 committers this period
    query["aggs"] = {
        "committers": {
            "terms": {"field": which, "size": 25},
            "aggs": {
                "byinsertions": {
                    "terms": {"field": which},
                    "aggs": {"stats": {"sum": {"field": "insertions"}}},
                },
                "bydeletions": {
                    "terms": {"field": which},
                    "aggs": {"stats": {"sum": {"field": "deletions"}}},
                },
            },
        }
    }
    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="code_commit", size=0, body=query
    )

    people = {}
    for bucket in res["aggregations"]["committers"]["buckets"]:
        email = bucket["key"]
        count = bucket["doc_count"]
        # Person docs are keyed by sha1(org + email); buckets without a
        # matching person doc are skipped entirely.
        sha = hashlib.sha1(("%s%s" % (dOrg, email)).encode("utf-8")).hexdigest()
        if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha):
            pres = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id=sha)
            person = pres["_source"]
            person["name"] = person.get("name", "unknown")
            people[email] = person
            # md5 of the email is the standard gravatar hash.
            people[email]["gravatar"] = hashlib.md5(
                person.get("email", "unknown").encode("utf-8")
            ).hexdigest()
            people[email]["count"] = count
            # NOTE(review): assumes the sub-terms agg always yields at least
            # one bucket here (the parent bucket matched docs) — TODO confirm.
            people[email]["subcount"] = {
                "insertions": int(
                    bucket["byinsertions"]["buckets"][0]["stats"]["value"]
                ),
                "deletions": int(bucket["bydeletions"]["buckets"][0]["stats"]["value"]),
            }

    topN = sorted(people.values(), key=lambda x: x["count"], reverse=True)

    # Get timeseries for this period: distinct committers/authors per interval.
    query["aggs"] = {
        "per_interval": {
            "date_histogram": {"field": "date", "interval": interval},
            "aggs": {
                "by_committer": {"cardinality": {"field": "committer_email"}},
                "by_author": {"cardinality": {"field": "author_email"}},
            },
        }
    }

    res = session.DB.ES.search(
        index=session.DB.dbname, doc_type="code_commit", size=0, body=query
    )

    timeseries = []
    for bucket in res["aggregations"]["per_interval"]["buckets"]:
        # ES histogram keys are epoch milliseconds.
        ts = int(bucket["key"] / 1000)
        ccount = bucket["by_committer"]["value"]
        acount = bucket["by_author"]["value"]
        timeseries.append({"date": ts, "committers": ccount, "authors": acount})

    JSON_OUT = {
        "topN": {"denoter": "commits", "items": topN},
        "timeseries": timeseries,
        "sorted": people,
        "okay": True,
        "responseTime": time.time() - now,
        "widgetType": {"chartType": "bar"},
    }
    yield json.dumps(JSON_OUT)
+ +######################################################################## +# OPENAPI-URI: /api/code/evolution +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Show code evolution as a timeseries +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Show code evolution as a timeseries +# +######################################################################## + + +""" +This is the TopN committers list renderer for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + breakdown = False + onlycode = False + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"time": {"from": 0, "to": int(time.time())}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # We need scrolling here! + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="evolution", + scroll="2m", + size=5000, + body=query, + ) + sid = res["_scroll_id"] + scroll_size = res["hits"]["total"] + if isinstance(scroll_size, dict): + scroll_size = scroll_size["value"] # ES >= 7.x + + timeseries = [] + tstmp = {} + + while scroll_size > 0: + for doc in res["hits"]["hits"]: + updates = doc["_source"] + ts = updates["time"] # round(updates['time']/86400) * 86400 + if updates["time"] % 86400 != 0: + continue + tstmp[ts] = tstmp.get(ts, {}) + item = tstmp[ts] + if breakdown: + pass + else: + item["code"] = item.get("code", 0) + (updates["loc"] or 0) + item["comments"] = item.get("comments", 0) + (updates["comments"] or 0) + item["blanks"] = item.get("blanks", 0) + (updates["blank"] or 0) + + res = session.DB.ES.scroll(scroll_id=sid, scroll="1m") + sid = res["_scroll_id"] + scroll_size = len(res["hits"]["hits"]) + + for k, v in tstmp.items(): + v["date"] = k + timeseries.append(v) + + timeseries = sorted(timeseries, key=lambda x: 
x["date"]) + JSON_OUT = { + "widgetType": {"chartType": "line", "stack": True}, # Recommendation for the UI + "timeseries": timeseries, + "sortOrder": ["code", "comments", "blanks"], + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/pony-timeseries.py b/kibble/api/pages/code/pony-timeseries.py new file mode 100644 index 00000000..1d23d9fd --- /dev/null +++ b/kibble/api/pages/code/pony-timeseries.py @@ -0,0 +1,190 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/pony-timeseries +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# +######################################################################## + + +""" +This is the pony factor renderer for Kibble +""" + +import datetime +import json +import time + +import dateutil.relativedelta + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get("span", 24) + tnow = datetime.date.today() + nm = tnow.month - (tnow.month % 3) + ny = tnow.year + ts = [] + + if nm < 1: + nm += 12 + ny = ny - 1 + + while ny > 1970: + d = datetime.date(ny, nm, 1) + t = time.mktime(d.timetuple()) + d = d - dateutil.relativedelta.relativedelta(months=hl) + tf = time.mktime(d.timetuple()) + nm -= 3 + if nm < 1: + nm += 12 + ny = ny - 1 + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": tf, "to": t}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + + globcount = res["count"] + if globcount == 0: + break + + # Get top 25 committers this period + query["aggs"] = { + "by_committer": {"terms": {"field": "committer_email", "size": 1000}}, + "by_author": {"terms": {"field": "author_email", "size": 1000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + # PF for committers + pf_committer = 0 + pf_committer_count = 0 + for bucket in res["aggregations"]["by_committer"]["buckets"]: + count = bucket["doc_count"] + pf_committer += 1 + pf_committer_count += count + if pf_committer_count > int(globcount / 2): + break + + # PF for authors + pf_author = 0 + pf_author_count = 0 + cpf = {} + for bucket in res["aggregations"]["by_author"]["buckets"]: + count = bucket["doc_count"] + pf_author += 1 + pf_author_count += count + if "@" in bucket["key"]: + mldom = bucket["key"].lower().split("@")[-1] + cpf[mldom] = True + if pf_author_count > int(globcount / 2): + break + ts.append( + { + "date": t, + "Pony Factor (committership)": pf_committer, + "Pony Factor (authorship)": pf_author, + "Meta-Pony Factor": len(cpf), + } + ) + + ts = sorted(ts, key=lambda x: x["date"]) + + JSON_OUT = { + "text": "This shows Pony Factors as calculated over a %u month timespan. Authorship measures the people writing the bulk of the codebase, committership mesaures the people committing (merging) the code, and meta-pony is an estimation of how many organisations/companies are involved." 
+ % hl, + "timeseries": ts, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/pony.py b/kibble/api/pages/code/pony.py new file mode 100644 index 00000000..c64ba372 --- /dev/null +++ b/kibble/api/pages/code/pony.py @@ -0,0 +1,236 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/pony +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows pony factor data for a set of repos over a given period of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows pony factor data for a set of repos over a given period of time +# +######################################################################## + + +""" +This is the pony factor renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 24) + ) # Default to a 24 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + + globcount = res["count"] + + # Get top 25 committers this period + query["aggs"] = { + "by_committer": {"terms": {"field": "committer_email", "size": 5000}}, + "by_author": {"terms": {"field": "author_email", "size": 5000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + # PF for committers + pf_committer = 0 + pf_committer_count = 0 + for bucket in res["aggregations"]["by_committer"]["buckets"]: + count = bucket["doc_count"] + pf_committer += 1 + pf_committer_count += count + if pf_committer_count > int(globcount / 2): + break + + # PF for authors + pf_author = 0 + pf_author_count = 0 + cpf = {} + for bucket in res["aggregations"]["by_author"]["buckets"]: + count = 
bucket["doc_count"] + pf_author += 1 + pf_author_count += count + mldom = bucket["key"].lower().split("@")[1] + cpf[mldom] = True + if pf_author_count > int(globcount / 2): + break + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + + globcount = res["count"] + + # Get top 25 committers this period + query["aggs"] = { + "by_committer": {"terms": {"field": "committer_email", "size": 5000}}, + "by_author": {"terms": {"field": "author_email", "size": 5000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + # PF for committers + pf_committer_b = 0 + pf_committer_count = 0 + for bucket in res["aggregations"]["by_committer"]["buckets"]: + count = bucket["doc_count"] + pf_committer_b += 1 + pf_committer_count += count + if pf_committer_count > int(globcount / 2): + break + + # PF for authors + pf_author_b = 0 + pf_author_count = 0 + cpf_b = {} + for bucket in res["aggregations"]["by_author"]["buckets"]: + count = bucket["doc_count"] + pf_author_b += 1 + pf_author_count += count + mldom = bucket["key"].lower().split("@")[1] + cpf_b[mldom] = True + if pf_author_count > int(globcount / 2): + break + + JSON_OUT = { + "factors": [ + { + "title": "Pony Factor (by committership)", + "count": pf_committer, + "previous": 
pf_committer_b, + }, + { + "title": "Pony Factor (by authorship)", + "count": pf_author, + "previous": pf_author_b, + }, + { + "title": "Meta-Pony Factor (by authorship)", + "count": len(cpf), + "previous": len(cpf_b), + }, + ], + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/punchcard.py b/kibble/api/pages/code/punchcard.py new file mode 100644 index 00000000..a15ec29e --- /dev/null +++ b/kibble/api/pages/code/punchcard.py @@ -0,0 +1,163 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/punchcard +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Show commits as a timeseries +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Show commits as a timeseries +# +######################################################################## + + +""" +This is the commit punch-card renderer for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + which = "committer_email" + role = "committer" + if indata.get("author", False): + which = "author_email" + role = "author" + + interval = indata.get("interval", "day") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"committer_email": indata.get("email")}}, + {"term": {"author_email": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Path filter? + if indata.get("pathfilter"): + pf = indata.get("pathfilter") + if "!" 
in pf: + pf = pf.replace("!", "") + query["query"]["bool"]["must_not"] = query["query"]["bool"].get( + "must_not", [] + ) + query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}}) + else: + query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}}) + + # Get number of committers, this period + query["aggs"] = { + "commits": { + "date_histogram": {"field": "date", "interval": "hour", "format": "E - k"} + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + timeseries = {} + for bucket in res["aggregations"]["commits"]["buckets"]: + ts = bucket["key_as_string"] + count = bucket["doc_count"] + timeseries[ts] = timeseries.get(ts, 0) + count + + JSON_OUT = { + "widgetType": {"chartType": "punchcard"}, # Recommendation for the UI + "timeseries": timeseries, + "interval": interval, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/relationships.py b/kibble/api/pages/code/relationships.py new file mode 100644 index 00000000..3c4b7320 --- /dev/null +++ b/kibble/api/pages/code/relationships.py @@ -0,0 +1,298 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/relationships +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between repositories +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between repositories +# +######################################################################## + + +""" +This is the committer relationship list renderer for Kibble +""" + +import copy +import json +import math +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + view_list = [] + if indata.get("view"): + view_list = session.getView(indata.get("view")) + if indata.get("subfilter"): + view_list = session.subFilter(indata.get("subfilter"), view=view_list) + + date_to = indata.get("to", int(time.time())) + date_from = indata.get( + "from", date_to - (86400 * 30 * 6) + ) # Default to a 6 month span + + # which = "committer_email" + # role = "committer" + # if indata.get("author", False): + # which = "author_email" + # role = "author" + + interval = indata.get("interval", "day") + + #################################################################### + #################################################################### + d_org = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": date_from, "to": date_to}}}, + {"term": {"organisation": d_org}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif view_list: + query["query"]["bool"]["must"].append({"terms": {"sourceID": view_list}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + { + "term": { + "committer_email" + if not indata.get("author") + else "author_email": indata.get("email") + } + } + ) + + # Get number of commits, this period, per repo + query["aggs"] = {"per_repo": {"terms": {"field": "sourceID", "size": 10000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + repos = {} + repo_commits = {} + max_commits = 0 + max_links = 0 + max_shared = 0 + max_authors = 0 + min_links = indata.get("links", 1) + + # For each repo, count commits and gather data on authors + for doc in res["aggregations"]["per_repo"]["buckets"]: + source_id = doc["key"] + commits = doc["doc_count"] + + # Gather the unique authors/committers + query["aggs"] = { + "per_contributor": { + "terms": { + "field": "committer_email" + if not indata.get("author") + else "author_email", + "size": 10000, + } + } + } + xquery = copy.deepcopy(query) + xquery["query"]["bool"]["must"].append({"term": {"sourceID": source_id}}) + xres = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=xquery + ) + authors = [] + for person in xres["aggregations"]["per_contributor"]["buckets"]: + authors.append(person["key"]) + if commits > max_commits: + max_commits = commits + repos[source_id] = authors + repo_commits[source_id] = commits + + # Now, figure out which repos share the same contributors + repo_links = {} + repo_notoriety = {} + repodatas = {} + repo_authors = {} + + # Grab data of all sources + for ID, repo in repos.items(): + if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID): + continue + repodatas[ID] = session.DB.ES.get( + index=session.DB.dbname, doc_type="source", id=ID + ) + + 
for ID, repo in repos.items(): + mylinks = {} + if ID not in repodatas: + continue + repodata = repodatas[ID] + oID = ID + if indata.get("collapse"): + m = re.search(indata.get("collapse"), repodata["_source"]["sourceURL"]) + if m: + ID = m.group(1) + else: + ID = re.sub(r"^.+/", "", repodata["_source"]["sourceURL"]) + for xID, xrepo in repos.items(): + if xID in repodatas: + xrepodata = repodatas[xID] + if indata.get("collapse"): + m = re.search( + indata.get("collapse"), xrepodata["_source"]["sourceURL"] + ) + if m: + xID = m.group(1) + else: + xID = re.sub(r"^.+/", "", xrepodata["_source"]["sourceURL"]) + if xID != ID: + xlinks = [] + for author in xrepo: + if author in repo: + xlinks.append(author) + lname = "%s@%s" % (ID, xID) # Link name + rname = "%s@%s" % (xID, ID) # Reverse link name + if len(xlinks) >= min_links and not rname in repo_links: + mylinks[xID] = len(xlinks) + repo_links[lname] = repo_links.get(lname, 0) + len( + xlinks + ) # How many contributors in common between project A and B? + if repo_links[lname] > max_shared: + max_shared = repo_links[lname] + if ID not in repo_notoriety: + repo_notoriety[ID] = set() + repo_notoriety[ID].update( + mylinks.keys() + ) # How many projects is this repo connected to? + + if ID not in repo_authors: + repo_authors[ID] = set() + repo_authors[ID].update(repo) # How many projects is this repo connected to? + + if ID != oID: + repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] + if repo_commits[ID] > max_commits: + max_commits = repo_commits[ + ID + ] # Used for calculating max link thickness + if len(repo_notoriety[ID]) > max_links: + max_links = len(repo_notoriety[ID]) + if len(repo_authors[ID]) > max_authors: + max_authors = len( + repo_authors[ID] + ) # Used for calculating max sphere size in charts + + # Now, pull it all together! 
+ nodes = [] + links = [] + existing_repos = [] + for source_id in repo_notoriety: + lsize = 0 + for k in repo_links: + fr, to = k.split("@") + if source_id in (fr, to): + lsize += 1 + asize = len(repo_authors[source_id]) + doc = { + "id": source_id, + "name": source_id, + "commits": repo_commits[source_id], + "authors": asize, + "links": lsize, + "size": max(5, (1 - abs(math.log10(asize / max_authors))) * 45), + "tooltip": "%u connections, %u contributors, %u commits" + % (lsize, asize, repo_commits[source_id]), + } + nodes.append(doc) + existing_repos.append(source_id) + + for k, s in repo_links.items(): + size = s + fr, to = k.split("@") + if fr in existing_repos and to in existing_repos: + doc = { + "source": fr, + "target": to, + "value": max(1, (size / max_shared) * 8), + "name": "%s ↔ %s" % (fr, to), + "tooltip": "%u committers in common" % size, + } + links.append(doc) + + JSON_OUT = { + "maxLinks": max_links, + "maxShared": max_shared, + "widgetType": {"chartType": "link"}, # Recommendation for the UI + "links": links, + "nodes": nodes, + "interval": interval, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/retention.py b/kibble/api/pages/code/retention.py new file mode 100644 index 00000000..16570c6d --- /dev/null +++ b/kibble/api/pages/code/retention.py @@ -0,0 +1,229 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/code/retention +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows retention metrics for a set of repos over a given period of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows retention metrics for a set of repos over a given period of time +# +######################################################################## + + +""" +This is the code contributor retention factor renderer for Kibble +""" + +import datetime +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get( + "span", 12 + ) # By default, we define a contributor as active if having committer in the past year + tnow = datetime.date.today() + nm = tnow.month - (tnow.month % 3) + ny = tnow.year + cy = ny + ts = [] + + if nm < 1: + nm += 12 + ny = ny - 1 + + peopleSeen = {} + activePeople = {} + allPeople = {} + FoundSomething = False + + ny = 1970 + while ny < cy or (ny == cy and (nm + 3) <= tnow.month): + d = datetime.date(ny, nm, 1) + t = time.mktime(d.timetuple()) + nm += 3 + if nm > 12: + nm -= 12 + ny = ny + 1 + if ny == cy and nm > tnow.month: + break + d = datetime.date(ny, nm, 1) + tf = time.mktime(d.timetuple()) + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": t, "to": tf}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + + globcount = res["count"] + if globcount == 0 and not FoundSomething: + continue + FoundSomething = True + + # Get top 1000 committers this period + query["aggs"] = { + "by_committer": {"terms": {"field": "committer_email", "size": 25000}}, + "by_author": {"terms": {"field": "author_email", "size": 25000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + retained = 0 + added = 0 + lost = 0 + + thisPeriod = [] + for bucket in res["aggregations"]["by_author"]["buckets"]: + who = bucket["key"] + thisPeriod.append(who) + if who not in peopleSeen: + peopleSeen[who] = tf + added += 1 + activePeople[who] = tf + if who not in allPeople: + allPeople[who] = tf + + prune = [] + for k, v in activePeople.items(): + if v < (t - (hl * 30.45 * 86400)): + prune.append(k) + lost += 1 + + for who in prune: + del activePeople[who] + del peopleSeen[who] + retained = len(activePeople) - added + + ts.append( + { + "date": tf, + "People who (re)joined": added, + "People who quit": lost, + "People retained": retained, + "Active people": added + retained, + } + ) + + groups = [ + ["More than 5 years", (5 * 365 * 86400) + 1], + ["2 - 5 years", (2 * 365 * 86400) + 1], + ["1 - 2 years", (365 * 86400)], + ["Less than a year", 1], + ] + + counts = {} + totExp = 0 + for person, age in activePeople.items(): + totExp += time.time() - allPeople[person] + for el in sorted(groups, key=lambda x: x[1], reverse=True): + if allPeople[person] <= time.time() - el[1]: + counts[el[0]] = counts.get(el[0], 0) + 1 + break + avgyr = (totExp / (86400 * 365)) / max(len(activePeople), 1) + + ts = sorted(ts, 
key=lambda x: x["date"]) + avgm = "" + yr = int(avgyr) + ym = round((avgyr - yr) * 12) + if yr >= 1: + avgm += "%u year%s" % (yr, "s" if yr != 1 else "") + if ym > 0: + avgm += "%s%u month%s" % (", " if yr > 0 else "", ym, "s" if ym != 1 else "") + JSON_OUT = { + "text": "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." + % (hl, avgm), + "timeseries": ts, + "counts": counts, + "averageYears": avgyr, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/sloc.py b/kibble/api/pages/code/sloc.py new file mode 100644 index 00000000..b221f953 --- /dev/null +++ b/kibble/api/pages/code/sloc.py @@ -0,0 +1,122 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/sloc +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of lines of code for one or more sources +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of lines of code for one or more sources +# +######################################################################## + + +""" +This is the SLoC renderer for Kibble +""" + +import json + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + # Fetch all sources for default org + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"terms": {"type": ["git", "svn", "github"]}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="source", size=5000, body=query + ) + + languages = {} + years = 0 + for hit in res["hits"]["hits"]: + doc = hit["_source"] + if "sloc" in doc: + sloc = doc["sloc"] + years += sloc["years"] + for k, v in sloc["languages"].items(): + languages[k] = languages.get(k, {"code": 0, "comment": 0, "blank": 0}) + languages[k]["code"] += v.get("code", 0) + languages[k]["comment"] += v.get("comment", 0) + languages[k]["blank"] += v.get("blank", 0) + + JSON_OUT = {"languages": languages, "okay": True, "years": years} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/top-commits.py b/kibble/api/pages/code/top-commits.py new file mode 100644 index 00000000..6d031d5e --- /dev/null +++ b/kibble/api/pages/code/top-commits.py @@ -0,0 +1,158 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/code/top-commits +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 repos by commit volume +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 repos by commit volume +# +######################################################################## + + +""" +This is the TopN repos by commits list renderer for Kibble +""" + +import json +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"committer_email": indata.get("email")}}, + {"term": {"author_email": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Path filter? + if indata.get("pathfilter"): + pf = indata.get("pathfilter") + if "!" 
in pf: + pf = pf.replace("!", "") + query["query"]["bool"]["must_not"] = query["query"]["bool"].get( + "must_not", [] + ) + query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}}) + else: + query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}}) + + # Get top 25 committers this period + query["aggs"] = {"by_repo": {"terms": {"field": "sourceURL", "size": 5000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + + toprepos = [] + for bucket in res["aggregations"]["by_repo"]["buckets"]: + repo = re.sub(r".+/([^/]+?)(?:\.git)?$", r"\1", bucket["key"]) + count = bucket["doc_count"] + + toprepos.append([repo, count]) + + toprepos = sorted(toprepos, key=lambda x: x[1], reverse=True) + top = toprepos[0:24] + if len(toprepos) > 25: + count = 0 + for repo in toprepos[25:]: + count += repo[1] + top.append(["Other repos", count]) + + tophash = {} + for v in top: + tophash[v[0]] = v[1] + + JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/top-sloc.py b/kibble/api/pages/code/top-sloc.py new file mode 100644 index 00000000..eeb3af77 --- /dev/null +++ b/kibble/api/pages/code/top-sloc.py @@ -0,0 +1,136 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/code/top-sloc +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 repos by lines of code +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 repos by lines of code +# +######################################################################## + + +""" +This is the TopN repos by SLoC list renderer for Kibble +""" + +import json +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"terms": {"type": ["git", "svn", "github"]}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="source", size=5000, body=query + ) + + toprepos = [] + for doc in res["hits"]["hits"]: + repo = doc["_source"] + url = re.sub(r".+/([^/]+?)(?:\.git)?$", r"\1", repo["sourceURL"]) + if "sloc" in repo: + count = repo["sloc"].get("loc", 0) + if not count: + count = 0 + toprepos.append([url, count]) + + toprepos = sorted(toprepos, key=lambda x: int(x[1]), reverse=True) + top = toprepos[0:24] + if len(toprepos) > 25: + count = 0 + for repo in toprepos[25:]: + count += repo[1] + top.append(["Other repos", count]) + + tophash = {} + for v in top: + tophash[v[0]] = v[1] + + JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/code/trends.py b/kibble/api/pages/code/trends.py new file mode 100644 index 00000000..d1239dbe --- /dev/null +++ b/kibble/api/pages/code/trends.py @@ -0,0 +1,257 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/code/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of repos over a given period of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of repos over a given period of time +# +######################################################################## + + +""" +This is the SLoC renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! 
+ if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"committer_email": indata.get("email")}}, + {"term": {"author_email": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Path filter? + if indata.get("pathfilter"): + pf = indata.get("pathfilter") + if "!" 
in pf: + pf = pf.replace("!", "") + query["query"]["bool"]["must_not"] = query["query"]["bool"].get( + "must_not", [] + ) + query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}}) + else: + query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}}) + + # Get number of commits, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + no_commits = res["count"] + + # Get number of committers, this period + query["aggs"] = { + "commits": {"cardinality": {"field": "committer_email"}}, + "authors": {"cardinality": {"field": "author_email"}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + no_committers = res["aggregations"]["commits"]["value"] + no_authors = res["aggregations"]["authors"]["value"] + + # Get number of insertions, this period + query["aggs"] = {"changes": {"sum": {"field": "insertions"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + insertions = res["aggregations"]["changes"]["value"] + + # Get number of deletions, this period + query["aggs"] = {"changes": {"sum": {"field": "deletions"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + deletions = res["aggregations"]["changes"]["value"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Path filter? + if indata.get("pathfilter"): + pf = indata.get("pathfilter") + if "!" in pf: + pf = pf.replace("!", "") + query["query"]["bool"]["must_not"] = query["query"]["bool"].get( + "must_not", [] + ) + query["query"]["bool"]["must_not"].append({"regexp": {"files_changed": pf}}) + else: + query["query"]["bool"]["must"].append({"regexp": {"files_changed": pf}}) + + # Get number of commits, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + no_commits_before = res["count"] + + # Get number of committers, this period + query["aggs"] = { + "commits": {"cardinality": {"field": "committer_email"}}, + "authors": {"cardinality": {"field": "author_email"}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + no_committers_before = res["aggregations"]["commits"]["value"] + no_authors_before = res["aggregations"]["authors"]["value"] + + # Get number of insertions, this period + query["aggs"] = {"changes": {"sum": {"field": "insertions"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + insertions_before = res["aggregations"]["changes"]["value"] + + # Get number of deletions, this period + query["aggs"] = {"changes": {"sum": {"field": "deletions"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + deletions_before = res["aggregations"]["changes"]["value"] + + trends = { + "committers": { + "before": no_committers_before, + "after": no_committers, + "title": "Committers this period", + }, + "authors": { + "before": no_authors_before, + "after": no_authors, + "title": "Authors this period", + }, + "commits": { + "before": 
no_commits_before, + "after": no_commits, + "title": "Commits this period", + }, + "changes": { + "before": insertions_before + deletions_before, + "after": insertions + deletions, + "title": "Lines changed this period", + }, + } + + json_out = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield json.dumps(json_out) diff --git a/kibble/api/pages/filters.py b/kibble/api/pages/filters.py new file mode 100644 index 00000000..b879e0de --- /dev/null +++ b/kibble/api/pages/filters.py @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the source list handler for Kibble +""" + +import json + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + # Fetch all sources for default org + dOrg = session.user["defaultOrganisation"] or "apache" + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="view", + size=5000, + body={"query": {"term": {"owner": session.user["email"]}}}, + ) + + sources = [] + for hit in res["hits"]["hits"]: + doc = hit["_source"] + if indata.get("quick"): + xdoc = { + "sourceID": doc["sourceID"], + "type": doc["type"], + "sourceURL": doc["sourceURL"], + } + sources.append(xdoc) + else: + sources.append(doc) + + JSON_OUT = {"views": sources, "okay": True, "organisation": dOrg} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/actors.py b/kibble/api/pages/forum/actors.py new file mode 100644 index 00000000..a6f9b439 --- /dev/null +++ b/kibble/api/pages/forum/actors.py @@ -0,0 +1,196 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/forum/actors +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. of people opening/closing issues over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. of people opening topics or replying to them. +# +######################################################################## + + +""" +This is the forum actors stats page for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}} + ] + + # Get timeseries for this period + query["aggs"] = { + "per_interval": { + "date_histogram": {"field": "createdDate", "interval": interval}, + "aggs": {"by_user": {"cardinality": {"field": "creator"}}}, + } + } + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_post", size=0, body=query + ) + + timeseries = {} + + for bucket in res["aggregations"]["per_interval"]["buckets"]: + ts = int(bucket["key"] / 1000) + ccount = bucket["by_user"]["value"] + timeseries[ts] = {"date": ts, "topic responders": ccount, "topic creators": 0} + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + 
"must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [{"term": {"creator": indata.get("email")}}] + + # Get timeseries for this period + query["aggs"] = { + "per_interval": { + "date_histogram": {"field": "createdDate", "interval": interval}, + "aggs": {"by_user": {"cardinality": {"field": "creator"}}}, + } + } + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=0, body=query + ) + + for bucket in res["aggregations"]["per_interval"]["buckets"]: + ts = int(bucket["key"] / 1000) + ccount = bucket["by_user"]["value"] + if ts in timeseries: + timeseries[ts]["topic creators"] = ccount + else: + timeseries[ts] = { + "date": ts, + "topic creators": 0, + "topic responders": ccount, + } + + ts = [] + for x, el in timeseries.items(): + ts.append(el) + + JSON_OUT = { + "timeseries": ts, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar"}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/creators.py b/kibble/api/pages/forum/creators.py new file mode 100644 index 00000000..32f636d3 --- /dev/null +++ b/kibble/api/pages/forum/creators.py @@ -0,0 +1,157 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/forum/creators +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue openers +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of forum topic creators +# +######################################################################## + + +""" +This is the TopN issue openers list renderer for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + xtitle = None + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"creator": indata.get("email")}} + ) + xtitle = "People opening issues solved by %s" % indata.get("email") + + # Get top 25 committers this period + query["aggs"] = { + "committers": {"terms": {"field": "creator", "size": 25}, "aggs": {}} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=0, body=query + ) + + people = {} + for bucket in res["aggregations"]["committers"]["buckets"]: + email = bucket["key"] + count = bucket["doc_count"] + sha = email + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha): + pres = session.DB.ES.get( + index=session.DB.dbname, doc_type="person", id=email + ) + person = pres["_source"] + person["name"] = person.get("name", "unknown") + people[email] = person + people[email]["gravatar"] = hashlib.md5( + person.get("email", 
"unknown").encode("utf-8") + ).hexdigest() + people[email]["count"] = count + + topN = [] + for email, person in people.items(): + topN.append(person) + topN = sorted(topN, key=lambda x: x["count"], reverse=True) + JSON_OUT = { + "topN": {"denoter": "topics created", "items": topN}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar", "title": xtitle}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/issues.py b/kibble/api/pages/forum/issues.py new file mode 100644 index 00000000..48459bd5 --- /dev/null +++ b/kibble/api/pages/forum/issues.py @@ -0,0 +1,224 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/forum/issues +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of issues opened/closed over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of forum topics opened/responded-to over time +# +######################################################################## + + +""" +This is the forum timeseries renderer for Kibble +""" + +import json +import time + + +# This creates an empty timeseries object with +# all categories initialized as 0 opened, 0 closed. +def makeTS(dist): + ts = {} + for k in dist: + ts[k + " topics"] = 0 + ts[k + " replies"] = 0 + return ts + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + # By default, we lump generic forums and question/answer (like SO, askbot) together as one + distinct = {"forum": ["discourse", "stackoverflow", "askbot"]} + + # If requested, we split them into two + if indata.get("distinguish", False): + distinct = { + "forum": ["discourse"], + "question bank": ["stackoverflow", "askbot"], + } + + timeseries = {} + + # For each category and the issue types that go along with that, + # grab opened and closed over time. + for iType, iValues in distinct.items(): + #################################################################### + # ISSUES OPENED # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + {"terms": {"type": iValues}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"creator": indata.get("email")}} + ) + + # Get number of opened ones, this period + query["aggs"] = { + "commits": { + "date_histogram": {"field": "createdDate", "interval": interval} + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=0, body=query + ) + + for bucket in res["aggregations"]["commits"]["buckets"]: + ts = int(bucket["key"] / 1000) + count = bucket["doc_count"] + timeseries[ts] = timeseries.get(ts, makeTS(distinct)) + timeseries[ts][iType + " topics"] = ( + timeseries[ts].get(iType + " topics", 0) + count + ) + + #################################################################### + # ISSUES CLOSED # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + {"terms": {"type": iValues}}, + ] + } + } + } + if viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"creator": indata.get("email")}} + ) + + # Get number of closed ones, this period + query["aggs"] = { + "commits": { + "date_histogram": {"field": "createdDate", "interval": interval} + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_post", size=0, body=query + ) + + for bucket in res["aggregations"]["commits"]["buckets"]: + ts = int(bucket["key"] / 1000) + count = bucket["doc_count"] + timeseries[ts] 
= timeseries.get(ts, makeTS(distinct)) + timeseries[ts][iType + " replies"] = ( + timeseries[ts].get(iType + " replies", 0) + count + ) + + ts = [] + for k, v in timeseries.items(): + v["date"] = k + ts.append(v) + + JSON_OUT = { + "widgetType": { + "chartType": "line", # Recommendation for the UI + "nofill": True, + }, + "timeseries": ts, + "interval": interval, + "okay": True, + "distinguishable": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/responders.py b/kibble/api/pages/forum/responders.py new file mode 100644 index 00000000..b314fcd2 --- /dev/null +++ b/kibble/api/pages/forum/responders.py @@ -0,0 +1,157 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/forum/responders +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue closers +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue closers +# +######################################################################## + + +""" +This is the TopN forum posters list renderer for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s")
+
+    now = time.time()
+
+    # First, fetch the view if we have such a thing enabled
+    viewList = []
+    if indata.get("view"):
+        viewList = session.getView(indata.get("view"))
+    if indata.get("subfilter"):
+        viewList = session.subFilter(indata.get("subfilter"), view=viewList)
+
+    dateTo = indata.get("to", int(time.time()))
+    dateFrom = indata.get(
+        "from", dateTo - (86400 * 30 * 6)
+    )  # Default to a 6 month span
+
+    interval = indata.get("interval", "month")
+    xtitle = None
+
+    ####################################################################
+    ####################################################################
+    dOrg = session.user["defaultOrganisation"] or "apache"
+    query = {
+        "query": {
+            "bool": {
+                "must": [
+                    {"range": {"created": {"from": dateFrom, "to": dateTo}}},
+                    {"term": {"organisation": dOrg}},
+                ]
+            }
+        }
+    }
+    # Source-specific or view-specific??
+    if indata.get("source"):
+        query["query"]["bool"]["must"].append(
+            {"term": {"sourceID": indata.get("source")}}
+        )
+    elif viewList:
+        query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}})
+    if indata.get("email"):
+        query["query"]["bool"]["must"].append(
+            {"term": {"creator": indata.get("email")}}
+        )
+        xtitle = "People closing %s's issues" % indata.get("email")
+
+    # Get top 25 committers this period
+    query["aggs"] = {
+        "committers": {"terms": {"field": "creator", "size": 25}, "aggs": {}}
+    }
+    res = session.DB.ES.search(
+        index=session.DB.dbname, doc_type="forum_post", size=0, body=query
+    )
+
+    people = {}
+    for bucket in res["aggregations"]["committers"]["buckets"]:
+        email = bucket["key"]
+        count = bucket["doc_count"]
+        sha = email
+        if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha):
+            pres = session.DB.ES.get(
+                index=session.DB.dbname, doc_type="person", id=email
+            )
+            person = pres["_source"]
+            person["name"] = person.get("name", "unknown")
+            people[email] = person
+            people[email]["gravatar"] = hashlib.md5(
+                person.get("email", 
"unknown").encode("utf-8") + ).hexdigest() + people[email]["count"] = count + + topN = [] + for email, person in people.items(): + topN.append(person) + topN = sorted(topN, key=lambda x: x["count"], reverse=True) + JSON_OUT = { + "topN": {"denoter": "replies posted", "items": topN}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar", "title": xtitle}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/top-count.py b/kibble/api/pages/forum/top-count.py new file mode 100644 index 00000000..c7a48941 --- /dev/null +++ b/kibble/api/pages/forum/top-count.py @@ -0,0 +1,146 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/forum/top-count +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 issue trackers by issues +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 forums by interactions +# +######################################################################## + + +""" +This is the TopN repos by commits list renderer for Kibble +""" + +import json +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [{"term": {"creator": indata.get("email")}}] + + # Get top 25 committers this period + query["aggs"] = {"by_repo": {"terms": {"field": "sourceID", "size": 5000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_post", size=0, body=query + ) + + toprepos = [] + for bucket in res["aggregations"]["by_repo"]["buckets"]: + ID = bucket["key"] + if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID): + it = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id=ID)[ + "_source" + ] + repo = re.sub(r".+/([^/]+)$", r"\1", it["sourceURL"]) + count = bucket["doc_count"] + toprepos.append([repo, count]) + + toprepos = sorted(toprepos, key=lambda x: x[1], reverse=True) + top = toprepos[0:24] + if len(toprepos) > 25: + count = 0 + for repo in toprepos[25:]: + count += repo[1] + top.append(["Other forums", count]) + + tophash = {} + for v in top: + 
tophash[v[0]] = v[1] + + JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/top.py b/kibble/api/pages/forum/top.py new file mode 100644 index 00000000..eb039070 --- /dev/null +++ b/kibble/api/pages/forum/top.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/forum/top +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/TopList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N issues by interactions +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/TopList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N topics by interactions +# +######################################################################## + + +""" +This is the issue actors stats page for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + }, + "sort": {"posts": "desc"}, + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [{"term": {"creator": indata.get("email")}}] + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=25, body=query + ) + top = [] + for bucket in res["hits"]["hits"]: + doc = bucket["_source"] + doc["source"] = doc.get("url", "#") + doc["name"] = doc.get("type", "unknown") + doc["subject"] = doc.get("title") + doc["count"] = doc.get("posts", 0) + top.append(doc) + + JSON_OUT = { + "topN": {"denoter": "interactions", "icon": "comment", "items": top}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "line"}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/forum/trends.py b/kibble/api/pages/forum/trends.py new file mode 100644 index 00000000..f6ec6104 --- /dev/null +++ b/kibble/api/pages/forum/trends.py @@ -0,0 +1,256 @@ +# Licensed to the 
Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/forum/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of issue trackers over a given period of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of forums over a given period of time +# +######################################################################## + + +""" +This is the forum trends renderer for Kibble +""" + +import json +import 
time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + dOrg = session.user["defaultOrganisation"] or "apache" + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get number of issues created, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="forum_topic", body=query + ) + no_issues_created = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"opener": {"cardinality": {"field": "creator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=0, body=query + ) + no_creators = res["aggregations"]["opener"]["value"] + + # REPLIERS + + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get number of issues created, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="forum_post", body=query + ) + no_issues_closed = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"closer": {"cardinality": {"field": "creator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_post", size=0, body=query + ) + no_closers = res["aggregations"]["closer"]["value"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + 
elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get number of issues, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="forum_topic", body=query + ) + no_issues_created_before = res["count"] + + # Get number of committers, this period + query["aggs"] = {"opener": {"cardinality": {"field": "creator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_topic", size=0, body=query + ) + no_creators_before = res["aggregations"]["opener"]["value"] + + # REPLIERS + + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get number of issues created, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="forum_post", body=query + ) + no_issues_closed_before = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"closer": {"cardinality": {"field": "creator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="forum_post", size=0, body=query + ) + no_closers_before = res["aggregations"]["closer"]["value"] + + trends = { + "created": { + "before": no_issues_created_before, + "after": no_issues_created, + "title": "Topics started this period", + }, + "authors": { + "before": no_creators_before, + "after": no_creators, + "title": "People starting topics this period", + }, + "closed": { + "before": no_issues_closed_before, + "after": no_issues_closed, + "title": "Replies this period", + }, + "closers": { + "before": no_closers_before, + "after": no_closers, + "title": "People replying this period", + }, + } + + JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield 
json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/actors.py b/kibble/api/pages/issue/actors.py new file mode 100644 index 00000000..4df53632 --- /dev/null +++ b/kibble/api/pages/issue/actors.py @@ -0,0 +1,196 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/issue/actors +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. 
of people opening/closing issues over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. of people opening/closing issues over time +# +######################################################################## + + +""" +This is the issue actors stats page for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get timeseries for this period + query["aggs"] = { + "per_interval": { + "date_histogram": {"field": "closedDate", "interval": interval}, + "aggs": {"by_user": {"cardinality": {"field": "issueCloser"}}}, + } + } + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + timeseries = {} + for bucket in res["aggregations"]["per_interval"]["buckets"]: + ts = int(bucket["key"] / 1000) + ccount = bucket["by_user"]["value"] + timeseries[ts] = {"date": ts, "closers": ccount, "openers": 0} + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get timeseries for this period + query["aggs"] = { + "per_interval": { + "date_histogram": {"field": "createdDate", "interval": interval}, + "aggs": {"by_user": {"cardinality": {"field": "issueCreator"}}}, + } + } + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + for bucket in res["aggregations"]["per_interval"]["buckets"]: + ts = int(bucket["key"] / 1000) + ccount = bucket["by_user"]["value"] + if ts in timeseries: + timeseries[ts]["openers"] = ccount + else: + timeseries[ts] = {"date": ts, "closers": 0, "openers": ccount} + + ts = [] + for x, el in timeseries.items(): + ts.append(el) + + JSON_OUT = { + "timeseries": ts, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar"}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/age.py b/kibble/api/pages/issue/age.py new file mode 100644 index 00000000..56383447 --- /dev/null +++ b/kibble/api/pages/issue/age.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/issue/age +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. of open tickets by age +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of no. of open tickets by age +# +######################################################################## + + +""" +This is the issue actors stats page for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [{"term": {"status": "open"}}, {"term": {"organisation": dOrg}}] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get timeseries for this period + query["aggs"] = { + "per_interval": { + "date_histogram": {"field": "createdDate", "interval": interval} + } + } + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + timeseries = [] + opened = 0 + for bucket in res["aggregations"]["per_interval"]["buckets"]: + ts = int(bucket["key"] / 1000) + opened += bucket["doc_count"] + timeseries.append({"date": ts, "open": opened}) + + JSON_OUT = { + "timeseries": timeseries, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "line"}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/closers.py b/kibble/api/pages/issue/closers.py new file mode 100644 index 00000000..79ad41ab --- /dev/null +++ b/kibble/api/pages/issue/closers.py @@ -0,0 +1,155 @@ +# Licensed to the Apache Software 
Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/issue/closers +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue closers +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue closers +# +######################################################################## + + +""" +This is the TopN issue closers list renderer for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, 
session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + xtitle = None + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"issueCreator": indata.get("email")}} + ) + xtitle = "People closing %s's issues" % indata.get("email") + + # Get top 25 committers this period + query["aggs"] = { + "committers": {"terms": {"field": "issueCloser", "size": 25}, "aggs": {}} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + people = {} + for bucket in res["aggregations"]["committers"]["buckets"]: + email = bucket["key"] + count = bucket["doc_count"] + sha = hashlib.sha1(("%s%s" % (dOrg, email)).encode("utf-8")).hexdigest() + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha): + pres = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id=sha) + person = pres["_source"] + person["name"] = person.get("name", "unknown") + people[email] = person + people[email]["gravatar"] = hashlib.md5( + person.get("email", "unknown").encode("utf-8") + ).hexdigest() + people[email]["count"] = count + + topN = [] + for email, person in people.items(): + topN.append(person) + topN = sorted(topN, key=lambda x: x["count"], reverse=True) + JSON_OUT = { + "topN": {"denoter": "issues closed", "items": topN}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar", "title": xtitle}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/issues.py b/kibble/api/pages/issue/issues.py new file mode 100644 index 00000000..b435ff5d --- /dev/null +++ b/kibble/api/pages/issue/issues.py @@ -0,0 +1,220 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/issue/issues +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of issues opened/closed over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of issues opened/closed over time +# +######################################################################## + + +""" +This is the issue timeseries renderer for Kibble +""" + + +import json +import time + + +# This creates an empty timeseries object with +# all categories initialized as 0 opened, 0 
closed. +def makeTS(dist): + ts = {} + for k in dist: + ts[k + " opened"] = 0 + ts[k + " closed"] = 0 + return ts + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + # By default, we lump PRs and issues into the same category + distinct = {"issues": ["issue", "pullrequest"]} + + # If requested, we split them into two + if indata.get("distinguish", False): + distinct = {"issues": ["issue"], "pull requests": ["pullrequest"]} + + timeseries = {} + + # For each category and the issue types that go along with that, + # grab opened and closed over time. + for iType, iValues in distinct.items(): + #################################################################### + # ISSUES OPENED # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + {"terms": {"issuetype": iValues}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"issueCreator": indata.get("email")}} + ) + + # Get number of opened ones, this period + query["aggs"] = { + "commits": { + "date_histogram": {"field": "createdDate", "interval": interval} + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + for bucket in res["aggregations"]["commits"]["buckets"]: + ts = int(bucket["key"] / 1000) + count = bucket["doc_count"] + timeseries[ts] = timeseries.get(ts, makeTS(distinct)) + timeseries[ts][iType + " opened"] = ( + timeseries[ts].get(iType + " opened", 0) + count + ) + + #################################################################### + # ISSUES CLOSED # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + {"terms": {"issuetype": iValues}}, + ] + } + } + } + if viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"issueCloser": indata.get("email")}} + ) + + # Get number of closed ones, this period + query["aggs"] = { + "commits": {"date_histogram": {"field": "closedDate", "interval": interval}} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + for bucket in res["aggregations"]["commits"]["buckets"]: + ts = int(bucket["key"] / 1000) + count = bucket["doc_count"] + timeseries[ts] = 
timeseries.get(ts, makeTS(distinct)) + timeseries[ts][iType + " closed"] = ( + timeseries[ts].get(iType + " closed", 0) + count + ) + + ts = [] + for k, v in timeseries.items(): + v["date"] = k + ts.append(v) + + JSON_OUT = { + "widgetType": { + "chartType": "line", # Recommendation for the UI + "nofill": True, + }, + "timeseries": ts, + "interval": interval, + "okay": True, + "distinguishable": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/openers.py b/kibble/api/pages/issue/openers.py new file mode 100644 index 00000000..66d6ab54 --- /dev/null +++ b/kibble/api/pages/issue/openers.py @@ -0,0 +1,155 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/openers +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue openers +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of issue openers +# +######################################################################## + + +""" +This is the TopN issue openers list renderer for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + xtitle = None + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append( + {"term": {"issueCloser": indata.get("email")}} + ) + xtitle = "People opening issues solved by %s" % indata.get("email") + + # Get top 25 committers this period + query["aggs"] = { + "committers": {"terms": {"field": "issueCreator", "size": 25}, "aggs": {}} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + people = {} + for bucket in res["aggregations"]["committers"]["buckets"]: + email = bucket["key"] + count = bucket["doc_count"] + sha = hashlib.sha1(("%s%s" % (dOrg, email)).encode("utf-8")).hexdigest() + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha): + pres = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id=sha) + person = pres["_source"] + person["name"] = person.get("name", "unknown") + people[email] = person + 
people[email]["gravatar"] = hashlib.md5( + person.get("email", "unknown").encode("utf-8") + ).hexdigest() + people[email]["count"] = count + + topN = [] + for email, person in people.items(): + topN.append(person) + topN = sorted(topN, key=lambda x: x["count"], reverse=True) + JSON_OUT = { + "topN": {"denoter": "issues opened", "items": topN}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "bar", "title": xtitle}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/pony-timeseries.py b/kibble/api/pages/issue/pony-timeseries.py new file mode 100644 index 00000000..34fcbe29 --- /dev/null +++ b/kibble/api/pages/issue/pony-timeseries.py @@ -0,0 +1,192 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/pony-timeseries +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# +######################################################################## + + +""" +This is the pony factor renderer for Kibble +""" + +import datetime +import json +import time + +import dateutil.relativedelta + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get("span", 24) + tnow = datetime.date.today() + nm = tnow.month - (tnow.month % 3) + ny = tnow.year + ts = [] + + if nm < 1: + nm += 12 + ny = ny - 1 + + while ny > 1970: + d = datetime.date(ny, nm, 1) + t = time.mktime(d.timetuple()) + d = d - dateutil.relativedelta.relativedelta(months=hl) + tf = time.mktime(d.timetuple()) + nm -= 3 + if nm < 1: + nm += 12 + ny = ny - 1 + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": tf, "to": t}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + + globcount = res["count"] + if globcount == 0: + break + + # Get top 25 committers this period + query["aggs"] = { + "by_creator": {"terms": {"field": "issueCreator", "size": 1000}}, + "by_closer": {"terms": {"field": "issueCloser", "size": 1000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + cpf = {} + + # PF for openers + pf_opener = 0 + pf_opener_count = 0 + for bucket in res["aggregations"]["by_creator"]["buckets"]: + count = bucket["doc_count"] + pf_opener += 1 + pf_opener_count += count + if "@" in bucket["key"]: + mldom = bucket["key"].lower().split("@")[-1] + cpf[mldom] = True + if pf_opener_count > int(globcount / 2): + break + + # PF for closer + pf_closer = 0 + pf_closer_count = 0 + for bucket in res["aggregations"]["by_closer"]["buckets"]: + count = bucket["doc_count"] + pf_closer += 1 + pf_closer_count += count + if "@" in bucket["key"]: + mldom = bucket["key"].lower().split("@")[-1] + cpf[mldom] = True + if pf_closer_count > int(globcount / 2): + break + ts.append( + { + "date": t, + "Pony Factor (openers)": pf_opener, + "Pony Factor (closers)": pf_closer, + "Meta-Pony Factor": len(cpf), + } + ) + + ts = sorted(ts, key=lambda x: x["date"]) + + JSON_OUT = { + "text": "This shows Pony Factors as calculated over a %u month timespan. Openers measures the people submitting the bulk of the issues, closers measures the people closing (resolving) the issues, and meta-pony is an estimation of how many organisations/companies are involved." 
+ % hl, + "timeseries": ts, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/relationships.py b/kibble/api/pages/issue/relationships.py new file mode 100644 index 00000000..afecc2a5 --- /dev/null +++ b/kibble/api/pages/issue/relationships.py @@ -0,0 +1,290 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/relationships +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between issue trackers +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between issue trackers +# +######################################################################## + + +""" +This is the issue tracker relationship list renderer for Kibble +""" + +import copy +import json +import math +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + which = "committer_email" + role = "committer" + if indata.get("author", False): + which = "author_email" + role = "author" + + interval = indata.get("interval", "day") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of commits, this period, per repo + query["aggs"] = {"per_repo": {"terms": {"field": "sourceID", "size": 10000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + repos = {} + repo_commits = {} + max_commits = 0 + max_links = 0 + max_shared = 0 + max_authors = 0 + + # For each repo, count commits and gather data on authors + for doc in res["aggregations"]["per_repo"]["buckets"]: + source_id = doc["key"] + commits = doc["doc_count"] + + # Gather the unique authors/committers + query["aggs"] = { + 
"per_closer": {"terms": {"field": "issueCloser", "size": 10000}}, + "per_creator": {"terms": {"field": "issueCreator", "size": 10000}}, + } + xquery = copy.deepcopy(query) + xquery["query"]["bool"]["must"].append({"term": {"sourceID": source_id}}) + xres = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=xquery + ) + authors = [] + for person in xres["aggregations"]["per_closer"]["buckets"]: + authors.append(person["key"]) + for person in xres["aggregations"]["per_creator"]["buckets"]: + authors.append(person["key"]) + if commits > max_commits: + max_commits = commits + repos[source_id] = authors + repo_commits[source_id] = commits + + # Now, figure out which repos share the same contributors + repo_links = {} + repo_notoriety = {} + repodatas = {} + repo_authors = {} + min_links = indata.get("links", 1) + + # Grab data of all sources + for ID, repo in repos.items(): + if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID): + continue + repodatas[ID] = session.DB.ES.get( + index=session.DB.dbname, doc_type="source", id=ID + ) + + for ID, repo in repos.items(): + mylinks = {} + if ID not in repodatas: + continue + repodata = repodatas[ID] + oID = ID + if indata.get("collapse"): + m = re.search(indata.get("collapse"), repodata["_source"]["sourceURL"]) + if m: + ID = m.group(1) + else: + ID = re.sub(r"^.+/", "", repodata["_source"]["sourceURL"]) + for xID, xrepo in repos.items(): + if xID in repodatas: + xrepodata = repodatas[xID] + if indata.get("collapse"): + m = re.search( + indata.get("collapse"), xrepodata["_source"]["sourceURL"] + ) + if m: + xID = m.group(1) + else: + xID = re.sub(r"^.+/", "", xrepodata["_source"]["sourceURL"]) + if xID != ID: + xlinks = [] + for author in xrepo: + if author in repo: + xlinks.append(author) + lname = "%s@%s" % (ID, xID) # Link name + rname = "%s@%s" % (xID, ID) # Reverse link name + if len(xlinks) >= min_links and not rname in repo_links: + mylinks[xID] = len(xlinks) + 
repo_links[lname] = repo_links.get(lname, 0) + len( + xlinks + ) # How many contributors in common between project A and B? + if repo_links[lname] > max_shared: + max_shared = repo_links[lname] + if ID not in repo_notoriety: + repo_notoriety[ID] = set() + repo_notoriety[ID].update( + mylinks.keys() + ) # How many projects is this repo connected to? + + if ID not in repo_authors: + repo_authors[ID] = set() + repo_authors[ID].update(repo) # How many projects is this repo connected to? + + if ID != oID: + repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] + if repo_commits[ID] > max_commits: + max_commits = repo_commits[ + ID + ] # Used for calculating max link thickness + if len(repo_notoriety[ID]) > max_links: + max_links = len(repo_notoriety[ID]) + if len(repo_authors[ID]) > max_authors: + max_authors = len( + repo_authors[ID] + ) # Used for calculating max sphere size in charts + + # Now, pull it all together! + nodes = [] + links = [] + existing_repos = [] + for source_id in repo_notoriety: + lsize = 0 + for k in repo_links: + fr, to = k.split("@") + if source_id in (fr, to): + lsize += 1 + asize = len(repo_authors[source_id]) + doc = { + "id": source_id, + "name": source_id, + "issues": repo_commits[source_id], + "authors": asize, + "links": lsize, + "size": max(5, (1 - abs(math.log10(asize / max_authors))) * 45), + "tooltip": "%u connections, %u contributors, %u issues" + % (lsize, asize, repo_commits[source_id]), + } + nodes.append(doc) + existing_repos.append(source_id) + + for k, s in repo_links.items(): + size = s + fr, to = k.split("@") + if fr in existing_repos and to in existing_repos: + doc = { + "source": fr, + "target": to, + "value": max(1, (size / max_shared) * 8), + "name": "%s ↔ %s" % (fr, to), + "tooltip": "%u contributors in common" % size, + } + links.append(doc) + + JSON_OUT = { + "maxLinks": max_links, + "maxShared": max_shared, + "widgetType": {"chartType": "link"}, # Recommendation for the UI + "links": links, + "nodes": nodes, 
+ "interval": interval, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/retention.py b/kibble/api/pages/issue/retention.py new file mode 100644 index 00000000..4d473f2c --- /dev/null +++ b/kibble/api/pages/issue/retention.py @@ -0,0 +1,239 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/retention +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows retention metrics for a set of issue trackers over a given period +# of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Factor' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows retention metrics for a set of issue trackers over a given period +# of time +# +######################################################################## + + +""" +This is the code contributor retention factor renderer for Kibble +""" + +import datetime +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get( + "span", 12 + ) # By default, we define a contributor as active if having committer in the past year + tnow = datetime.date.today() + nm = tnow.month - (tnow.month % 3) + ny = tnow.year + cy = ny + ts = [] + + if nm < 1: + nm += 12 + + people_seen = {} + active_people = {} + all_people = {} + found_something = False + + ny = 1970 + while ny < cy or (ny == cy and (nm + 3) <= tnow.month): + d = datetime.date(ny, nm, 1) + t = time.mktime(d.timetuple()) + nm += 3 + if nm > 12: + nm -= 12 + ny = ny + 1 + if ny == cy and nm > tnow.month: + break + d = datetime.date(ny, nm, 1) + tf = time.mktime(d.timetuple()) + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": t, "to": tf}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + + globcount = res["count"] + if not globcount and not found_something: + continue + found_something = True + + # Get top 1000 committers this period + query["aggs"] = { + "by_o": {"terms": {"field": "issueCloser", "size": 50000}}, + "by_c": {"terms": {"field": "issueCreator", "size": 50000}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + retained = 0 + added = 0 + lost = 0 + + thisPeriod = [] + for bucket in res["aggregations"]["by_o"]["buckets"]: + who = bucket["key"] + thisPeriod.append(who) + if who not in people_seen: + people_seen[who] = tf + added += 1 + active_people[who] = tf + if who not in all_people: + all_people[who] = tf + + for bucket in res["aggregations"]["by_c"]["buckets"]: + who = bucket["key"] + thisPeriod.append(who) + if who not in people_seen: + people_seen[who] = tf + added += 1 + if who not in active_people: + active_people[who] = tf + if who not in all_people: + all_people[who] = tf + + prune = [] + for k, v in active_people.items(): + if v < (t - (hl * 30.45 * 86400)): + prune.append(k) + lost += 1 + + for who in prune: + del active_people[who] + del people_seen[who] + retained = len(active_people) - added + ts.append( + { + "date": tf, + "People who (re)joined": added, + "People who quit": lost, + "People retained": retained, + "Active people": added + retained, + } + ) + + groups = [ + ["More than 5 years", (5 * 365 * 86400) + 1], + ["2 - 5 years", (2 * 365 * 86400) + 1], + ["1 - 2 years", (365 * 86400)], + ["Less than a year", 1], + ] + + counts = {} + totExp = 0 + for person, age in active_people.items(): + totExp += time.time() - all_people[person] 
+ for el in sorted(groups, key=lambda x: x[1], reverse=True): + if all_people[person] <= time.time() - el[1]: + counts[el[0]] = counts.get(el[0], 0) + 1 + break + avgyr = (totExp / (86400 * 365)) / max(len(active_people), 1) + + ts = sorted(ts, key=lambda x: x["date"]) + + avgm = "" + yr = int(avgyr) + ym = round((avgyr - yr) * 12) + if yr >= 1: + avgm += "%u year%s" % (yr, "s" if yr != 1 else "") + if ym > 0: + avgm += "%s%u month%s" % (", " if yr > 0 else "", ym, "s" if ym != 1 else "") + JSON_OUT = { + "text": "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." + % (hl, avgm), + "timeseries": ts, + "counts": counts, + "averageYears": avgyr, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/top-count.py b/kibble/api/pages/issue/top-count.py new file mode 100644 index 00000000..9fa39f65 --- /dev/null +++ b/kibble/api/pages/issue/top-count.py @@ -0,0 +1,150 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/top-count +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 issue trackers by issues +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows top 25 issue trackers by issues +# +######################################################################## + + +""" +This is the TopN repos by commits list renderer for Kibble +""" + +import json +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get top 25 committers this period + query["aggs"] = {"by_repo": {"terms": {"field": "sourceID", "size": 5000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + + toprepos = [] + for bucket in res["aggregations"]["by_repo"]["buckets"]: + ID = bucket["key"] + if session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID): + it = session.DB.ES.get(index=session.DB.dbname, doc_type="source", id=ID)[ + "_source" + ] + repo = re.sub(r".+/([^/]+)$", r"\1", it["sourceURL"]) + count = bucket["doc_count"] + toprepos.append([repo, count]) + + toprepos = sorted(toprepos, key=lambda x: x[1], reverse=True) + top = toprepos[0:24] + if len(toprepos) > 25: + count = 0 + for repo in 
toprepos[25:]: + count += repo[1] + top.append(["Other trackers", count]) + + tophash = {} + for v in top: + tophash[v[0]] = v[1] + + JSON_OUT = {"counts": tophash, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/top.py b/kibble/api/pages/issue/top.py new file mode 100644 index 00000000..6f83e85f --- /dev/null +++ b/kibble/api/pages/issue/top.py @@ -0,0 +1,141 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/issue/top +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/TopList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N issues by interactions +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/TopList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N issues by interactions +# +######################################################################## + + +""" +This is the issue actors stats page for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + }, + "sort": {"comments": "desc"}, + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=25, body=query + ) + top = [] + for bucket in res["hits"]["hits"]: + doc = bucket["_source"] + doc["source"] = doc.get("url", "#") + doc["name"] = doc.get("key", "unknown") + doc["subject"] = doc.get("title") + doc["count"] = doc.get("comments", 0) + top.append(doc) + + JSON_OUT = { + "topN": {"denoter": "interactions", "icon": "bug", "items": top}, + "okay": True, + "responseTime": time.time() - now, + "widgetType": {"chartType": "line"}, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/issue/trends.py b/kibble/api/pages/issue/trends.py new file mode 100644 
index 00000000..f43971ee --- /dev/null +++ b/kibble/api/pages/issue/trends.py @@ -0,0 +1,263 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/issue/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of issue trackers over a given period of time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows trend data for a set of issue trackers over a given period of time +# 
+######################################################################## + + +""" +This is the Issue trends renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + dOrg = session.user["defaultOrganisation"] or "apache" + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of issues created, this period + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_created = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"opener": {"cardinality": {"field": "issueCreator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + no_creators = res["aggregations"]["opener"]["value"] + + # CLOSERS + + query = { + "query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of issues created, this period + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_closed = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"closer": {"cardinality": {"field": "issueCloser"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + no_closers = res["aggregations"]["closer"]["value"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + {"range": {"created": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + if viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of issues, this period + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_created_before = res["count"] + + # Get number of committers, this period + query["aggs"] = {"opener": {"cardinality": {"field": "issueCreator"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + no_creators_before = res["aggregations"]["opener"]["value"] + + # CLOSERS + + query = { + 
"query": { + "bool": { + "must": [ + {"range": {"closed": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + if viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [ + {"term": {"issueCreator": indata.get("email")}}, + {"term": {"issueCloser": indata.get("email")}}, + ] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of issues created, this period + res = session.DB.ES.count(index=session.DB.dbname, doc_type="issue", body=query) + no_issues_closed_before = res["count"] + + # Get number of open/close, this period + query["aggs"] = {"closer": {"cardinality": {"field": "issueCloser"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="issue", size=0, body=query + ) + no_closers_before = res["aggregations"]["closer"]["value"] + + trends = { + "created": { + "before": no_issues_created_before, + "after": no_issues_created, + "title": "Issues opened this period", + }, + "authors": { + "before": no_creators_before, + "after": no_creators, + "title": "People opening issues this period", + }, + "closed": { + "before": no_issues_closed_before, + "after": no_issues_closed, + "title": "Issues closed this period", + }, + "closers": { + "before": no_closers_before, + "after": no_closers, + "title": "People closing issues this period", + }, + } + + JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/keyphrases.py b/kibble/api/pages/mail/keyphrases.py new file mode 100644 index 00000000..0e653254 --- /dev/null +++ b/kibble/api/pages/mail/keyphrases.py @@ -0,0 +1,131 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/keyphrases +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/PhraseList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the common key phrases in use on one or more mailing lists +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/PhraseList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the common key phrases in use on one or more mailing lists +# +######################################################################## + + +""" +This is the common key phrases renderer for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! 
+ if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + }, + "aggs": {"kpe": {"terms": {"field": "kpe.keyword", "size": 50}}}, + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + topN = [] + for bucket in res["aggregations"]["kpe"]["buckets"]: + topN.append({"phrase": bucket["key"], "count": bucket["doc_count"]}) + + JSON_OUT = { + "widgetType": {"chartType": "bar"}, + "phrases": topN, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/map.py b/kibble/api/pages/mail/map.py new file mode 100644 index 00000000..a8fe6137 --- /dev/null +++ b/kibble/api/pages/mail/map.py @@ -0,0 +1,325 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/map +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of email author reply mappings +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of email author reply mappings +# +######################################################################## + + +""" +This is the committer relationship list renderer for Kibble +""" + +import copy +import hashlib +import json +import math +import re +import time + +badBots = 
r"(JIRA|Hudson|jira|jenkins|GitHub|git@|dev@|bugzilla|gerrit)" + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + span = dateTo - dateFrom + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("search"): + query["query"]["bool"]["must"].append( + {"regexp": {"subject": indata.get("search")}} + ) + + if indata.get("email"): + query["query"]["bool"]["minimum_should_match"] = 1 + query["query"]["bool"]["should"] = [ + {"term": {"replyto.keyword": indata.get("email")}}, + {"term": {"sender": indata.get("email")}}, + ] + + # Get number of commits, this period, per repo + query["aggs"] = { + "per_ml": { + "terms": { + "field": "replyto.keyword" if not indata.get("author") else "sender", + "size": 150, + } + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + repos = {} + repo_commits = {} + authorlinks = {} + max_emails = 0 + max_links = 0 + max_shared = 0 + max_authors = 0 + minLinks = indata.get("links", 1) + + if indata.get("email"): + del query["query"]["bool"]["should"] + del query["query"]["bool"]["minimum_should_match"] + + # For each repo, count commits and gather data on authors + for doc in res["aggregations"]["per_ml"]["buckets"]: + source_id = doc["key"] + emails = doc["doc_count"] + if re.search(badBots, source_id): # No bots + continue + if emails > (span / 86400) * 4: # More than 4/day and we consider you a bot! 
+ continue + + # Gather the unique authors/committers + query["aggs"] = { + "per_ml": { + "terms": { + "field": "sender" + if not indata.get("author") + else "replyto.keyword", + "size": 5000, + } + } + } + xquery = copy.deepcopy(query) + + xquery["query"]["bool"]["must"].append( + { + "term": { + "replyto.keyword" + if not indata.get("author") + else "sender": source_id + } + } + ) + xres = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=xquery + ) + authors = [] + for person in xres["aggregations"]["per_ml"]["buckets"]: + pk = person["key"] + authors.append(pk) + if emails > max_emails: + max_emails = emails + repos[source_id] = authors + repo_commits[source_id] = emails + + # Now, figure out which repos share the same contributors + repo_links = {} + repo_notoriety = {} + repodatas = {} + repo_authors = {} + + # Grab data of all sources + for ID, repo in repos.items(): + mylinks = {} + hID = hashlib.sha1( + ("%s%s" % (dOrg, ID)).encode("ascii", errors="replace") + ).hexdigest() + if not session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=hID): + continue + repodatas[ID] = session.DB.ES.get( + index=session.DB.dbname, doc_type="person", id=hID + ) + + for ID, repo in repos.items(): + mylinks = {} + if ID not in repodatas: + continue + repodata = repodatas[ID] + oID = ID + if indata.get("collapse"): + m = re.search(indata.get("collapse"), repodata["_source"]["email"]) + if m: + ID = m.group(1) + xlinks = [] + for xID, xrepo in repos.items(): + if xID in repodatas: + xrepodata = repodatas[xID] + if indata.get("collapse"): + m = re.search(indata.get("collapse"), xrepodata["_source"]["email"]) + if m: + xID = m.group(1) + if xID != ID: + + if ID in xrepo: + xlinks.append(xID) + lname = "%s||%s" % (ID, xID) # Link name + rname = "%s||%s" % (xID, ID) # Reverse link name + if ( + len(xlinks) > 0 + and rname not in repo_links + and len(xlinks) >= minLinks + ): + mylinks[ID] = mylinks.get(ID, 0) + 1 + repo_links[lname] = 
repo_links.get(lname, 0) + len( + xlinks + ) # How many contributors in common between project A and B? + if repo_links[lname] > max_shared: + max_shared = repo_links[lname] + elif rname in repo_links: + repo_links[rname] = repo_links.get(rname, 0) + len(xlinks) + if ID not in repo_notoriety: + repo_notoriety[ID] = set() + repo_notoriety[ID].update( + mylinks.keys() + ) # How many projects is this repo connected to? + + if ID not in repo_authors: + repo_authors[ID] = set() + repo_authors[ID].update(repo) # How many projects is this repo connected to? + + if ID != oID: + repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] + if repo_commits[ID] > max_emails: + max_emails = repo_commits[ID] # Used for calculating max link thickness + if len(repo_notoriety[ID]) > max_links: + max_links = len(repo_notoriety[ID]) + if len(repo_authors[ID]) > max_authors: + max_authors = len( + repo_authors[ID] + ) # Used for calculating max sphere size in charts + + # Now, pull it all together! + nodes = [] + links = [] + existing_repos = [] + for source_id, ns in repo_notoriety.items(): + lsize = 0 + for k in repo_links: + fr, to = k.split("||") + if source_id in (fr, to): + lsize += 1 + asize = len(repo_authors[source_id]) + doc = { + "id": source_id, + "gravatar": hashlib.md5(source_id.lower().encode("utf-8")).hexdigest(), + "name": repodatas[source_id]["_source"].get("name", source_id), + "replies": repo_commits[source_id], + "authors": asize, + "links": lsize, + "size": max( + 5, (1 - abs(math.log10(repo_commits[source_id] / max_emails))) * 45 + ), + "tooltip": "%u connections, %u fellows, %u replies to" + % (lsize, asize, repo_commits[source_id]), + } + nodes.append(doc) + existing_repos.append(source_id) + + for k, s in repo_links.items(): + size = s + fr, to = k.split("||") + if fr in existing_repos and to in existing_repos: + doc = { + "source": fr, + "target": to, + "value": max(1, (size / max_shared) * 5), + "name": "%s ↔ %s" % (fr, to), + "tooltip": "%u topics 
exchanged" % size, + } + links.append(doc) + + JSON_OUT = { + "maxLinks": max_links, + "maxShared": max_shared, + "widgetType": {"chartType": "link"}, # Recommendation for the UI + "links": links, + "nodes": nodes, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/mood-timeseries.py b/kibble/api/pages/mail/mood-timeseries.py new file mode 100644 index 00000000..ff7153f1 --- /dev/null +++ b/kibble/api/pages/mail/mood-timeseries.py @@ -0,0 +1,150 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/mail/mood-timeseries +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of the (analyzed) mood in emails as a timeseries +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of the (analyzed) mood in emails as a timeseries +# +######################################################################## + + +""" +This is the email mood timeseries renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + interval = indata.get("interval", "week") + + # Define moods we know of + moods_good = set(["trust", "joy", "confident", "positive"]) + moods_bad = set(["sadness", "anger", "disgust", "fear", "negative"]) + moods_neutral = set( + ["anticipation", "surprise", "tentative", "analytical", "neutral"] + ) + all_moods = set(moods_good | moods_bad | moods_neutral) + + # Fetch all sources for default org + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + {"exists": {"field": "mood"}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + emls = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query)[ + "count" + ] + + query["aggs"] = { + "history": { + "date_histogram": {"field": "date", "interval": interval}, + "aggs": {}, + } + } + + # Add aggregations for moods + for mood in all_moods: + query["aggs"]["history"]["aggs"][mood] = {"sum": {"field": "mood.%s" % mood}} + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + timeseries = [] + + for tz in res["aggregations"]["history"]["buckets"]: + moods = {} + emls = tz["doc_count"] + for mood in all_moods: + moods[mood] = int(100 * tz.get(mood, {"value": 0})["value"] / max(1, emls)) + moods["date"] = int(tz["key"] / 1000) + timeseries.append(moods) + + JSON_OUT = {"timeseries": timeseries, "okay": True} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/mood.py b/kibble/api/pages/mail/mood.py new file mode 100644 index 00000000..710ad3f1 --- /dev/null +++ b/kibble/api/pages/mail/mood.py @@ -0,0 +1,224 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/mood +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of the (analyzed) mood in emails +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of the (analyzed) mood in emails +# +######################################################################## + + +""" +This is the email mood renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + # First, fetch the view if we have such a thing enabled + view_list = [] + if indata.get("view"): + view_list = session.getView(indata.get("view")) + if indata.get("subfilter"): + view_list = session.subFilter(indata.get("subfilter"), view=view_list) + + date_to = indata.get("to", int(time.time())) + date_from = indata.get( + "from", date_to - (86400 * 30 * 6) + ) # Default to a 6 month span + + # Define moods we know of + moods_good = {"trust", "joy", "confident", "positive"} + moods_bad = {"sadness", "anger", "disgust", "fear", "negative"} + moods_neutral = {"anticipation", "surprise", "tentative", "analytical", "neutral"} + all_moods = moods_good | moods_bad | moods_neutral + + # Start off with a query for the entire org (we want to compare) + d_org = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": date_from, "to": date_to}}}, + {"term": {"organisation": d_org}}, + {"exists": {"field": "mood"}}, + ] + } + } + } + + # Count all emails, for averaging scores + gemls = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query)[ + "count" + ] + + # Add aggregations for moods + query["aggs"] = {} + for mood in all_moods: + query["aggs"][mood] = {"sum": {"field": "mood.%s" % mood}} + + global_mood_compiled = {} + mood_compiled = {} + txt = ( + "This chart shows the ten potential mood types as they average on the emails " + "in this period. A score of 100 means a sentiment is highly visible in most emails." + ) + gtxt = "This shows the overall estimated mood as a gauge from terrible to good." + # If we're comparing against all lists, first do a global query + # and compile moods overall + if indata.get("relative"): + txt = ( + "This chart shows the ten potential mood types on the selected lists " + "as they compare against all mailing lists in the database. A score of 100 " + "here means the sentiment conforms to averages across all lists." 
+ ) + gtxt = "This shows the overall estimated mood compared to all lists, as a gauge from terrible to good." + global_moods = {} + + gres = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + for mood, el in gres["aggregations"].items(): + # If a mood is not present (iow sum is 0), remove it from the equation by setting to -1 + if el["value"] == 0: + el["value"] = -1 + global_moods[mood] = el["value"] + for k, v in global_moods.items(): + if v >= 0: + global_mood_compiled[k] = int((v / max(1, gemls)) * 100) + + # Now, if we have a view (or not distinguishing), ... + ss = False + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + ss = True + elif view_list: + query["query"]["bool"]["must"].append({"terms": {"sourceID": view_list}}) + ss = True + + # If we have a view enabled (and distinguish), compile local view against global view + # Else, just copy global as local + if ss or not indata.get("relative"): + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + del query["aggs"] # we have to remove these to do a count() + emls = session.DB.ES.count( + index=session.DB.dbname, doc_type="email", body=query + )["count"] + + moods = {} + + for mood, el in res["aggregations"].items(): + if el["value"] == 0: + el["value"] = -1 + moods[mood] = el["value"] + for k, v in moods.items(): + if v > 0: + mood_compiled[k] = int( + 100 + * int((v / max(1, emls)) * 100) + / max(1, global_mood_compiled.get(k, 100)) + ) + else: + mood_compiled = global_mood_compiled + + # If relative mode and a field is missing, assume 100 (norm) + if indata.get("relative"): + for M in all_moods: + if mood_compiled.get(M, 0) == 0: + mood_compiled[M] = 100 + + # Compile an overall happiness level + MAX = max(max(mood_compiled.values()), 1) + X = 100 if indata.get("relative") else 0 + bads = X + for B in moods_bad: + if mood_compiled.get(B) and 
mood_compiled[B] > X: + bads += mood_compiled[B] + + happ = 50 + + goods = X + for B in moods_good: + if mood_compiled.get(B) and mood_compiled[B] > X: + goods += mood_compiled[B] + MAX = max(MAX, bads, goods) + if bads > 0: + happ -= 50 * bads / MAX + if goods > 0: + happ += 50 * goods / MAX + swingometer = max(0, min(100, happ)) + + # JSON out! + json_out = { + "relativeMode": True, + "text": txt, + "counts": mood_compiled, + "okay": True, + "gauge": {"key": "Happiness", "value": swingometer, "text": gtxt}, + } + yield json.dumps(json_out) diff --git a/kibble/api/pages/mail/pony-timeseries.py b/kibble/api/pages/mail/pony-timeseries.py new file mode 100644 index 00000000..20202fa5 --- /dev/null +++ b/kibble/api/pages/mail/pony-timeseries.py @@ -0,0 +1,179 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/mail/pony-timeseries +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows timeseries of Pony Factor over time +# +######################################################################## + + +""" +This is the pony factor renderer for Kibble +""" + +import datetime +import json +import time + +import dateutil.relativedelta + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get("span", 24) + tnow = datetime.date.today() + nm = tnow.month - (tnow.month % 3) + ny = tnow.year + ts = [] + + if nm < 1: + nm += 12 + ny = ny - 1 + + while ny > 1970: + d = datetime.date(ny, nm, 1) + t = time.mktime(d.timetuple()) + d = d - dateutil.relativedelta.relativedelta(months=hl) + tf = time.mktime(d.timetuple()) + nm -= 3 + if nm < 1: + nm += 12 + ny = ny - 1 + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": tf, "to": t}}}, + {"term": {"organisation": dOrg}}, + ], + "must_not": [{"match": {"sourceURL": "commits*"}}], + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get an initial count of commits + res = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query) + + globcount = res["count"] + if globcount == 0: + break + + # Get top 25 committers this period + query["aggs"] = {"by_sender": {"terms": {"field": "sender", "size": 2500}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + # PF for authors + pf_author = 0 + pf_author_count = 0 + cpf = {} + for bucket in res["aggregations"]["by_sender"]["buckets"]: + count = bucket["doc_count"] + # Assume anyone sending > 10 emails per day is a bot (or a commit list)! 
+ if count > (10 * 365 * hl): + globcount -= count + continue + pf_author += 1 + pf_author_count += count + if "@" in bucket["key"]: + mldom = bucket["key"].lower().split("@")[-1] + cpf[mldom] = True + if pf_author_count > int(globcount / 2): + break + ts.append( + { + "date": t, + "Pony Factor (authors)": pf_author, + "Meta-Pony Factor": len(cpf), + } + ) + + ts = sorted(ts, key=lambda x: x["date"]) + + JSON_OUT = { + "text": "This shows Pony Factors as calculated over a %u month timespan. Authorship is a measure of the people it takes to make up the bulk of email traffic, and meta-pony is an estimation of how many organisations/companies are involved." + % hl, + "timeseries": ts, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/relationships.py b/kibble/api/pages/mail/relationships.py new file mode 100644 index 00000000..0dc022bb --- /dev/null +++ b/kibble/api/pages/mail/relationships.py @@ -0,0 +1,274 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/mail/relationships +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between mailing lists +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a breakdown of contributor relationships between mailing lists +# +######################################################################## + + +""" +This is the committer relationship list renderer for Kibble +""" + +import copy +import json +import math +import re +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append({"term": {"sender": indata.get("email")}}) + + # Get number of commits, this period, per repo + query["aggs"] = {"per_ml": {"terms": {"field": "sourceID", "size": 10000}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + repos = {} + repo_commits = {} + authorlinks = {} + max_emails = 0 + max_links = 0 + max_shared = 0 + max_authors = 0 + minLinks = indata.get("links", 1) + + # For each repo, count commits and gather data on authors + for doc in res["aggregations"]["per_ml"]["buckets"]: + source_id = doc["key"] + emails = doc["doc_count"] + + # Gather the unique authors/committers + query["aggs"] = {"per_ml": {"terms": {"field": "sender", "size": 10000}}} + xquery = copy.deepcopy(query) + xquery["query"]["bool"]["must"].append({"term": {"sourceID": source_id}}) + xres = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", 
size=0, body=xquery + ) + authors = [] + for person in xres["aggregations"]["per_ml"]["buckets"]: + authors.append(person["key"]) + if emails > max_emails: + max_emails = emails + repos[source_id] = authors + repo_commits[source_id] = emails + + # Now, figure out which repos share the same contributors + repo_links = {} + repo_notoriety = {} + repodatas = {} + repo_authors = {} + + # Grab data of all sources + for ID, repo in repos.items(): + mylinks = {} + if not session.DB.ES.exists(index=session.DB.dbname, doc_type="source", id=ID): + continue + repodatas[ID] = session.DB.ES.get( + index=session.DB.dbname, doc_type="source", id=ID + ) + + for ID, repo in repos.items(): + mylinks = {} + if ID not in repodatas: + continue + repodata = repodatas[ID] + oID = ID + if indata.get("collapse"): + m = re.search(indata.get("collapse"), repodata["_source"]["sourceURL"]) + if m: + ID = m.group(1) + else: + ID = re.sub(r"^.+/(?:list\.html\?)?", "", repodata["_source"]["sourceURL"]) + for xID, xrepo in repos.items(): + if xID in repodatas: + xrepodata = repodatas[xID] + if indata.get("collapse"): + m = re.search( + indata.get("collapse"), xrepodata["_source"]["sourceURL"] + ) + if m: + xID = m.group(1) + else: + xID = re.sub( + r"^.+/(?:list\.html\?)?", "", xrepodata["_source"]["sourceURL"] + ) + if xID != ID: + xlinks = [] + for author in xrepo: + if author in repo: + xlinks.append(author) + lname = "%s||%s" % (ID, xID) # Link name + rname = "%s||%s" % (xID, ID) # Reverse link name + if len(xlinks) >= minLinks and not rname in repo_links: + mylinks[xID] = len(xlinks) + repo_links[lname] = repo_links.get(lname, 0) + len( + xlinks + ) # How many contributors in common between project A and B? + if repo_links[lname] > max_shared: + max_shared = repo_links[lname] + if ID not in repo_notoriety: + repo_notoriety[ID] = set() + repo_notoriety[ID].update( + mylinks.keys() + ) # How many projects is this repo connected to? 
+ + if ID not in repo_authors: + repo_authors[ID] = set() + repo_authors[ID].update(repo) # How many projects is this repo connected to? + + if ID != oID: + repo_commits[ID] = repo_commits.get(ID, 0) + repo_commits[oID] + if repo_commits[ID] > max_emails: + max_emails = repo_commits[ID] # Used for calculating max link thickness + if len(repo_notoriety[ID]) > max_links: + max_links = len(repo_notoriety[ID]) + if len(repo_authors[ID]) > max_authors: + max_authors = len( + repo_authors[ID] + ) # Used for calculating max sphere size in charts + + # Now, pull it all together! + nodes = [] + links = [] + existing_repos = [] + for source_id in repo_notoriety: + lsize = 0 + for k in repo_links: + fr, to = k.split("||") + if source_id in (fr, to): + lsize += 1 + asize = len(repo_authors[source_id]) + doc = { + "id": source_id, + "name": source_id, + "emails": repo_commits[source_id], + "authors": asize, + "links": lsize, + "size": max(5, (1 - abs(math.log10(asize / max_authors))) * 45), + "tooltip": "%u connections, %u contributors, %u emails" + % (lsize, asize, repo_commits[source_id]), + } + nodes.append(doc) + existing_repos.append(source_id) + + for k, s in repo_links.items(): + size = s + fr, to = k.split("||") + if fr in existing_repos and to in existing_repos: + doc = { + "source": fr, + "target": to, + "value": max(1, (size / max_shared) * 8), + "name": "%s ↔ %s" % (fr, to), + "tooltip": "%u contributors in common" % size, + } + links.append(doc) + + JSON_OUT = { + "maxLinks": max_links, + "maxShared": max_shared, + "widgetType": {"chartType": "link"}, # Recommendation for the UI + "links": links, + "nodes": nodes, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/api/pages/mail/retention.py b/kibble/api/pages/mail/retention.py similarity index 50% rename from api/pages/mail/retention.py rename to kibble/api/pages/mail/retention.py index 6734da11..65939ee6 100644 --- a/api/pages/mail/retention.py +++ 
b/kibble/api/pages/mail/retention.py @@ -1,19 +1,20 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ ######################################################################## # OPENAPI-URI: /api/mail/retention ######################################################################## @@ -58,56 +59,54 @@ # - cookieAuth: [] # summary: Shows retention metrics for a set of mailing lists over a given period # of time -# +# ######################################################################## - - - """ This is the code contributor retention factor renderer for Kibble """ +import datetime import json import time -import re -import datetime + def run(API, environ, indata, session): - + # We need to be logged in for this! if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint! %s") - + now = time.time() - + # First, fetch the view if we have such a thing enabled viewList = [] - if indata.get('view'): - viewList = session.getView(indata.get('view')) - if indata.get('subfilter'): - viewList = session.subFilter(indata.get('subfilter'), view = viewList) - - - hl = indata.get('span', 12) # By default, we define a contributor as active if having committer in the past year + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + hl = indata.get( + "span", 12 + ) # By default, we define a contributor as active if having committer in the past year tnow = datetime.date.today() nm = tnow.month - (tnow.month % 3) ny = tnow.year cy = ny ts = [] - + if nm < 1: nm += 12 ny = ny - 1 - + peopleSeen = {} activePeople = {} allPeople = {} - + ny = 1970 FoundSomething = False - while ny < cy or (ny == cy and (nm+3) <= tnow.month): + while ny < cy or (ny == cy and (nm + 3) <= tnow.month): d = datetime.date(ny, nm, 1) t = time.mktime(d.timetuple()) nm += 3 @@ -118,73 +117,48 @@ def run(API, environ, indata, session): break d = datetime.date(ny, nm, 1) tf = time.mktime(d.timetuple()) - - + 
#################################################################### #################################################################### - dOrg = session.user['defaultOrganisation'] or "apache" + dOrg = session.user["defaultOrganisation"] or "apache" query = { - 'query': { - 'bool': { - 'must': [ - {'range': - { - 'ts': { - 'from': t, - 'to': tf - } - } - }, - { - 'term': { - 'organisation': dOrg - } - } - ] - } - } + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": t, "to": tf}}}, + {"term": {"organisation": dOrg}}, + ] } + } + } # Source-specific or view-specific?? - if indata.get('source'): - query['query']['bool']['must'].append({'term': {'sourceID': indata.get('source')}}) + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) elif viewList: - query['query']['bool']['must'].append({'terms': {'sourceID': viewList}}) - + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + # Get an initial count of commits - res = session.DB.ES.count( - index=session.DB.dbname, - doc_type="email", - body = query - ) - - globcount = res['count'] + res = session.DB.ES.count(index=session.DB.dbname, doc_type="email", body=query) + + globcount = res["count"] if globcount == 0 and not FoundSomething: continue FoundSomething = True # Get top 1000 committers this period - query['aggs'] = { - 'by_author': { - 'terms': { - 'field': 'sender', - 'size': 200000 - } - } - } + query["aggs"] = {"by_author": {"terms": {"field": "sender", "size": 200000}}} res = session.DB.ES.search( - index=session.DB.dbname, - doc_type="email", - size = 0, - body = query - ) - - + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + retained = 0 added = 0 lost = 0 - + thisPeriod = [] - for bucket in res['aggregations']['by_author']['buckets']: - who = bucket['key'] + for bucket in res["aggregations"]["by_author"]["buckets"]: + who = bucket["key"] thisPeriod.append(who) if who not in peopleSeen: 
peopleSeen[who] = tf @@ -192,58 +166,61 @@ def run(API, environ, indata, session): activePeople[who] = tf if who not in allPeople: allPeople[who] = tf - + prune = [] for k, v in activePeople.items(): - if v < (t - (hl*30.45*86400)): + if v < (t - (hl * 30.45 * 86400)): prune.append(k) lost += 1 - + for who in prune: del activePeople[who] del peopleSeen[who] retained = len(activePeople) - added - - ts.append({ - 'date': tf, - 'People who (re)joined': added, - 'People who quit': lost, - 'People retained': retained, - 'Active people': added + retained - }) - + + ts.append( + { + "date": tf, + "People who (re)joined": added, + "People who quit": lost, + "People retained": retained, + "Active people": added + retained, + } + ) + groups = [ - ['More than 5 years', (5*365*86400)+1], - ['2 - 5 years', (2*365*86400)+1], - ['1 - 2 years', (365*86400)], - ['Less than a year', 1] + ["More than 5 years", (5 * 365 * 86400) + 1], + ["2 - 5 years", (2 * 365 * 86400) + 1], + ["1 - 2 years", (365 * 86400)], + ["Less than a year", 1], ] - + counts = {} totExp = 0 for person, age in activePeople.items(): totExp += time.time() - allPeople[person] - for el in sorted(groups, key = lambda x: x[1], reverse = True): + for el in sorted(groups, key=lambda x: x[1], reverse=True): if allPeople[person] <= time.time() - el[1]: counts[el[0]] = counts.get(el[0], 0) + 1 break - avgyr = (totExp / (86400*365)) / max(len(activePeople),1) - - ts = sorted(ts, key = lambda x: x['date']) - + avgyr = (totExp / (86400 * 365)) / max(len(activePeople), 1) + + ts = sorted(ts, key=lambda x: x["date"]) + avgm = "" yr = int(avgyr) - ym = round((avgyr-yr)*12) + ym = round((avgyr - yr) * 12) if yr >= 1: avgm += "%u year%s" % (yr, "s" if yr != 1 else "") if ym > 0: avgm += "%s%u month%s" % (", " if yr > 0 else "", ym, "s" if ym != 1 else "") JSON_OUT = { - 'text': "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." 
% (hl, avgm), - 'timeseries': ts, - 'counts': counts, - 'averageYears': avgyr, - 'okay': True, - 'responseTime': time.time() - now, + "text": "This shows Contributor retention as calculated over a %u month timespan. The average experience of currently active people is %s." + % (hl, avgm), + "timeseries": ts, + "counts": counts, + "averageYears": avgyr, + "okay": True, + "responseTime": time.time() - now, } yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/timeseries-single.py b/kibble/api/pages/mail/timeseries-single.py new file mode 100644 index 00000000..a1bbb3b9 --- /dev/null +++ b/kibble/api/pages/mail/timeseries-single.py @@ -0,0 +1,140 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/mail/timeseries-single +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows email sent over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows email sent over time +# +######################################################################## + + +""" +This is the email-only timeseries renderer for Kibble +unlike timeseries.py, this only shows mail sent, not topics or authors. +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [{"term": {"sender": indata.get("email")}}] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of committers, this period + query["aggs"] = { + "timeseries": {"date_histogram": {"field": "date", "interval": interval}} + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + timeseries = [] + for bucket in res["aggregations"]["timeseries"]["buckets"]: + ts = int(bucket["key"] / 1000) + timeseries.append({"date": ts, "emails": bucket["doc_count"]}) + + JSON_OUT = { + "widgetType": {"chartType": "bar"}, # Recommendation for the UI + "timeseries": timeseries, + "interval": interval, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/timeseries.py b/kibble/api/pages/mail/timeseries.py new file mode 100644 index 
00000000..d90ae655 --- /dev/null +++ b/kibble/api/pages/mail/timeseries.py @@ -0,0 +1,170 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/timeseries +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows email sent over time +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Timeseries' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows email sent over time +# +######################################################################## + + +""" +This is the email timeseries 
renderer for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + which = "committer_email" + role = "committer" + if indata.get("author", False): + which = "author_email" + role = "author" + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + { + "range": { + "date": { + "from": time.strftime( + "%Y/%m/%d 00:00:00", time.gmtime(dateFrom) + ), + "to": time.strftime( + "%Y/%m/%d 23:59:59", time.gmtime(dateTo) + ), + } + } + }, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["should"] = [{"term": {"sender": indata.get("email")}}] + query["query"]["bool"]["minimum_should_match"] = 1 + + # Get number of committers, this period + query["aggs"] = { + "timeseries": { + "date_histogram": {"field": "date", "interval": interval}, + "aggs": { + "email": {"sum": {"field": "emails"}}, + "topics": {"sum": {"field": "topics"}}, + "authors": {"sum": {"field": "authors"}}, + }, + } + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="mailstats", size=0, body=query + ) + + timeseries = [] + for bucket in res["aggregations"]["timeseries"]["buckets"]: + ts = int(bucket["key"] / 1000) + timeseries.append( + { + "date": ts, + "emails": bucket["email"]["value"], + "topics": bucket["topics"]["value"], + "authors": bucket["authors"]["value"], + } + ) + + JSON_OUT = { + "widgetType": {"chartType": "bar"}, # Recommendation for the UI + "timeseries": timeseries, + "interval": interval, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/top-authors.py b/kibble/api/pages/mail/top-authors.py new file mode 100644 index 00000000..cad69b8c --- /dev/null +++ b/kibble/api/pages/mail/top-authors.py @@ -0,0 +1,154 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/top-authors +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of email authors +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of email authors +# +######################################################################## + + +""" +This is the TopN committers list renderer for Kibble +""" + +import hashlib +import json +import re +import time + +ROBITS = r"(git|jira|jenkins|gerrit)@" + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Get top 25 committers this period + query["aggs"] = {"authors": {"terms": {"field": "sender", "size": 30}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + + people = {} + for bucket in res["aggregations"]["authors"]["buckets"]: + email = bucket["key"] + # By default, we want to see humans, not bots on this list! 
+ if re.match(ROBITS, email): + continue + count = bucket["doc_count"] + sha = hashlib.sha1(("%s%s" % (dOrg, email)).encode("utf-8")).hexdigest() + if session.DB.ES.exists(index=session.DB.dbname, doc_type="person", id=sha): + pres = session.DB.ES.get(index=session.DB.dbname, doc_type="person", id=sha) + person = pres["_source"] + person["name"] = person.get("name", "unknown") + people[email] = person + people[email]["gravatar"] = hashlib.md5( + person.get("email", "unknown").encode("utf-8") + ).hexdigest() + people[email]["count"] = count + + topN = [] + for email, person in people.items(): + topN.append(person) + topN = sorted(topN, key=lambda x: x["count"], reverse=True) + + JSON_OUT = { + "topN": {"denoter": "emails", "items": topN}, + "sorted": people, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/top-topics.py b/kibble/api/pages/mail/top-topics.py new file mode 100644 index 00000000..456a6a8c --- /dev/null +++ b/kibble/api/pages/mail/top-topics.py @@ -0,0 +1,136 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/mail/top-topics +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of email authors +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/CommitterList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows the top N of email authors +# +######################################################################## + + +""" +This is the TopN committers list renderer for Kibble +""" + + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + + interval = indata.get("interval", "month") + + #################################################################### + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"ts": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + }, + "sort": [{"emails": "desc"}], + } + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="mailtop", size=25, body=query + ) + + topN = [] + for bucket in res["hits"]["hits"]: + topN.append( + { + "source": bucket["_source"]["sourceURL"], + "name": bucket["_source"]["subject"], + "count": bucket["_source"]["emails"], + } + ) + + JSON_OUT = { + "topN": {"denoter": "emails", "items": topN, "icon": "envelope"}, + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/mail/trends.py b/kibble/api/pages/mail/trends.py new file mode 100644 index 00000000..3fa18325 --- /dev/null +++ b/kibble/api/pages/mail/trends.py @@ -0,0 +1,288 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/mail/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick email trend summary of the past 6 months for your org +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick email trend summary of the past 6 months for your org +# +######################################################################## + + +""" +This is the Email trends renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + dOrg = session.user["defaultOrganisation"] or "apache" + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + { + "range": { + "date": { + "from": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateFrom) + ), + "to": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateTo) + ), + } + } + }, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append({"term": {"sender": indata.get("email")}}) + + # Get number of threads and emails, this period + query["aggs"] = { + "topics": {"sum": {"field": "topics"}}, + "emails": {"sum": {"field": "emails"}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="mailstats", size=0, body=query + ) + no_topics = res["aggregations"]["topics"]["value"] + no_emails = res["aggregations"]["emails"]["value"] + + # Authors + + query = { + "query": { + "bool": { + "must": [ + { + "range": { + "date": { + "from": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateFrom) + ), + "to": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateTo) + ), + } + } + }, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append({"term": {"sender": indata.get("email")}}) + + # Get number of authors, this period + query["aggs"] = {"authors": {"cardinality": {"field": "sender"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + no_authors = res["aggregations"]["authors"]["value"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + query = { + "query": { + "bool": { + "must": [ + { + "range": { + "date": { + "from": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateYonder) + ), + "to": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateFrom - 1) + ), + } + } + }, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append({"term": {"sender": indata.get("email")}}) + + # Get number of threads and emails, this period + query["aggs"] = { + "topics": {"sum": {"field": "topics"}}, + "emails": {"sum": {"field": "emails"}}, + } + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="mailstats", size=0, body=query + ) + no_topics_before = res["aggregations"]["topics"]["value"] + no_emails_before = res["aggregations"]["emails"]["value"] + + # Authors + + query = { + "query": { + "bool": { + "must": [ + { + "range": { + "date": { + "from": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateYonder) + ), + "to": time.strftime( + "%Y/%m/%d %H:%M:%S", time.localtime(dateFrom - 1) + ), + } + } + }, + {"term": {"organisation": dOrg}}, + ] + } + } + } + # Source-specific or view-specific?? 
+ if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + if indata.get("email"): + query["query"]["bool"]["must"].append({"term": {"sender": indata.get("email")}}) + + # Get number of authors, this period + query["aggs"] = {"authors": {"cardinality": {"field": "sender"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="email", size=0, body=query + ) + no_authors_before = res["aggregations"]["authors"]["value"] + + trends = { + "authors": { + "before": no_authors_before, + "after": no_authors, + "title": "People sending email this period", + }, + "topics": { + "before": no_topics_before, + "after": no_topics, + "title": "Topics discussed this period", + }, + "email": { + "before": no_emails_before, + "after": no_emails, + "title": "Emails sent this period", + }, + } + + JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/org/contributors.py b/kibble/api/pages/org/contributors.py new file mode 100644 index 00000000..0a21d5db --- /dev/null +++ b/kibble/api/pages/org/contributors.py @@ -0,0 +1,150 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/org/contributors +######################################################################## +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/contributorList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows contributors for the entire org or matching filters. +# +######################################################################## + + +""" +This is the contributor list renderer for Kibble +""" + +import hashlib +import json +import time + +cached_people = {} # Store people we know, so we don't have to fetch them again. + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + # Fetch all contributors for the org + dOrg = session.user["defaultOrganisation"] or "apache" + query = {"query": {"bool": {"must": [{"term": {"organisation": dOrg}}]}}} + + # Source-specific or view-specific?? + if indata.get("source"): + query["query"]["bool"]["must"].append( + {"term": {"sourceID": indata.get("source")}} + ) + elif viewList: + query["query"]["bool"]["must"].append({"terms": {"sourceID": viewList}}) + + # Date specific? 
+ dateTo = indata.get("to", int(time.time())) + dateFrom = indata.get( + "from", dateTo - (86400 * 30 * 6) + ) # Default to a 6 month span + query["query"]["bool"]["must"].append( + {"range": {"ts": {"from": dateFrom, "to": dateTo}}} + ) + emails = [] + contribs = {} + + for field in ["sender", "author_email", "issueCreator", "issueCloser"]: + N = 0 + while N < 5: + query["aggs"] = { + "by_id": { + "terms": { + "field": field, + "size": 10000, + "include": {"partition": N, "num_partitions": 5}, + } + } + } + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="*,-*_code_commit,-*_file_history", + size=0, + body=query, + ) + # Break if we've found nothing more + # if len(res['aggregations']['by_id']['buckets']) == 0: + # break + # otherwise, add 'em to the pile + for k in res["aggregations"]["by_id"]["buckets"]: + if k["key"] not in emails: + emails.append(k["key"]) + contribs[k["key"]] = contribs.get(k["key"], 0) + k["doc_count"] + N += 1 + + people = [] + for email in emails: + pid = hashlib.sha1( + ("%s%s" % (dOrg, email)).encode("ascii", errors="replace") + ).hexdigest() + person = None + if pid in cached_people: + person = cached_people[pid] + else: + try: + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="person", id=pid + ) + cached_people[pid] = { + "name": doc["_source"]["name"], + "email": doc["_source"]["email"], + "gravatar": hashlib.md5( + email.encode("ascii", errors="replace") + ).hexdigest(), + } + person = cached_people[pid] + except: # pylint: disable=bare-except + pass # Couldn't find 'em, booo + if person: + person["contributions"] = contribs.get(email, 0) + people.append(person) + + JSON_OUT = {"people": people, "okay": True} + yield json.dumps(JSON_OUT) diff --git a/api/pages/org/list.py b/kibble/api/pages/org/list.py similarity index 55% rename from api/pages/org/list.py rename to kibble/api/pages/org/list.py index 28d6d6f5..0949f7de 100644 --- a/api/pages/org/list.py +++ b/kibble/api/pages/org/list.py @@ -1,19 +1,20 
@@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ ######################################################################## # OPENAPI-URI: /api/org/list ######################################################################## @@ -83,13 +84,10 @@ # security: # - cookieAuth: [] # summary: Create a new organisation -# +# ######################################################################## - - - """ This is the Org list renderer for Kibble """ @@ -97,82 +95,79 @@ import json import time + def run(API, environ, indata, session): now = time.time() # We need to be logged in for this! if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint!") - - method = environ['REQUEST_METHOD'] + + method = environ["REQUEST_METHOD"] # Are we making a new org? if method == "PUT": - if session.user['userlevel'] == "admin": - orgname = indata.get('name', 'Foo') - orgdesc = indata.get('desc', '') - orgid = indata.get('id', str(int(time.time()))) - if session.DB.ES.exists(index=session.DB.dbname, doc_type='organisation', id = orgid): + if session.user["userlevel"] == "admin": + orgname = indata.get("name", "Foo") + orgdesc = indata.get("desc", "") + orgid = indata.get("id", str(int(time.time()))) + if session.DB.ES.exists( + index=session.DB.dbname, doc_type="organisation", id=orgid + ): raise API.exception(403, "Organisation ID already in use!") - - doc = { - 'id': orgid, - 'name': orgname, - 'description': orgdesc, - 'admins': [] - } - session.DB.ES.index(index=session.DB.dbname, doc_type='organisation', id = orgid, body = doc) + + doc = {"id": orgid, "name": orgname, "description": orgdesc, "admins": []} + session.DB.ES.index( + index=session.DB.dbname, doc_type="organisation", id=orgid, body=doc + ) time.sleep(1.5) yield json.dumps({"okay": True, "message": "Organisation created!"}) return else: - raise API.exception(403, "Only administrators can create new organisations.") - + raise API.exception( + 403, "Only administrators can create new organisations." 
+ ) + #################################################################### orgs = [] - if session.user['userlevel'] == "admin": + if session.user["userlevel"] == "admin": res = session.DB.ES.search( index=session.DB.dbname, doc_type="organisation", - body = {'query': { 'match_all': {}}} + body={"query": {"match_all": {}}}, ) - for doc in res['hits']['hits']: - orgID = doc['_source']['id'] + for doc in res["hits"]["hits"]: + orgID = doc["_source"]["id"] numDocs = session.DB.ES.count( index=session.DB.dbname, - body = {'query': { 'term': {'organisation': orgID}}} - )['count'] + body={"query": {"term": {"organisation": orgID}}}, + )["count"] numSources = session.DB.ES.count( index=session.DB.dbname, doc_type="source", - body = {'query': { 'term': {'organisation': orgID}}} - )['count'] - doc['_source']['sourceCount'] = numSources - doc['_source']['docCount'] = numDocs - orgs.append(doc['_source']) + body={"query": {"term": {"organisation": orgID}}}, + )["count"] + doc["_source"]["sourceCount"] = numSources + doc["_source"]["docCount"] = numDocs + orgs.append(doc["_source"]) else: res = session.DB.ES.search( index=session.DB.dbname, doc_type="organisation", - body = {'query': { 'terms': {'id': session.user['organisations']}}} + body={"query": {"terms": {"id": session.user["organisations"]}}}, ) - for doc in res['hits']['hits']: - orgID = doc['_source']['id'] + for doc in res["hits"]["hits"]: + orgID = doc["_source"]["id"] numDocs = session.DB.ES.count( index=session.DB.dbname, - body = {'query': { 'term': {'organisation': orgID}}} - )['count'] + body={"query": {"term": {"organisation": orgID}}}, + )["count"] numSources = session.DB.ES.count( index=session.DB.dbname, doc_type="source", - body = {'query': { 'term': {'organisation': orgID}}} - )['count'] - doc['_source']['sourceCount'] = numSources - doc['_source']['docCount'] = numDocs - orgs.append(doc['_source']) - - - JSON_OUT = { - 'organisations': orgs, - 'okay': True, - 'responseTime': time.time() - now - } + 
body={"query": {"term": {"organisation": orgID}}}, + )["count"] + doc["_source"]["sourceCount"] = numSources + doc["_source"]["docCount"] = numDocs + orgs.append(doc["_source"]) + + JSON_OUT = {"organisations": orgs, "okay": True, "responseTime": time.time() - now} yield json.dumps(JSON_OUT) diff --git a/kibble/api/pages/org/members.py b/kibble/api/pages/org/members.py new file mode 100644 index 00000000..31a73f0f --- /dev/null +++ b/kibble/api/pages/org/members.py @@ -0,0 +1,318 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/org/members +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/OrgMembers' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Lists the members of an organisation +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# description: Nothing... 
+# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# type: array # pylint: disable=syntax-error +# items: +# $ref: '#/components/schemas/OrgMembers' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Lists the members of an organisation +# put: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/UserAccountEdit' +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Invite a person to an organisation +# delete: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/UserAccountEdit' +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Remove a person from an organisation +# +######################################################################## + + +""" +This is the Org list renderer for Kibble +""" + + +import json +import time + + +def canInvite(session): + """ Determine if the user can edit sources in this org """ + if session.user["userlevel"] == "admin": + return True + + dOrg = session.user["defaultOrganisation"] or "apache" + if session.DB.ES.exists(index=session.DB.dbname, doc_type="organisation", id=dOrg): + xorg = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=dOrg + )["_source"] + if session.user["email"] in 
xorg["admins"]: + return True + + +def run(API, environ, indata, session): + now = time.time() + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint!") + + method = environ["REQUEST_METHOD"] + + ################################################# + # Inviting a new member? # + ################################################# + if method == "PUT": + if canInvite(session): + newmember = indata.get("email") + isadmin = indata.get("admin", False) + orgid = session.user["defaultOrganisation"] or "apache" + # Make sure the org exists + if not session.DB.ES.exists( + index=session.DB.dbname, doc_type="organisation", id=orgid + ): + raise API.exception(403, "No such organisation!") + + # make sure the user account exists + if not session.DB.ES.exists( + index=session.DB.dbname, doc_type="useraccount", id=newmember + ): + raise API.exception(403, "No such user!") + + # Modify user account + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="useraccount", id=newmember + ) + if orgid not in doc["_source"]["organisations"]: # No duplicates, please + doc["_source"]["organisations"].append(orgid) + session.DB.ES.index( + index=session.DB.dbname, + doc_type="useraccount", + id=newmember, + body=doc["_source"], + ) + + # Get org doc from ES + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=orgid + ) + if isadmin: + if newmember not in doc["_source"]["admins"]: + doc["_source"]["admins"].append(newmember) + # Override old doc + session.DB.ES.index( + index=session.DB.dbname, + doc_type="organisation", + id=orgid, + body=doc["_source"], + ) + time.sleep(1) # Bleh!! + + # If an admin, and not us, and reinvited, we purge the admin bit + elif newmember in doc["_source"]["admins"]: + if newmember == session.user["email"]: + raise API.exception( + 403, "You can't remove yourself from an organisation." 
+ ) + doc["_source"]["admins"].remove(newmember) + # Override old doc + session.DB.ES.index( + index=session.DB.dbname, + doc_type="organisation", + id=orgid, + body=doc["_source"], + ) + time.sleep(1) # Bleh!! + yield json.dumps({"okay": True, "message": "Member invited!!"}) + + return + else: + raise API.exception( + 403, + "Only administrators or organisation owners can invite new members.", + ) + + ################################################# + # DELETE: Remove a member # + ################################################# + if method == "DELETE": + if canInvite(session): + memberid = indata.get("email") + isadmin = indata.get("admin", False) + orgid = session.user["defaultOrganisation"] or "apache" + + # We can't remove ourselves! + if memberid == session.user["email"]: + raise API.exception( + 403, "You can't remove yourself from an organisation." + ) + + # Make sure the org exists + if not session.DB.ES.exists( + index=session.DB.dbname, doc_type="organisation", id=orgid + ): + raise API.exception(403, "No such organisation!") + + # make sure the user account exists + if not session.DB.ES.exists( + index=session.DB.dbname, doc_type="useraccount", id=memberid + ): + raise API.exception(403, "No such user!") + + # Modify user account + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="useraccount", id=memberid + ) + if orgid in doc["_source"]["organisations"]: # No duplicates, please + doc["_source"]["organisations"].remove(orgid) + session.DB.ES.index( + index=session.DB.dbname, + doc_type="useraccount", + id=memberid, + body=doc["_source"], + ) + + # Check is user is admin and remove if so + # Get org doc from ES + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=orgid + ) + if memberid in doc["_source"]["admins"]: + doc["_source"]["admins"].remove(memberid) + # Override old doc + session.DB.ES.index( + index=session.DB.dbname, + doc_type="organisation", + id=orgid, + body=doc["_source"], + ) + time.sleep(1) # 
Bleh!! + + yield json.dumps({"okay": True, "message": "Member removed!"}) + return + else: + raise API.exception( + 403, + "Only administrators or organisation owners can invite new members.", + ) + + ################################################# + # GET/POST: Display members # + ################################################# + if method in ["GET", "POST"]: + orgid = session.user["defaultOrganisation"] or "apache" + if not session.DB.ES.exists( + index=session.DB.dbname, doc_type="organisation", id=orgid + ): + raise API.exception(403, "No such organisation!") + + # Only admins should be able to view this! + if not canInvite(session): + raise API.exception(403, "Only organisation owners can view this list.") + + # Find everyone affiliated with this org + query = {"query": {"bool": {"must": [{"term": {"organisations": orgid}}]}}} + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="useraccount", + size=5000, # TO-DO: make this a scroll?? + body=query, + ) + members = [] + for doc in res["hits"]["hits"]: + members.append(doc["_id"]) + + # Get org doc from ES + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=orgid + ) + JSON_OUT = { + "members": members, + "admins": doc["_source"]["admins"], + "okay": True, + "responseTime": time.time() - now, + } + yield json.dumps(JSON_OUT) diff --git a/api/pages/org/sourcetypes.py b/kibble/api/pages/org/sourcetypes.py similarity index 60% rename from api/pages/org/sourcetypes.py rename to kibble/api/pages/org/sourcetypes.py index 9e5b8afe..e7f8eed3 100644 --- a/api/pages/org/sourcetypes.py +++ b/kibble/api/pages/org/sourcetypes.py @@ -1,19 +1,20 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ ######################################################################## # OPENAPI-URI: /api/org/sourcetypes ######################################################################## @@ -56,24 +57,23 @@ # security: # - cookieAuth: [] # summary: Lists the available source types supported by Kibble -# +# ######################################################################## - - - """ This is the source types handler for Kibble """ +import json +import os import yaml -import json + +from kibble.settings import YAML_DIRECTORY + def run(API, environ, indata, session): - - types = yaml.load(open("yaml/sourcetypes.yaml")) - - yield json.dumps(types) + with open(os.path.join(YAML_DIRECTORY, "sourcetypes.yaml")) as f: + types = yaml.safe_load(f) - \ No newline at end of file + yield json.dumps(types) diff --git a/kibble/api/pages/org/trends.py b/kibble/api/pages/org/trends.py new file mode 100644 index 00000000..c34bdd3c --- /dev/null +++ b/kibble/api/pages/org/trends.py @@ -0,0 +1,169 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/org/trends +######################################################################## +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Trend' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick trend summary of the past 6 months for your org +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Sloc' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Shows a quick trend summary of the past 6 months for your org +# +######################################################################## + + +""" +This is the org trend renderer for Kibble +""" + +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + now = time.time() + + # First, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + if session.DB.ES.exists( + index=session.DB.dbname, doc_type="view", id=indata["view"] + ): + view = session.DB.ES.get( + index=session.DB.dbname, doc_type="view", id=indata["view"] + ) + viewList = view["_source"]["sourceList"] + + dateTo = int(time.time()) + dateFrom = dateTo - (86400 * 30 * 3) # Default to a quarter + if dateFrom < 0: + dateFrom = 0 + dateYonder = dateFrom - (dateTo - dateFrom) + + #################################################################### + # We start by doing all the queries for THIS period. # + # Then we reset the query, and change date to yonder-->from # + # and rerun the same queries. # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "kibbledemo" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateFrom, "to": dateTo}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + + # Get number of commits, this period + res = session.DB.ES.count( + index=session.DB.dbname, doc_type="code_commit", body=query + ) + no_commits = res["count"] + + # Get number of committers, this period + query["aggs"] = {"authors": {"cardinality": {"field": "author_email"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + no_authors = res["aggregations"]["authors"]["value"] + + #################################################################### + # Change to PRIOR SPAN # + #################################################################### + dOrg = session.user["defaultOrganisation"] or "apache" + query = { + "query": { + "bool": { + "must": [ + {"range": {"tsday": {"from": dateYonder, "to": dateFrom - 1}}}, + {"term": {"organisation": dOrg}}, + ] + } + } + } + + # Get number of commits, this period + res = session.DB.ES.count( + index=session.DB.dbname, 
doc_type="code_commit", body=query + ) + no_commits_before = res["count"] + + # Get number of committers, this period + query["aggs"] = {"authors": {"cardinality": {"field": "author_email"}}} + res = session.DB.ES.search( + index=session.DB.dbname, doc_type="code_commit", size=0, body=query + ) + no_authors_before = res["aggregations"]["authors"]["value"] + + trends = { + "authors": { + "before": no_authors_before, + "after": no_authors, + "title": "Contributors this quarter", + }, + "commits": { + "before": no_commits_before, + "after": no_commits, + "title": "Commits this quarter", + }, + } + + JSON_OUT = {"trends": trends, "okay": True, "responseTime": time.time() - now} + yield json.dumps(JSON_OUT) diff --git a/api/pages/session.py b/kibble/api/pages/session.py similarity index 54% rename from api/pages/session.py rename to kibble/api/pages/session.py index 425cd89a..05b0639d 100644 --- a/api/pages/session.py +++ b/kibble/api/pages/session.py @@ -1,19 +1,20 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + ######################################################################## # OPENAPI-URI: /api/session ######################################################################## @@ -85,102 +86,114 @@ # $ref: '#/components/schemas/Error' # description: unexpected error # summary: Log in -# +# ######################################################################## - - - """ This is the user session handler for Kibble """ +import hashlib import json -import re import time -import bcrypt -import hashlib import uuid +import bcrypt + + def run(API, environ, indata, session): - - method = environ['REQUEST_METHOD'] - + + method = environ["REQUEST_METHOD"] + # Logging in? 
if method == "PUT": - u = indata['email'] - p = indata['password'] - if session.DB.ES.exists(index=session.DB.dbname, doc_type='useraccount', id = u): - doc = session.DB.ES.get(index=session.DB.dbname, doc_type='useraccount', id = u) - hp = doc['_source']['password'] - if bcrypt.hashpw(p.encode('utf-8'), hp.encode('utf-8')).decode('ascii') == hp: + u = indata["email"] + p = indata["password"] + if session.DB.ES.exists(index=session.DB.dbname, doc_type="useraccount", id=u): + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="useraccount", id=u + ) + hp = doc["_source"]["password"] + if ( + bcrypt.hashpw(p.encode("utf-8"), hp.encode("utf-8")).decode("ascii") + == hp + ): # If verification is enabled, make sure account is verified - if session.config['accounts'].get('verify'): - if doc['_source']['verified'] == False: - raise API.exception(403, "Your account needs to be verified first. Check your inbox!") + if session.config["accounts"].get("verify"): + if doc["_source"]["verified"] == False: + raise API.exception( + 403, + "Your account needs to be verified first. Check your inbox!", + ) sessionDoc = { - 'cid': u, - 'id': session.cookie, - 'timestamp': int(time.time()) + "cid": u, + "id": session.cookie, + "timestamp": int(time.time()), } - session.DB.ES.index(index=session.DB.dbname, doc_type='uisession', id = session.cookie, body = sessionDoc) + session.DB.ES.index( + index=session.DB.dbname, + doc_type="uisession", + id=session.cookie, + body=sessionDoc, + ) yield json.dumps({"message": "Logged in OK!"}) return - + # Fall back to a 403 if username and password did not match raise API.exception(403, "Wrong username or password supplied!") - - + # We need to be logged in for the rest of this! if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") - + # Delete a session (log out) if method == "DELETE": - session.DB.ES.delete(index=session.DB.dbname, doc_type='uisession', id = session.cookie) + session.DB.ES.delete( + index=session.DB.dbname, doc_type="uisession", id=session.cookie + ) session.newCookie() yield json.dumps({"message": "Logged out, bye bye!"}) - + # Display the user data for this session if method == "GET": - + # Do we have an API key? If not, make one - if not session.user.get('token') or indata.get('newtoken'): + if not session.user.get("token") or indata.get("newtoken"): token = str(uuid.uuid4()) - session.user['token'] = token - session.DB.ES.index(index=session.DB.dbname, doc_type='useraccount', id = session.user['email'], body = session.user) - - # Run a quick search of all orgs we have. - res = session.DB.ES.search( + session.user["token"] = token + session.DB.ES.index( index=session.DB.dbname, - doc_type="organisation", - size = 100, - body = { - 'query': { - 'match_all': {} - } - } + doc_type="useraccount", + id=session.user["email"], + body=session.user, ) - + + # Run a quick search of all orgs we have. 
+ res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="organisation", + size=100, + body={"query": {"match_all": {}}}, + ) + orgs = [] - for hit in res['hits']['hits']: - doc = hit['_source'] + for hit in res["hits"]["hits"]: + doc = hit["_source"] orgs.append(doc) - + JSON_OUT = { - 'email': session.user['email'], - 'displayName': session.user['displayName'], - 'defaultOrganisation': session.user['defaultOrganisation'], - 'organisations': session.user['organisations'], - 'ownerships': session.user['ownerships'], - 'gravatar': hashlib.md5(session.user['email'].encode('utf-8')).hexdigest(), - 'userlevel': session.user['userlevel'], - 'token': session.user['token'] + "email": session.user["email"], + "displayName": session.user["displayName"], + "defaultOrganisation": session.user["defaultOrganisation"], + "organisations": session.user["organisations"], + "ownerships": session.user["ownerships"], + "gravatar": hashlib.md5(session.user["email"].encode("utf-8")).hexdigest(), + "userlevel": session.user["userlevel"], + "token": session.user["token"], } yield json.dumps(JSON_OUT) return - + # Finally, if we hit a method we don't know, balk! yield API.exception(400, "I don't know this request method!!") - \ No newline at end of file diff --git a/kibble/api/pages/sources.py b/kibble/api/pages/sources.py new file mode 100644 index 00000000..4802ae3d --- /dev/null +++ b/kibble/api/pages/sources.py @@ -0,0 +1,306 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +######################################################################## +# OPENAPI-URI: /api/sources +######################################################################## +# delete: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/SourceID' +# description: Source ID info +# required: true +# security: +# - cookieAuth: [] +# summary: Delete an existing source +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/SourceList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Fetches a list of all sources for this organisation +# patch: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Source' +# description: New source data to set +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Edit an existing source +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/SourceList' +# description: 200 Response +# default: +# content: +# 
application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Fetches a list of all sources for this organisation +# put: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/SourceListAdd' +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Add a new source +# +######################################################################## + + +""" +This is the source list handler for Kibble +""" + +import hashlib +import json +import os + +import yaml + +from kibble.settings import YAML_DIRECTORY + + +def canModifySource(session): + """ Determine if the user can edit sources in this org """ + + dOrg = session.user["defaultOrganisation"] or "apache" + if session.DB.ES.exists(index=session.DB.dbname, doc_type="organisation", id=dOrg): + xorg = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=dOrg + )["_source"] + if session.user["email"] in xorg["admins"]: + return True + if session.user["userlevel"] == "admin": + return True + return False + + +def run(API, environ, indata, session): + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! 
%s") + + method = environ["REQUEST_METHOD"] + dOrg = session.user["defaultOrganisation"] + + if method in ["GET", "POST"]: + # Fetch organisation data + + # Make sure we have a default/current org set + if ( + "defaultOrganisation" not in session.user + or not session.user["defaultOrganisation"] + ): + raise API.exception( + 400, + "You must specify an organisation as default/current in order to add sources.", + ) + + if session.DB.ES.exists( + index=session.DB.dbname, doc_type="organisation", id=dOrg + ): + org = session.DB.ES.get( + index=session.DB.dbname, doc_type="organisation", id=dOrg + )["_source"] + del org["admins"] + else: + raise API.exception(404, "No such organisation, '%s'" % (dOrg or "(None)")) + + sourceTypes = indata.get("types", []) + # Fetch all sources for default org + + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="source", + size=5000, + body={"query": {"term": {"organisation": dOrg}}}, + ) + + # Secondly, fetch the view if we have such a thing enabled + viewList = [] + if indata.get("view"): + viewList = session.getView(indata.get("view")) + if indata.get("subfilter") and indata.get("quick"): + viewList = session.subFilter(indata.get("subfilter"), view=viewList) + + sources = [] + for hit in res["hits"]["hits"]: + doc = hit["_source"] + if viewList and not doc["sourceID"] in viewList: + continue + if sourceTypes and not doc["type"] in sourceTypes: + continue + if indata.get("quick"): + xdoc = { + "sourceID": doc["sourceID"], + "type": doc["type"], + "sourceURL": doc["sourceURL"], + } + sources.append(xdoc) + else: + # Creds should be anonymous here + if "creds" in doc: + del doc["creds"] + sources.append(doc) + + JSON_OUT = {"sources": sources, "okay": True, "organisation": org} + yield json.dumps(JSON_OUT) + return + + # Add one or more sources + if method == "PUT": + if canModifySource(session): + new = 0 + old = 0 + with open(os.path.join(YAML_DIRECTORY, "sourcetypes.yaml")) as f: + stypes = yaml.safe_load(f) + for 
source in indata.get("sources", []): + sourceURL = source["sourceURL"] + sourceType = source["type"] + creds = {} + if sourceType not in stypes: + raise API.exception(400, "Attempt to add unknown source type!") + if "optauth" in stypes[sourceType]: + for el in stypes[sourceType]["optauth"]: + if el in source and len(source[el]) > 0: + creds[el] = source[el] + sourceID = hashlib.sha224( + ("%s-%s" % (sourceType, sourceURL)).encode("utf-8") + ).hexdigest() + + # Make sure we have a default/current org set + if ( + "defaultOrganisation" not in session.user + or not session.user["defaultOrganisation"] + ): + raise API.exception( + 400, + "You must first specify an organisation as default/current in order to add sources.", + ) + + doc = { + "organisation": dOrg, + "sourceURL": sourceURL, + "sourceID": sourceID, + "type": sourceType, + "creds": creds, + "steps": {}, + } + if session.DB.ES.exists( + index=session.DB.dbname, doc_type="source", id=sourceID + ): + old += 1 + else: + new += 1 + session.DB.ES.index( + index=session.DB.dbname, doc_type="source", id=sourceID, body=doc + ) + yield json.dumps( + {"message": "Sources added/updated", "added": new, "updated": old} + ) + else: + raise API.exception( + 403, "You don't have permission to add sources to this organisation." 
+            )
+
+    # Delete a source
+    if method == "DELETE":
+        if canModifySource(session):
+            sourceID = indata.get("id")
+            if session.DB.ES.exists(
+                index=session.DB.dbname, doc_type="source", id=sourceID
+            ):
+                # Delete all data pertaining to this source
+                # For ES >= 6.x, use a glob for removing from all indices
+                if session.DB.ESversion > 5:
+                    session.DB.ES.delete_by_query(
+                        index=session.DB.dbname + "_*",
+                        body={"query": {"match": {"sourceID": sourceID}}},
+                    )
+                else:
+                    # For ES <= 5.x, just remove from the main index
+                    session.DB.ES.delete_by_query(
+                        index=session.DB.dbname,
+                        body={"query": {"match": {"sourceID": sourceID}}},
+                    )
+                yield json.dumps({"message": "Source deleted"})
+            else:
+                raise API.exception(404, "No such source item")
+        else:
+            raise API.exception(403, "You don't have permission to delete this source.")
+
+    # Edit a source
+    if method == "PATCH":
+        pass
diff --git a/kibble/api/pages/verify.py b/kibble/api/pages/verify.py
new file mode 100644
index 00000000..15d5c133
--- /dev/null
+++ b/kibble/api/pages/verify.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+ +######################################################################## +# OPENAPI-URI: /api/verify/{email}/{vcode} +######################################################################## +# get: +# summary: Verify an account +# parameters: +# - name: email +# in: path +# description: Email address of account +# required: true +# schema: +# type: string # pylint: disable=syntax-error +# - name: vcode +# in: path +# description: Verification code +# required: true +# schema: +# type: string # pylint: disable=syntax-error +# responses: +# '200': +# description: 200 Response +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# default: +# description: unexpected error +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# +######################################################################## + + +""" +This is the user account verifier for Kibble. +""" + + +def run(API, environ, indata, session): + + # Get vocde, make sure it's 40 chars + vcode = indata.get("vcode") + if len(vcode) != 40: + raise API.exception(400, "Invalid verification code!") + + # Find the account with this vcode + email = indata.get("email") + if len(email) < 7: + raise API.exception(400, "Invalid email address presented.") + + if session.DB.ES.exists(index=session.DB.dbname, doc_type="useraccount", id=email): + doc = session.DB.ES.get( + index=session.DB.dbname, doc_type="useraccount", id=email + ) + # Do the codes match?? + if doc["_source"]["vcode"] == vcode: + doc["_source"]["verified"] = True + # Save account as verified + session.DB.ES.index( + index=session.DB.dbname, + doc_type="useraccount", + id=email, + body=doc["_source"], + ) + yield ("Your account has been verified, you can now log in!") + else: + raise API.exception(404, "Invalid verification code presented!") + else: + raise API.exception( + 404, "Invalid verification code presented!" 
+ ) # Don't give away if such a user exists, pssst diff --git a/kibble/api/pages/views.py b/kibble/api/pages/views.py new file mode 100644 index 00000000..cfaf98ae --- /dev/null +++ b/kibble/api/pages/views.py @@ -0,0 +1,319 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +######################################################################## +# OPENAPI-URI: /api/views +######################################################################## +# delete: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/editView' +# description: View to delete +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Delete a new view +# get: +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ViewList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Fetches a list of all views (filters) for this user +# patch: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/editView' +# description: New source data to set +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Edit an existing source +# post: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/defaultWidgetArgs' +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ViewList' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Fetches a list of 
all views (filters) for this user +# put: +# requestBody: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/editView' +# description: New view data to add +# required: true +# responses: +# '200': +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/ActionCompleted' +# description: 200 Response +# default: +# content: +# application/json: +# schema: +# $ref: '#/components/schemas/Error' +# description: unexpected error +# security: +# - cookieAuth: [] +# summary: Add a new view +# +######################################################################## + + +""" +This is the views (filters) list handler for Kibble +""" + +import hashlib +import json +import time + + +def run(API, environ, indata, session): + + # We need to be logged in for this! + if not session.user: + raise API.exception(403, "You must be logged in to use this API endpoint! %s") + + method = environ["REQUEST_METHOD"] + dOrg = session.user["defaultOrganisation"] or "apache" + + # Are we adding a view? + if method == "PUT": + viewID = hashlib.sha224( + ("%s-%s-%s" % (time.time(), session.user["email"], dOrg)).encode("utf-8") + ).hexdigest() + sources = indata.get("sources", []) + name = indata.get("name", "unknown view") + public = indata.get("public", False) + if public: + if not ( + session.user["userlevel"] == "admin" + or dOrg in session.user["ownerships"] + ): + raise API.exception( + 403, "Only owners of an organisation may create public views." + ) + doc = { + "id": viewID, + "email": session.user["email"], + "organisation": dOrg, + "sourceList": sources, + "name": name, + "created": int(time.time()), + "publicView": public, + } + session.DB.ES.index( + index=session.DB.dbname, doc_type="view", id=viewID, body=doc + ) + yield json.dumps({"okay": True, "message": "View created"}) + + # Are we editing (patching) a view? 
+ if method == "PATCH": + viewID = indata.get("id") + if viewID and session.DB.ES.exists( + index=session.DB.dbname, doc_type="view", id=viewID + ): + doc = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id=viewID) + if ( + session.user["userlevel"] == "admin" + or doc["_source"]["email"] == session.user["email"] + ): + sources = indata.get("sources", []) + doc["_source"]["sourceList"] = sources + session.DB.ES.index( + index=session.DB.dbname, + doc_type="view", + id=viewID, + body=doc["_source"], + ) + yield json.dumps({"okay": True, "message": "View updated"}) + else: + raise API.exception(403, "You don't own this view, and cannot edit it.") + else: + raise API.exception(404, "We couldn't find a view with this ID.") + + # Removing a view? + if method == "DELETE": + viewID = indata.get("id") + if viewID and session.DB.ES.exists( + index=session.DB.dbname, doc_type="view", id=viewID + ): + doc = session.DB.ES.get(index=session.DB.dbname, doc_type="view", id=viewID) + if ( + session.user["userlevel"] == "admin" + or doc["_source"]["email"] == session.user["email"] + ): + session.DB.ES.delete( + index=session.DB.dbname, doc_type="view", id=viewID + ) + yield json.dumps({"okay": True, "message": "View deleted"}) + else: + raise API.exception( + 403, "You don't own this view, and cannot delete it." + ) + else: + raise API.exception(404, "We couldn't find a view with this ID.") + + if method in ["GET", "POST"]: + # Fetch all views for default org + + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="view", + size=5000, + body={"query": {"term": {"email": session.user["email"]}}}, + ) + + # Are we looking at someone elses view? 
+ if indata.get("view"): + viewID = indata.get("view") + if session.DB.ES.exists( + index=session.DB.dbname, doc_type="view", id=viewID + ): + blob = session.DB.ES.get( + index=session.DB.dbname, doc_type="view", id=viewID + ) + if ( + blob["_source"]["email"] != session.user["email"] + and not blob["_source"]["publicView"] + ): + blob["_source"]["name"] += ( + " (shared by " + blob["_source"]["email"] + ")" + ) + res["hits"]["hits"].append(blob) + sources = [] + + # Include public views?? + if not indata.get("sources", False): + pres = session.DB.ES.search( + index=session.DB.dbname, + doc_type="view", + size=5000, + body={ + "query": { + "bool": { + "must": [ + {"term": {"publicView": True}}, + {"term": {"organisation": dOrg}}, + ] + } + } + }, + ) + for hit in pres["hits"]["hits"]: + if hit["_source"]["email"] != session.user["email"]: + hit["_source"]["name"] += " (shared view)" + res["hits"]["hits"].append(hit) + + for hit in res["hits"]["hits"]: + doc = hit["_source"] + if doc["organisation"] != dOrg: + continue + if indata.get("quick"): + xdoc = { + "id": doc["id"], + "name": doc["name"], + "organisation": doc["organisation"], + } + sources.append(xdoc) + else: + sources.append(doc) + + allsources = [] + if indata.get("sources", False): + res = session.DB.ES.search( + index=session.DB.dbname, + doc_type="source", + size=5000, + body={"query": {"term": {"organisation": dOrg}}}, + ) + for zdoc in res["hits"]["hits"]: + doc = zdoc["_source"] + xdoc = { + "sourceID": doc["sourceID"], + "type": doc["type"], + "sourceURL": doc["sourceURL"], + } + allsources.append(xdoc) + + JSON_OUT = { + "views": sources, + "sources": allsources, + "okay": True, + "organisation": dOrg, + } + yield json.dumps(JSON_OUT) diff --git a/api/pages/widgets.py b/kibble/api/pages/widgets.py similarity index 52% rename from api/pages/widgets.py rename to kibble/api/pages/widgets.py index cebda02a..a7cd843a 100644 --- a/api/pages/widgets.py +++ b/kibble/api/pages/widgets.py @@ -1,19 +1,20 @@ 
-#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ ######################################################################## # OPENAPI-URI: /api/widgets/{pageid} ######################################################################## @@ -45,23 +46,26 @@ """ This is the widget design handler for Kibble """ +import json +import os import yaml -import json + +from kibble.settings import YAML_DIRECTORY + def run(API, environ, indata, session): - + if not session.user: raise API.exception(403, "You must be logged in to use this API endpoint! %s") - - widgets = yaml.load(open("yaml/widgets.yaml")) - - page = indata['pageid'] - if not page or page == '0': - page = widgets.get('defaultWidget', 'repos') - if page in widgets['widgets']: - yield json.dumps(widgets['widgets'][page]) + + with open(os.path.join(YAML_DIRECTORY, "widgets.yaml")) as f: + widgets = yaml.safe_load(f) + + page = indata["pageid"] + if not page or page == "0": + page = widgets.get("defaultWidget", "repos") + if page in widgets["widgets"]: + yield json.dumps(widgets["widgets"][page]) else: raise API.exception(404, "Widget design not found!") - - \ No newline at end of file diff --git a/kibble/api/plugins/__init__.py b/kibble/api/plugins/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/api/plugins/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/api/plugins/database.py b/kibble/api/plugins/database.py new file mode 100644 index 00000000..a4bc6c59 --- /dev/null +++ b/kibble/api/plugins/database.py @@ -0,0 +1,142 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the ES library for Apache Kibble. +It stores the elasticsearch handler and config options. 
+""" + +import elasticsearch + +from kibble.configuration import KibbleConfigParser + + +class KibbleESWrapper: + """ + Class for rewriting old-style queries to the new ones, + where doc_type is an integral part of the DB name + """ + + def __init__(self, ES): + self.ES = ES + + def get(self, index, doc_type, id): + return self.ES.get(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def exists(self, index, doc_type, id): + return self.ES.exists(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def delete(self, index, doc_type, id): + return self.ES.delete(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def index(self, index, doc_type, id, body): + return self.ES.index( + index=index + "_" + doc_type, doc_type="_doc", id=id, body=body + ) + + def update(self, index, doc_type, id, body): + return self.ES.update( + index=index + "_" + doc_type, doc_type="_doc", id=id, body=body + ) + + def scroll(self, scroll_id, scroll): + return self.ES.scroll(scroll_id=scroll_id, scroll=scroll) + + def delete_by_query(self, **kwargs): + return self.ES.delete_by_query(**kwargs) + + def search( + self, index, doc_type, size=100, scroll=None, _source_include=None, body=None + ): + return self.ES.search( + index=index + "_" + doc_type, + doc_type="_doc", + size=size, + scroll=scroll, + _source_include=_source_include, + body=body, + ) + + def count(self, index, doc_type="*", body=None): + return self.ES.count(index=index + "_" + doc_type, doc_type="_doc", body=body) + + +class KibbleESWrapperSeven: + """ + Class for rewriting old-style queries to the >= 7.x ones, + where doc_type is an integral part of the DB name and NO DOC_TYPE! 
+ """ + + def __init__(self, ES): + self.ES = ES + + def get(self, index, doc_type, id): + return self.ES.get(index=index + "_" + doc_type, id=id) + + def exists(self, index, doc_type, id): + return self.ES.exists(index=index + "_" + doc_type, id=id) + + def delete(self, index, doc_type, id): + return self.ES.delete(index=index + "_" + doc_type, id=id) + + def index(self, index, doc_type, id, body): + return self.ES.index(index=index + "_" + doc_type, id=id, body=body) + + def update(self, index, doc_type, id, body): + return self.ES.update(index=index + "_" + doc_type, id=id, body=body) + + def scroll(self, scroll_id, scroll): + return self.ES.scroll(scroll_id=scroll_id, scroll=scroll) + + def delete_by_query(self, **kwargs): + return self.ES.delete_by_query(**kwargs) + + def search( + self, index, doc_type, size=100, scroll=None, _source_include=None, body=None + ): + return self.ES.search( + index=index + "_" + doc_type, + size=size, + scroll=scroll, + _source_includes=_source_include, + body=body, + ) + + def count(self, index, doc_type="*", body=None): + return self.ES.count(index=index + "_" + doc_type, body=body) + + +class KibbleDatabase: + def __init__(self, config: KibbleConfigParser): + self.config = config + self.dbname = config.get("elasticsearch", "dbname") + self.ES = elasticsearch.Elasticsearch( + [config.get("elasticsearch", "conn_uri")], + use_ssl=config.getboolean("elasticsearch", "ssl"), + verify_certs=False, + max_retries=5, + retry_on_timeout=True, + ) + + # IMPORTANT BIT: Figure out if this is ES < 6.x, 6.x or >= 7.x. + # If so, we're using the new ES DB mappings, and need to adjust ALL + # ES calls to match this. 
+ self.ESversion = int(self.ES.info()["version"]["number"].split(".")[0]) + if self.ESversion >= 7: + self.ES = KibbleESWrapperSeven(self.ES) + elif self.ESversion >= 6: + self.ES = KibbleESWrapper(self.ES) diff --git a/kibble/api/plugins/openapi.py b/kibble/api/plugins/openapi.py new file mode 100644 index 00000000..08f58a4a --- /dev/null +++ b/kibble/api/plugins/openapi.py @@ -0,0 +1,325 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the OpenAPI validator library. 
+Validates input using the OpenAPI specification version 3 from +https://github.com/OAI/OpenAPI-Specification (a simplified version, ahem) +""" + +import functools +import json +import operator +import re + +import yaml + + +class OpenAPIException(Exception): + def __init__(self, message): + super().__init__() + self.message = message + + +# Python type names to JSON type names +py2JSON = { + "int": "integer", + "float": "float", + "str": "string", + "list": "array", + "dict": "object", + "bool": "boolean", +} + +mcolors = { + "PUT": "#fca130", + "DELETE": "#f93e3e", + "GET": "#61affe", + "POST": "#49cc5c", + "PATCH": "#d5a37e", +} + + +class OpenAPI: + def __init__(self, APIFile): + """ Instantiates an OpenAPI validator given a YAML specification""" + if APIFile.endswith(".json") or APIFile.endswith(".js"): + self.API = json.load(open(APIFile)) + else: + self.API = yaml.safe_load(open(APIFile)) + + @staticmethod + def validateType(field, value, ftype): + """ Validate a single field value against an expected type """ + + # Get type of value, convert to JSON name of type. + pyType = type(value).__name__ + jsonType = py2JSON[pyType] if pyType in py2JSON else pyType + + # Check if type matches + if ftype != jsonType: + raise OpenAPIException( + "OpenAPI mismatch: Field '%s' was expected to be %s, but was really %s!" + % (field, ftype, jsonType) + ) + + def validateSchema(self, pdef, formdata, schema=None): + """ Validate (sub)parameters against OpenAPI specs """ + + # allOf: list of schemas to validate against + if "allOf" in pdef: + for subdef in pdef["allOf"]: + self.validateSchema(subdef, formdata) + + where = "JSON body" + # Symbolic link?? 
+ if "schema" in pdef: + schema = pdef["schema"]["$ref"] + if "$ref" in pdef: + schema = pdef["$ref"] + if schema: + # #/foo/bar/baz --> dict['foo']['bar']['baz'] + pdef = functools.reduce(operator.getitem, schema.split("/")[1:], self.API) + where = "item matching schema %s" % schema + + # Check that all required fields are present + if "required" in pdef: + for field in pdef["required"]: + if field not in formdata: + raise OpenAPIException( + "OpenAPI mismatch: Missing input field '%s' in %s!" + % (field, where) + ) + + # Now check for valid format of input data + for field in formdata: + if "properties" not in pdef or field not in pdef["properties"]: + raise OpenAPIException( + "Unknown input field '%s' in %s!" % (field, where) + ) + if "type" not in pdef["properties"][field]: + raise OpenAPIException( + "OpenAPI mismatch: Field '%s' was found in api.yaml, but no format was specified in specs!" + % field + ) + ftype = pdef["properties"][field]["type"] + self.validateType(field, formdata[field], ftype) + + # Validate sub-arrays + if ftype == "array" and "items" in pdef["properties"][field]: + for item in formdata[field]: + if "$ref" in pdef["properties"][field]["items"]: + self.validateSchema(pdef["properties"][field]["items"], item) + else: + self.validateType( + field, + formdata[field], + pdef["properties"][field]["items"]["type"], + ) + + # Validate sub-hashes + if ftype == "hash" and "schema" in pdef["properties"][field]: + self.validateSchema(pdef["properties"][field], formdata[field]) + + def validateParameters(self, defs, formdata): + # + pass + + def validate(self, method="GET", path="/foo", formdata=None): + """ Validate the request method and input data against the OpenAPI specification """ + + # Make sure we're not dealing with a dynamic URL. + # If we find /foo/{key}, we fold that into the form data + # and process as if it's a json input field for now. 
+ if not self.API["paths"].get(path): + for xpath in self.API["paths"]: + pathRE = re.sub(r"\{(.+?)\}", r"(?P<\1>[^/]+)", xpath) + m = re.match(pathRE, path) + if m: + for k, v in m.groupdict().items(): + formdata[k] = v + path = xpath + break + + if self.API["paths"].get(path): + defs = self.API["paths"].get(path) + method = method.lower() + if method in defs: + mdefs = defs[method] + if formdata and "parameters" in mdefs: + self.validateParameters(mdefs["parameters"], formdata) + elif formdata and "requestBody" not in mdefs: + raise OpenAPIException( + "OpenAPI mismatch: JSON data is now allowed for this request type" + ) + elif ( + formdata + and "requestBody" in mdefs + and "content" in mdefs["requestBody"] + ): + + # SHORTCUT: We only care about JSON input for Kibble! Disregard other types + if "application/json" not in mdefs["requestBody"]["content"]: + raise OpenAPIException( + "OpenAPI mismatch: API endpoint accepts input, but no application/json definitions found in api.yaml!" + ) + jdefs = mdefs["requestBody"]["content"]["application/json"] + + # Check that required params are here + self.validateSchema(jdefs, formdata) + + else: + raise OpenAPIException( + "OpenAPI mismatch: Method %s is not registered for this API" + % method + ) + else: + raise OpenAPIException("OpenAPI mismatch: Unknown API path '%s'!" 
% path) + + def dumpExamples(self, pdef, array=False): + schema = None + if "schema" in pdef: + if "type" in pdef["schema"] and pdef["schema"]["type"] == "array": + array = True + schema = pdef["schema"]["items"]["$ref"] + else: + schema = pdef["schema"]["$ref"] + if "$ref" in pdef: + schema = pdef["$ref"] + if schema: + # #/foo/bar/baz --> dict['foo']['bar']['baz'] + pdef = functools.reduce(operator.getitem, schema.split("/")[1:], self.API) + js = {} + desc = {} + if "properties" in pdef: + for k, v in pdef["properties"].items(): + if "description" in v: + desc[k] = [v["type"], v["description"]] + if "example" in v: + js[k] = v["example"] + elif "items" in v: + if v["type"] == "array": + js[k], _ = self.dumpExamples(v["items"], True) + else: + js[k], _ = self.dumpExamples(v["items"]) + return [js if not array else [js], desc] + + def toHTML(self): + """ Blurps out the specs in a pretty HTML blob """ + print( + """ + + + + + +""" + ) + li = "

Overview:

" + print(li) + for path, spec in sorted(self.API["paths"].items()): + for method, mspec in sorted(spec.items()): + method = method.upper() + summary = mspec.get("summary", "No summary available") + resp = "" + inp = "" + inpvars = "" + linkname = "%s%s" % (method.lower(), path.replace("/", "-")) + if "responses" in mspec: + for code, cresp in sorted(mspec["responses"].items()): + for ctype, pdef in cresp["content"].items(): + xjs, desc = self.dumpExamples(pdef) + js = json.dumps(xjs, indent=4) + resp += ( + "
%s:\n%s
\n
\n" + % (code, js) + ) + + if "requestBody" in mspec: + for ctype, pdef in mspec["requestBody"]["content"].items(): + xjs, desc = self.dumpExamples(pdef) + if desc: + for k, v in desc.items(): + inpvars += ( + "%s: (%s) %s
\n" + % (k, v[0], v[1]) + ) + js = json.dumps(xjs, indent=4) + inp += ( + "

Input examples:

%s:\n%s
\n
" + % (ctype, js) + ) + + if inpvars: + inpvars = ( + "
%s
\n
" + % inpvars + ) + + print( + """ +
+
+ + +
%s
+ + + %s +
+ %s
+
+

JSON parameters:

+ %s +
+ %s +
+
+

Response examples:

+
%s
+
+
+
+ """ + % ( + linkname, + mcolors[method], + mcolors[method], + mcolors[method], + method, + path, + summary, + "block" if inp else "none", + inpvars, + inp, + resp, + ) + ) + # print("%s %s: %s" % (method.upper(), path, mspec['summary'])) + print("") diff --git a/kibble/api/plugins/session.py b/kibble/api/plugins/session.py new file mode 100644 index 00000000..338a9b99 --- /dev/null +++ b/kibble/api/plugins/session.py @@ -0,0 +1,198 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the session library for Apache Kibble. 
+It handles setting/getting cookies and user prefs +""" + +import http.cookies +import re +import time +import uuid + + +class KibbleSession: + def getView(self, viewID): + if self.DB.ES.exists(index=self.DB.dbname, doc_type="view", id=viewID): + view = self.DB.ES.get(index=self.DB.dbname, doc_type="view", id=viewID) + return view["_source"]["sourceList"] + return [] + + def subFilter(self, subfilter, view=None): + view = view or [] + if len(subfilter) == 0: + return view + d_org = self.user["defaultOrganisation"] or "apache" + res = self.DB.ES.search( + index=self.DB.dbname, + doc_type="source", + size=10000, + _source_include=["sourceURL", "sourceID"], + body={"query": {"bool": {"must": [{"term": {"organisation": d_org}}]}}}, + ) + sources = [] + for doc in res["hits"]["hits"]: + sid = doc["_source"]["sourceID"] + m = re.search(subfilter, doc["_source"]["sourceURL"], re.IGNORECASE) + if m and ((not view) or (sid in view)): + sources.append(sid) + if not sources: + sources = ["x"] # blank return to not show everything + return sources + + def subType(self, stype, view=None): + view = view or [] + if len(stype) == 0: + return view + if isinstance(stype, str): + stype = [stype] + dOrg = self.user["defaultOrganisation"] or "apache" + res = self.DB.ES.search( + index=self.DB.dbname, + doc_type="source", + size=10000, + _source_include=["sourceURL", "sourceID", "type"], + body={ + "query": { + "bool": { + "must": [ + {"term": {"organisation": dOrg}}, + {"terms": {"type": stype}}, + ] + } + } + }, + ) + sources = [] + for doc in res["hits"]["hits"]: + sid = doc["_source"]["sourceID"] + m = doc["_source"]["type"] in stype + if m and ((not view) or (sid in view)): + sources.append(sid) + if not sources: + sources = ["x"] # blank return to not show eeeeverything + return sources + + def logout(self): + """Log out user and wipe cookie""" + if self.user and self.cookie: + cookies = http.cookies.SimpleCookie() + cookies["kibble_session"] = "null" + self.headers.append( + 
("Set-Cookie", cookies["kibble_session"].OutputString()) + ) + try: + self.DB.ES.delete( + index=self.DB.dbname, doc_type="uisession", id=self.cookie + ) + self.cookie = None + self.user = None + except: # pylint: disable=bare-except + pass + + def newCookie(self): + cookie = uuid.uuid4() + cookies = http.cookies.SimpleCookie() + cookies["kibble_session"] = cookie + cookies["kibble_session"]["expires"] = 86400 * 365 # Expire one year from now + self.headers.append(("Set-Cookie", cookies["kibble_session"].OutputString())) + + def __init__(self, DB, environ, config): + """ + Loads the current user session or initiates a new session if + none was found. + """ + self.config = config + self.user = None + self.DB = DB + self.headers = [("Content-Type", "application/json; charset=utf-8")] + self.cookie = None + + # Construct the URL we're visiting + self.url = "%s://%s" % ( + environ["wsgi.url_scheme"], + environ.get("HTTP_HOST", environ.get("SERVER_NAME")), + ) + self.url += environ.get("SCRIPT_NAME", "/") + + # Get Kibble cookie + cookie = None + cookies = None + if "HTTP_KIBBLE_TOKEN" in environ: + token = environ.get("HTTP_KIBBLE_TOKEN") + if re.match( + r"^[-a-f0-9]+$", token + ): # Validate token, must follow UUID4 specs + res = self.DB.ES.search( + index=self.DB.dbname, + doc_type="useraccount", + body={"query": {"match": {"token": token}}}, + ) + if res["hits"]["hits"]: + self.user = res["hits"]["hits"][0]["_source"] + self.newCookie() + else: + if "HTTP_COOKIE" in environ: + cookies = http.cookies.SimpleCookie(environ["HTTP_COOKIE"]) + if cookies and "kibble_session" in cookies: + cookie = cookies["kibble_session"].value + try: + if re.match( + r"^[-a-f0-9]+$", cookie + ): # Validate cookie, must follow UUID4 specs + doc = None + sdoc = self.DB.ES.get( + index=self.DB.dbname, doc_type="uisession", id=cookie + ) + if sdoc and "cid" in sdoc["_source"]: + doc = self.DB.ES.get( + index=self.DB.dbname, + doc_type="useraccount", + id=sdoc["_source"]["cid"], + ) + if 
doc and "_source" in doc: + # Make sure this cookie has been used in the past 7 days, else nullify it. + # Further more, run an update of the session if >1 hour ago since last update. + age = time.time() - sdoc["_source"]["timestamp"] + if age > (7 * 86400): + self.DB.ES.delete( + index=self.DB.dbname, + doc_type="uisession", + id=cookie, + ) + sdoc["_source"] = None # Wipe it! + doc = None + elif age > 3600: + sdoc["_source"]["timestamp"] = int( + time.time() + ) # Update timestamp in session DB + self.DB.ES.update( + index=self.DB.dbname, + doc_type="uisession", + id=cookie, + body={"doc": sdoc["_source"]}, + ) + if doc: + self.user = doc["_source"] + else: + cookie = None + except Exception as err: + print(err) + if not cookie: + self.newCookie() + self.cookie = cookie diff --git a/kibble/api/yaml/kibble.yaml.tmp b/kibble/api/yaml/kibble.yaml.tmp new file mode 100644 index 00000000..71a90c01 --- /dev/null +++ b/kibble/api/yaml/kibble.yaml.tmp @@ -0,0 +1,15 @@ +accounts: + allowSignup: true + verify: true +api: + database: 2 + version: 0.1.0 +elasticsearch: + dbname: kibble + host: elasticsearch + port: 9200 + ssl: false +mail: + mailhost: localhost + mailport: 25 + sender: Kibble diff --git a/api/yaml/openapi.yaml b/kibble/api/yaml/openapi.yaml similarity index 100% rename from api/yaml/openapi.yaml rename to kibble/api/yaml/openapi.yaml diff --git a/api/yaml/openapi/combine.py b/kibble/api/yaml/openapi/combine.py similarity index 59% rename from api/yaml/openapi/combine.py rename to kibble/api/yaml/openapi/combine.py index 11afc958..0b04380a 100644 --- a/api/yaml/openapi/combine.py +++ b/kibble/api/yaml/openapi/combine.py @@ -1,27 +1,27 @@ -#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. -import yaml import os -import sys import re +import sys -license = """#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" +import yaml + +from kibble.settings import YAML_DIRECTORY baseyaml = """ # THIS IS PULLED FROM SCRIPTS AND AUTOGENERATED! 
@@ -40,10 +40,10 @@ def deconstruct(): - yml = yaml.load(open(bpath + "/../openapi.yaml")) + yml = yaml.safe_load(open(bpath + "/../openapi.yaml")) noDefs = 0 print("Dumping paths into pages...") - for endpoint, defs in yml['paths'].items(): + for endpoint, defs in yml["paths"].items(): noDefs += 1 xendpoint = endpoint.replace("/api/", "") ypath = os.path.abspath("%s/../../pages/%s.py" % (bpath, xendpoint)) @@ -51,41 +51,53 @@ def deconstruct(): if os.path.isfile(ypath): print("Editing %s" % ypath) contents = open(ypath, "r").read() - contents = re.sub(r"^([#\n](?!\s*\"\"\")[^\r\n]*\n?)+", "", contents, re.MULTILINE) + contents = re.sub( + r"^([#\n](?!\s*\"\"\")[^\r\n]*\n?)+", "", contents, re.MULTILINE + ) odefs = yaml.dump(defs, default_flow_style=False) odefs = "\n".join(["# %s" % line for line in odefs.split("\n")]) with open(ypath, "w") as f: - f.write(license) - f.write("########################################################################\n") + f.write( + "########################################################################\n" + ) f.write("# OPENAPI-URI: %s\n" % endpoint) - f.write("########################################################################\n") + f.write( + "########################################################################\n" + ) f.write(odefs) - f.write("\n########################################################################\n") + f.write( + "\n########################################################################\n" + ) f.write("\n\n") f.write(contents) f.close() - + print("Dumping security components...") - for basetype, bdefs in yml['components'].items(): + for basetype, bdefs in yml["components"].items(): for schema, defs in bdefs.items(): noDefs += 1 ypath = "%s/components/%s/%s.yaml" % (bpath, basetype, schema) ydir = os.path.dirname(ypath) if not os.path.isdir(ydir): print("Making directory %s" % ydir) - os.makedirs(ydir, exist_ok = True) + os.makedirs(ydir, exist_ok=True) with open(ypath, "w") as f: - 
f.write("########################################################################\n") - f.write("# %-68s #\n" % defs.get('summary', schema)) - f.write("########################################################################\n") + f.write( + "########################################################################\n" + ) + f.write("# %-68s #\n" % defs.get("summary", schema)) + f.write( + "########################################################################\n" + ) f.write(yaml.dump(defs, default_flow_style=False)) f.close() print("Dumped %u definitions." % noDefs) - + + def construct(): yml = {} - yml['paths'] = {} - yml['components'] = {} + yml["paths"] = {} + yml["components"] = {} apidir = os.path.abspath("%s/../../pages/" % bpath) print("Scanning %s" % apidir) for d in os.listdir(apidir): @@ -103,22 +115,23 @@ def construct(): cyml = m.group(2) print("Weaving in API path %s" % apath) cyml = "\n".join([line[2:] for line in cyml.split("\n")]) - defs = yaml.load(cyml) - yml['paths'][apath] = defs + defs = yaml.safe_load(cyml) + yml["paths"][apath] = defs else: fname = d if fname.endswith(".py"): fpath = "%s/%s" % (apidir, fname) print("Scanning %s" % fpath) - contents = open(fpath, "r").read() + with open(fpath, "r") as f: + contents = f.read() m = re.search(r"OPENAPI-URI: (\S+)\n##+\n([\s\S]+?)##+", contents) if m: apath = m.group(1) cyml = m.group(2) print("Weaving in API path %s" % apath) cyml = "\n".join([line[2:] for line in cyml.split("\n")]) - defs = yaml.load(cyml) - yml['paths'][apath] = defs + defs = yaml.safe_load(cyml) + yml["paths"][apath] = defs apidir = os.path.abspath("%s/components" % bpath) print("Scanning %s" % apidir) for d in os.listdir(apidir): @@ -127,19 +140,20 @@ def construct(): print("Scanning %s" % cdir) for fname in os.listdir(cdir): if fname.endswith(".yaml"): - yml['components'][d] = yml['components'].get(d, {}) + yml["components"][d] = yml["components"].get(d, {}) fpath = "%s/%s" % (cdir, fname) print("Scanning %s" % fpath) - 
defs = yaml.load(open(fpath)) - yml['components'][d][fname.replace(".yaml", "")] = defs - ypath = os.path.abspath("%s/../openapi.yaml" % bpath) + defs = yaml.safe_load(open(fpath)) + yml["components"][d][fname.replace(".yaml", "")] = defs + ypath = os.path.join(YAML_DIRECTORY, "openapi.yaml") with open(ypath, "w") as f: f.write(baseyaml) f.write(yaml.dump(yml, default_flow_style=False)) f.close() print("All done!") - -if len(sys.argv) > 1 and sys.argv[1] == 'deconstruct': + + +if len(sys.argv) > 1 and sys.argv[1] == "deconstruct": deconstruct() else: construct() diff --git a/api/yaml/openapi/components/schemas/ActionCompleted.yaml b/kibble/api/yaml/openapi/components/schemas/ActionCompleted.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/ActionCompleted.yaml rename to kibble/api/yaml/openapi/components/schemas/ActionCompleted.yaml diff --git a/api/yaml/openapi/components/schemas/Biography.yaml b/kibble/api/yaml/openapi/components/schemas/Biography.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Biography.yaml rename to kibble/api/yaml/openapi/components/schemas/Biography.yaml diff --git a/api/yaml/openapi/components/schemas/CommitterList.yaml b/kibble/api/yaml/openapi/components/schemas/CommitterList.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/CommitterList.yaml rename to kibble/api/yaml/openapi/components/schemas/CommitterList.yaml diff --git a/api/yaml/openapi/components/schemas/Empty.yaml b/kibble/api/yaml/openapi/components/schemas/Empty.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Empty.yaml rename to kibble/api/yaml/openapi/components/schemas/Empty.yaml diff --git a/api/yaml/openapi/components/schemas/Error.yaml b/kibble/api/yaml/openapi/components/schemas/Error.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Error.yaml rename to kibble/api/yaml/openapi/components/schemas/Error.yaml diff --git 
a/api/yaml/openapi/components/schemas/Factor.yaml b/kibble/api/yaml/openapi/components/schemas/Factor.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Factor.yaml rename to kibble/api/yaml/openapi/components/schemas/Factor.yaml diff --git a/api/yaml/openapi/components/schemas/NewOrg.yaml b/kibble/api/yaml/openapi/components/schemas/NewOrg.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/NewOrg.yaml rename to kibble/api/yaml/openapi/components/schemas/NewOrg.yaml diff --git a/api/yaml/openapi/components/schemas/OrgMembers.yaml b/kibble/api/yaml/openapi/components/schemas/OrgMembers.yaml similarity index 99% rename from api/yaml/openapi/components/schemas/OrgMembers.yaml rename to kibble/api/yaml/openapi/components/schemas/OrgMembers.yaml index 588e89fa..c4196708 100644 --- a/api/yaml/openapi/components/schemas/OrgMembers.yaml +++ b/kibble/api/yaml/openapi/components/schemas/OrgMembers.yaml @@ -13,5 +13,3 @@ properties: required: - admins - members - - diff --git a/api/yaml/openapi/components/schemas/Organisation.yaml b/kibble/api/yaml/openapi/components/schemas/Organisation.yaml similarity index 99% rename from api/yaml/openapi/components/schemas/Organisation.yaml rename to kibble/api/yaml/openapi/components/schemas/Organisation.yaml index abd66cc5..3c7c8a9a 100644 --- a/api/yaml/openapi/components/schemas/Organisation.yaml +++ b/kibble/api/yaml/openapi/components/schemas/Organisation.yaml @@ -29,4 +29,3 @@ properties: required: - id - name - diff --git a/api/yaml/openapi/components/schemas/Phrase.yaml b/kibble/api/yaml/openapi/components/schemas/Phrase.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Phrase.yaml rename to kibble/api/yaml/openapi/components/schemas/Phrase.yaml diff --git a/api/yaml/openapi/components/schemas/PhraseList.yaml b/kibble/api/yaml/openapi/components/schemas/PhraseList.yaml similarity index 99% rename from api/yaml/openapi/components/schemas/PhraseList.yaml 
rename to kibble/api/yaml/openapi/components/schemas/PhraseList.yaml index e2df69b2..701294ef 100644 --- a/api/yaml/openapi/components/schemas/PhraseList.yaml +++ b/kibble/api/yaml/openapi/components/schemas/PhraseList.yaml @@ -12,4 +12,3 @@ properties: required: - okay - phrases - diff --git a/api/yaml/openapi/components/schemas/Sloc.yaml b/kibble/api/yaml/openapi/components/schemas/Sloc.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Sloc.yaml rename to kibble/api/yaml/openapi/components/schemas/Sloc.yaml diff --git a/api/yaml/openapi/components/schemas/Source.yaml b/kibble/api/yaml/openapi/components/schemas/Source.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Source.yaml rename to kibble/api/yaml/openapi/components/schemas/Source.yaml diff --git a/api/yaml/openapi/components/schemas/SourceID.yaml b/kibble/api/yaml/openapi/components/schemas/SourceID.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/SourceID.yaml rename to kibble/api/yaml/openapi/components/schemas/SourceID.yaml diff --git a/api/yaml/openapi/components/schemas/SourceList.yaml b/kibble/api/yaml/openapi/components/schemas/SourceList.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/SourceList.yaml rename to kibble/api/yaml/openapi/components/schemas/SourceList.yaml diff --git a/api/yaml/openapi/components/schemas/SourceListAdd.yaml b/kibble/api/yaml/openapi/components/schemas/SourceListAdd.yaml similarity index 99% rename from api/yaml/openapi/components/schemas/SourceListAdd.yaml rename to kibble/api/yaml/openapi/components/schemas/SourceListAdd.yaml index 8d82be0b..c1629dd6 100644 --- a/api/yaml/openapi/components/schemas/SourceListAdd.yaml +++ b/kibble/api/yaml/openapi/components/schemas/SourceListAdd.yaml @@ -29,7 +29,7 @@ properties: "cookie": "ponycookie" } } - + ] } required: diff --git a/api/yaml/openapi/components/schemas/SourceType.yaml 
b/kibble/api/yaml/openapi/components/schemas/SourceType.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/SourceType.yaml rename to kibble/api/yaml/openapi/components/schemas/SourceType.yaml diff --git a/api/yaml/openapi/components/schemas/SourceTypes.yaml b/kibble/api/yaml/openapi/components/schemas/SourceTypes.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/SourceTypes.yaml rename to kibble/api/yaml/openapi/components/schemas/SourceTypes.yaml diff --git a/api/yaml/openapi/components/schemas/Timeseries.yaml b/kibble/api/yaml/openapi/components/schemas/Timeseries.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Timeseries.yaml rename to kibble/api/yaml/openapi/components/schemas/Timeseries.yaml diff --git a/api/yaml/openapi/components/schemas/TimeseriesObject.yaml b/kibble/api/yaml/openapi/components/schemas/TimeseriesObject.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/TimeseriesObject.yaml rename to kibble/api/yaml/openapi/components/schemas/TimeseriesObject.yaml diff --git a/api/yaml/openapi/components/schemas/TopList.yaml b/kibble/api/yaml/openapi/components/schemas/TopList.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/TopList.yaml rename to kibble/api/yaml/openapi/components/schemas/TopList.yaml diff --git a/api/yaml/openapi/components/schemas/Trend.yaml b/kibble/api/yaml/openapi/components/schemas/Trend.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/Trend.yaml rename to kibble/api/yaml/openapi/components/schemas/Trend.yaml diff --git a/api/yaml/openapi/components/schemas/UserAccount.yaml b/kibble/api/yaml/openapi/components/schemas/UserAccount.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/UserAccount.yaml rename to kibble/api/yaml/openapi/components/schemas/UserAccount.yaml diff --git a/api/yaml/openapi/components/schemas/UserAccountEdit.yaml 
b/kibble/api/yaml/openapi/components/schemas/UserAccountEdit.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/UserAccountEdit.yaml rename to kibble/api/yaml/openapi/components/schemas/UserAccountEdit.yaml diff --git a/api/yaml/openapi/components/schemas/UserCredentials.yaml b/kibble/api/yaml/openapi/components/schemas/UserCredentials.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/UserCredentials.yaml rename to kibble/api/yaml/openapi/components/schemas/UserCredentials.yaml diff --git a/api/yaml/openapi/components/schemas/UserData.yaml b/kibble/api/yaml/openapi/components/schemas/UserData.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/UserData.yaml rename to kibble/api/yaml/openapi/components/schemas/UserData.yaml diff --git a/api/yaml/openapi/components/schemas/UserName.yaml b/kibble/api/yaml/openapi/components/schemas/UserName.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/UserName.yaml rename to kibble/api/yaml/openapi/components/schemas/UserName.yaml diff --git a/api/yaml/openapi/components/schemas/View.yaml b/kibble/api/yaml/openapi/components/schemas/View.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/View.yaml rename to kibble/api/yaml/openapi/components/schemas/View.yaml diff --git a/api/yaml/openapi/components/schemas/ViewList.yaml b/kibble/api/yaml/openapi/components/schemas/ViewList.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/ViewList.yaml rename to kibble/api/yaml/openapi/components/schemas/ViewList.yaml diff --git a/api/yaml/openapi/components/schemas/WidgetApp.yaml b/kibble/api/yaml/openapi/components/schemas/WidgetApp.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/WidgetApp.yaml rename to kibble/api/yaml/openapi/components/schemas/WidgetApp.yaml diff --git a/api/yaml/openapi/components/schemas/WidgetDesign.yaml 
b/kibble/api/yaml/openapi/components/schemas/WidgetDesign.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/WidgetDesign.yaml rename to kibble/api/yaml/openapi/components/schemas/WidgetDesign.yaml diff --git a/api/yaml/openapi/components/schemas/WidgetRow.yaml b/kibble/api/yaml/openapi/components/schemas/WidgetRow.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/WidgetRow.yaml rename to kibble/api/yaml/openapi/components/schemas/WidgetRow.yaml diff --git a/api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml b/kibble/api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml similarity index 99% rename from api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml rename to kibble/api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml index 433b9262..8b74cd7c 100644 --- a/api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml +++ b/kibble/api/yaml/openapi/components/schemas/defaultWidgetArgs.yaml @@ -79,4 +79,3 @@ properties: description: Enables relative comparison mode for API endpoints that have this feature. 
type: boolean example: false - \ No newline at end of file diff --git a/api/yaml/openapi/components/schemas/editView.yaml b/kibble/api/yaml/openapi/components/schemas/editView.yaml similarity index 100% rename from api/yaml/openapi/components/schemas/editView.yaml rename to kibble/api/yaml/openapi/components/schemas/editView.yaml diff --git a/api/yaml/openapi/components/securitySchemes/cookieAuth.yaml b/kibble/api/yaml/openapi/components/securitySchemes/cookieAuth.yaml similarity index 100% rename from api/yaml/openapi/components/securitySchemes/cookieAuth.yaml rename to kibble/api/yaml/openapi/components/securitySchemes/cookieAuth.yaml diff --git a/api/yaml/sourcetypes.yaml b/kibble/api/yaml/sourcetypes.yaml similarity index 98% rename from api/yaml/sourcetypes.yaml rename to kibble/api/yaml/sourcetypes.yaml index 9683c51c..d2b4e018 100644 --- a/api/yaml/sourcetypes.yaml +++ b/kibble/api/yaml/sourcetypes.yaml @@ -6,7 +6,7 @@ git: optauth: - username - password - + github: title: "GitHub repository (plus issues/PRs)" description: "This is GitHub repositories with issues and pull requests. For non-GitHub repos, please use the plain 'git' source type" @@ -25,7 +25,7 @@ jira: authrequired: true optauth: - username - - password + - password bugzilla: title: "BugZilla Project" @@ -92,7 +92,7 @@ twitter: - token_secret - consumer_key - consumer_secret - + discourse: title: Discourse description: A Discourse Forum System. 
@@ -100,4 +100,4 @@ discourse: example: https://discourse.example.com/ optauth: - username - - password \ No newline at end of file + - password diff --git a/api/yaml/widgets.yaml b/kibble/api/yaml/widgets.yaml similarity index 100% rename from api/yaml/widgets.yaml rename to kibble/api/yaml/widgets.yaml diff --git a/kibble/cli/__init__.py b/kibble/cli/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/cli/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/cli/make_account_command.py b/kibble/cli/make_account_command.py new file mode 100644 index 00000000..e0a59f68 --- /dev/null +++ b/kibble/cli/make_account_command.py @@ -0,0 +1,79 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from urllib.parse import urlparse + +import bcrypt +import elasticsearch + +from kibble.configuration import conf + + +class ESDatabase: + def __init__(self): + self.dbname = conf.get("elasticsearch", "dbname") + parsed = urlparse(conf.get("elasticsearch", "conn_uri")) + es_host = { + "host": parsed.hostname, + "port": parsed.port, + "use_ssl": conf.getboolean("elasticsearch", "ssl"), + "verify_certs": False, + "url_prefix": conf.get("elasticsearch", "uri"), + "http_auth": conf.get("elasticsearch", "auth") or None, + } + self.es = elasticsearch.Elasticsearch( + hosts=[es_host], max_retries=5, retry_on_timeout=True + ) + + def create_index(self, doc_type: str, id_: str, body: dict): + self.es.index(index=self.dbname, doc_type=doc_type, id=id_, body=body) + + +def make_account_cmd( + username: str, + password: str, + admin: bool = False, + adminorg: bool = False, + org: str = None, +) -> None: + """ + Create user kibble account. 
+ + :param username: username for login for example email + :param password: password used for login + :param admin: set to true if created user should has admin access level + :param adminorg: organization user owns + :param org: organisation user belongs to + """ + orgs = [org] or [] + aorgs = [adminorg] if adminorg else [] + + salt = bcrypt.gensalt() + pwd = bcrypt.hashpw(password.encode("utf-8"), salt).decode("ascii") + doc = { + "email": username, + "password": pwd, + "displayName": username, + "organisations": orgs, + "ownerships": aorgs, + "defaultOrganisation": None, # Default org for user + "verified": True, # Account verified via email? + "userlevel": "admin" if admin else "user", + } + db = ESDatabase() + db.create_index(doc_type="useraccount", id_=username, body=doc) + print("Account created!") diff --git a/kibble/cli/scanner_command.py b/kibble/cli/scanner_command.py new file mode 100644 index 00000000..ea89d224 --- /dev/null +++ b/kibble/cli/scanner_command.py @@ -0,0 +1,151 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import multiprocessing +import threading +import time +from typing import List + +from kibble.configuration import conf +from kibble.scanners.brokers import kibbleES + +PENDING_OBJECTS = [] +BIG_LOCK = threading.Lock() + + +def is_mine(id_): + balance = conf.get("scanner", "balance") + if not balance: + return False + node_no, num_nodes = balance.split("/") + node_no, num_nodes = int(node_no), int(num_nodes) + if num_nodes == 0: + return True + bignum = int(id_, 16) % num_nodes + if bignum == node_no - 1: + return True + return False + + +class ScanThread(threading.Thread): + """ + A thread object that grabs an item from the queue and processes + it, using whatever plugins will come out to play. + """ + + def __init__(self, broker, org, i, t=None, e=None): + super().__init__() + self.broker = broker + self.org = org + self.id = i + self.bit = self.broker.bitClass(self.broker, self.org, i) + self.stype = t + self.exclude = e + print("Initialized thread %i" % i) + + def run(self): + # pylint: disable=import-outside-toplevel + from kibble.scanners import scanners + + global BIG_LOCK, PENDING_OBJECTS # pylint: disable=global-statement + time.sleep(0.5) # Primarily to align printouts. + # While there are objects to snag + while PENDING_OBJECTS: + BIG_LOCK.acquire(blocking=True) + try: + # Try grabbing an object (might not be any left!) + obj = PENDING_OBJECTS.pop(0) + # If load balancing jobs, make sure this one is ours + if is_mine(obj["sourceID"]): + # Run through list of scanners in order, apply when useful + for sid, scanner in scanners.enumerate(): + if scanner.accepts(obj): + self.bit.pluginname = "plugins/scanners/" + sid + # Excluded scanner type? + if self.exclude and sid in self.exclude: + continue + # Specific scanner type or no types mentioned? 
+ if not self.stype or self.stype == sid: + scanner.scan(self.bit, obj) + except: # pylint: disable=bare-except + break + finally: + BIG_LOCK.release() + self.bit.pluginname = "core" + self.bit.pprint("No more objects, exiting!") + + +def scan_cmd( + scanners: List[str] = None, + exclude: List[str] = None, + org: str = None, + age: int = None, + source: str = None, + view: str = None, +): + global PENDING_OBJECTS # pylint: disable=global-statement + + print("Kibble Scanner starting") + print("Using direct ElasticSearch broker model") + broker = kibbleES.Broker() + + org_no = 0 + source_no = 0 + for org_item in broker.organisations(): + if not org or org == org_item.id: + print(f"Processing organisation {org_item.id}") + org_no += 1 + + # Compile source list + # If --age is passed, only append source that either + # have never been scanned, or have been scanned more than + # N hours ago by any scanner. + if age: + minAge = time.time() - int(age) * 3600 + for source_item in org_item.sources(view=view): + tooNew = False + if "steps" in source_item: + for _, step in source_item["steps"].items(): + if "time" in step and step["time"] >= minAge: + tooNew = True + break + if not tooNew: + if not source or (source == source_item["sourceID"]): + PENDING_OBJECTS.append(source) + else: + PENDING_OBJECTS = [] + for source_item in org_item.sources(view=view): + if not source or (source == source_item["sourceID"]): + PENDING_OBJECTS.append(source_item) + source_no += len(PENDING_OBJECTS) + + # Start up some threads equal to number of cores on the box, + # but no more than 4. We don't want an IOWait nightmare. + threads = [] + core_count = min((4, int(multiprocessing.cpu_count()))) + for i in range(1, core_count): + s_thread = ScanThread(broker, org_item, i + 1, scanners, exclude) + s_thread.start() + threads.append(s_thread) + + # Wait for them all to finish. 
+ for t in threads: + t.join() + + print( + f"All done scanning for now, found {org_no} organisations and {source_no} sources to process." + ) diff --git a/kibble/cli/setup_command.py b/kibble/cli/setup_command.py new file mode 100644 index 00000000..ef2ebb41 --- /dev/null +++ b/kibble/cli/setup_command.py @@ -0,0 +1,207 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import json +import logging +import os +import sys +from getpass import getpass + +import bcrypt +import tenacity +from elasticsearch import Elasticsearch + +from kibble.configuration import conf +from kibble.settings import MAPPING_DIRECTORY + +KIBBLE_VERSION = conf.get("api", "version") +KIBBLE_DB_VERSION = conf.get("api", "database") + + +def get_user_input(msg: str, secure: bool = False): + value = None + while not value: + value = getpass(msg) if secure else input(msg) + return value + + +def create_es_index( + conn_uri: str, + dbname: str, + shards: int, + replicas: int, + admin_name: str, + admin_pass: str, + skiponexist: bool, +): + """Creates Elasticsearch index used by Kibble""" + + # elasticsearch logs lots of warnings on retries/connection failure + logging.getLogger("elasticsearch").setLevel(logging.ERROR) + + mappings_json = os.path.join(MAPPING_DIRECTORY, "mappings.json") + with open(mappings_json, "r") as f: + mappings = json.load(f) + + es = Elasticsearch([conn_uri], max_retries=5, retry_on_timeout=True) + print(es.info()) + + es_version = es.info()["version"]["number"] + es6 = int(es_version.split(".")[0]) >= 6 + es7 = int(es_version.split(".")[0]) >= 7 + + if not es6: + print( + f"New Kibble installations require ElasticSearch 6.x or newer! " + f"You appear to be running {es_version}!" + ) + sys.exit(-1) + + # If ES >= 7, _doc is invalid and mapping should be rooted + if es7: + mappings["mappings"] = mappings["mappings"]["_doc"] + + # Check if index already exists + if es.indices.exists(dbname + "_api"): + # Skip this is DB exists and -k added + if skiponexist: + print("DB prefix exists, but --skiponexist used, skipping this step.") + return + print("Error: ElasticSearch DB prefix '%s' already exists!" 
% dbname) + sys.exit(-1) + + types = [ + "api", + # ci_*: CI service stats + "ci_build", + "ci_queue", + # code_* + evolution + file_history: git repo stats + "code_commit", + "code_commit_unique", + "code_modification", + "evolution", + "file_history", + # forum_*: forum stats (SO, Discourse, Askbot etc) + "forum_post", + "forum_topic", + # GitHub stats + "ghstats", + # im_*: Instant messaging stats + "im_stats", + "im_ops", + "im_msg", + "issue", + "logstats", + # email, mail*: Email statitics + "email", + "mailstats", + "mailtop", + # organisation, view, source, publish: UI Org DB + "organisation", + "view", + "publish", + "source", + # stats: Miscellaneous stats + "stats", + # social_*: Twitter, Mastodon, Facebook etc + "social_follow", + "social_followers", + "social_follower", + "social_person", + # uisession, useraccount, message: UI user DB + "uisession", + "useraccount", + "message", + # person: contributor DB + "person", + ] + + for t in types: + iname = f"{dbname}_{t}" + print(f"Creating index {iname}") + + settings = {"number_of_shards": shards, "number_of_replicas": replicas} + es.indices.create( + index=iname, body={"mappings": mappings["mappings"], "settings": settings} + ) + print("Indices created!") + print() + + salt = bcrypt.gensalt() + pwd = bcrypt.hashpw(admin_pass.encode("utf-8"), salt).decode("ascii") + print("Creating administrator account") + doc = { + "email": admin_name, # Username (email) + "password": pwd, # Hashed password + "displayName": "Administrator", # Display Name + "organisations": [], # Orgs user belongs to (default is none) + "ownerships": [], # Orgs user owns (default is none) + "defaultOrganisation": None, # Default org for user + "verified": True, # Account verified via email? + "userlevel": "admin", # User level (user/admin) + } + dbdoc = { + "apiversion": KIBBLE_VERSION, # Log current API version + "dbversion": KIBBLE_DB_VERSION, # Log the database revision we accept (might change!) 
+ } + es.index(index=dbname + "_useraccount", doc_type="_doc", id=admin_name, body=doc) + es.index(index=dbname + "_api", doc_type="_doc", id="current", body=dbdoc) + print("Account created!") + + +def do_setup( + uri: str, + dbname: str, + shards: str, + replicas: str, + autoadmin: bool, + skiponexist: bool, +): + print("Welcome to the Apache Kibble setup script!") + + admin_name = "admin@kibble" + admin_pass = "kibbleAdmin" + if not autoadmin: + admin_name = get_user_input( + "Enter an email address for the administrator account: " + ) + admin_pass = get_user_input( + "Enter a password for the administrator account: ", secure=True + ) + + # Create Elasticsearch index + # Retry in case ES is not yet up + print(f"Elasticsearch: {uri}") + for attempt in tenacity.Retrying( + retry=tenacity.retry_if_exception_type(exception_types=Exception), + wait=tenacity.wait_fixed(10), + stop=tenacity.stop_after_attempt(10), + reraise=True, + ): + with attempt: + print("Trying to create ES index...") + create_es_index( + conn_uri=uri, + dbname=dbname, + shards=int(shards), + replicas=int(replicas), + admin_name=admin_name, + admin_pass=admin_pass, + skiponexist=skiponexist, + ) + print() + print("All done, Kibble should...work now :)") diff --git a/kibble/configuration.py b/kibble/configuration.py new file mode 100644 index 00000000..8c043ca1 --- /dev/null +++ b/kibble/configuration.py @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +from configparser import ConfigParser + +DEFAULT_KIBBLE_CONFIG_LOCATION = os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, "kibble.ini" +) + + +class KibbleConfigParser(ConfigParser): # pylint: disable=too-many-ancestors + """Custom Kibble config parser""" + + def __init__(self): + super().__init__() + + +conf = KibbleConfigParser() +conf.read(DEFAULT_KIBBLE_CONFIG_LOCATION) diff --git a/setup/mappings.json b/kibble/mappings/mappings.json similarity index 100% rename from setup/mappings.json rename to kibble/mappings/mappings.json diff --git a/kibble/scanners/README.md b/kibble/scanners/README.md new file mode 100644 index 00000000..5746a118 --- /dev/null +++ b/kibble/scanners/README.md @@ -0,0 +1,62 @@ +# Kibble Scanner Application +The Kibble Scanners collect information for the Kibble Suite. + +## Setup instructions: + + - Edit kibble.ini to match your Kibble service + +## How to run: + + - On a daily/weekly/whatever basis, run: `kibble scan`. + +### Command line options: + +``` +Usage: kibble scan [OPTIONS] + +Options: + -t, --type TEXT Specific type of scanner to run (default is run all + scanners) + + -e, --exclude TEXT Specific type of scanner(s) to exclude + -o, --org TEXT The organisation to gather stats for. If left out, all + organisations will be scanned. + + -a, --age TEXT Minimum age in hours before performing a new scan on an + already processed source. --age 12 will not process any + source that was processed less than 12 hours ago, but + will process new sources. 
+ + -s, --source TEXT Specific source (wildcard) to run scans on. + -v, --view TEXT Specific source view to scan (default is scan all + sources) + + --help Show this message and exit. +``` + +## Currently available scanner plugins: + + - Apache Pony Mail (`scanners/ponymail.py`) + - Atlassian JIRA (`scanners/jira.py`) + - BugZilla Issue Tracker (`scanners/bugzilla.py`) + - BuildBot (`scanners/buildbot.py`) + - Discourse (`scanners/discourse.py`) + - Gerrit Code Review (`scanners/gerrit.py`) + - Git Repository Fetcher (`scanners/git-sync.py`) + - Git Census Counter (`scanners/git-census.py`) + - Git Code Evolution Counter (`scanners/git-evolution.py`) + - Git SLoC Counter (`scanners/git-sloc.py`) + - GitHub Issues/PRs (`scanners/github.py`) + - GitHub Traffic Statistics (`scanners/github-stats.py`) + - GNU Mailman Pipermail (`scanners/pipermail.py`) + - Jenkins (`scanners/jenkins.py`) + - Travis CI (`scanners/travis.py`) + +## Requirements: + + - [cloc](https://github.com/AlDanial/cloc) version 1.76 or later `(optional)` + - git binaries + - python3 (3.3 or later) + - python3-elasticsearch + - python3-certifi + - python3-yaml diff --git a/kibble/scanners/__init__.py b/kibble/scanners/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/scanners/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/scanners/brokers/__init__.py b/kibble/scanners/brokers/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/scanners/brokers/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/scanners/brokers/kibbleES.py b/kibble/scanners/brokers/kibbleES.py new file mode 100644 index 00000000..975e669f --- /dev/null +++ b/kibble/scanners/brokers/kibbleES.py @@ -0,0 +1,352 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import sys +from urllib.parse import urlparse + +import elasticsearch +import elasticsearch.helpers + +from kibble.configuration import conf + +KIBBLE_DB_VERSION = 2 # Current DB struct version +ACCEPTED_DB_VERSIONS = [1, 2] # Versions we know how to work with. + + +class KibbleESWrapper: + """ + Class for rewriting old-style queries to the new ones, + where doc_type is an integral part of the DB name + """ + + def __init__(self, ES): + self.ES = ES + self.indices = self.indicesClass(ES) + + def get(self, index, doc_type, id): + return self.ES.get(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def exists(self, index, doc_type, id): + return self.ES.exists(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def delete(self, index, doc_type, id): + return self.ES.delete(index=index + "_" + doc_type, doc_type="_doc", id=id) + + def index(self, index, doc_type, id, body): + return self.ES.index( + index=index + "_" + doc_type, doc_type="_doc", id=id, body=body + ) + + def update(self, index, doc_type, id, body): + return self.ES.update( + index=index + "_" + doc_type, doc_type="_doc", id=id, body=body + ) + + def search(self, index, doc_type, size=100, body=None): + return self.ES.search( + index=index + "_" + doc_type, doc_type="_doc", size=size, body=body + ) + + def count(self, index, doc_type, body=None): + return self.ES.count(index=index + "_" + doc_type, doc_type="_doc", 
body=body) + + class indicesClass: + """ Indices helper class """ + + def __init__(self, ES): + self.ES = ES + + def exists(self, index): + return self.ES.indices.exists(index=index) + + +class KibbleESWrapperSeven: + """ + Class for rewriting old-style queries to the new ones, + where doc_type is an integral part of the DB name and NOT USED (>= 7.x) + """ + + def __init__(self, ES): + self.ES = ES + self.indices = self.indicesClass(ES) + + def get(self, index, doc_type, id): + return self.ES.get(index=index + "_" + doc_type, id=id) + + def exists(self, index, doc_type, id): + return self.ES.exists(index=index + "_" + doc_type, id=id) + + def delete(self, index, doc_type, id): + return self.ES.delete(index=index + "_" + doc_type, id=id) + + def index(self, index, doc_type, id, body): + return self.ES.index(index=index + "_" + doc_type, id=id, body=body) + + def update(self, index, doc_type, id, body): + return self.ES.update(index=index + "_" + doc_type, id=id, body=body) + + def search(self, index, doc_type, size=100, body=None): + return self.ES.search(index=index + "_" + doc_type, size=size, body=body) + + def count(self, index, doc_type, body=None): + return self.ES.count(index=index + "_" + doc_type, body=body) + + class indicesClass: + """ Indices helper class """ + + def __init__(self, ES): + self.ES = ES + + def exists(self, index): + return self.ES.indices.exists(index=index) + + +class KibbleBit: + """ KibbleBit class with direct ElasticSearch access """ + + def __init__(self, broker, organisation, tid): + self.organisation = organisation + self.broker = broker + self.json_queue = [] + self.queueMax = 1000 # Entries to keep before bulk pushing + self.pluginname = "" + self.tid = tid + self.dbname = conf.get("elasticsearch", "database") + + def __del__(self): + """ On unload/delete, push the last chunks of data to ES """ + if self.json_queue: + print("Pushing stragglers") + self.bulk() + + def pprint(self, string, err=False): + line = "[thread#%i:%s]: %s" 
% (self.tid, self.pluginname, string) + if err: + sys.stderr.write(line + "\n") + else: + print(line) + + def update_source(self, source): + """ Updates a source document, usually with a status update """ + self.broker.DB.index( + index=self.dbname, + doc_type="source", + id=source["sourceID"], + body=source, + ) + + def get(self, doctype, docid): + """ Fetches a document from the DB """ + doc = self.broker.DB.get( + index=self.dbname, + doc_type=doctype, + id=docid, + ) + if doc: + return doc["_source"] + return None + + def exists(self, doctype, docid): + """ Checks whether a document already exists or not """ + return self.broker.DB.exists( + index=self.dbname, + doc_type=doctype, + id=docid, + ) + + def index(self, doctype, docid, document): + """ Adds a new document to the index """ + dbname = self.dbname + self.broker.DB.index(index=dbname, doc_type=doctype, id=docid, body=document) + + def append(self, t, doc): + """ Append a document to the bulk push queue """ + if "id" not in doc: + sys.stderr.write("No doc ID specified!\n") + return + doc["doctype"] = t + self.json_queue.append(doc) + # If we've crossed the bulk limit, do a push + if len(self.json_queue) > self.queueMax: + print("Bulk push forced") + self.bulk() + + def bulk(self): + """ Push pending JSON objects in the queue to ES""" + xjson = self.json_queue + js_arr = [] + self.json_queue = [] + for entry in xjson: + js = entry + doc = js + js["@version"] = 1 + dbname = self.dbname + if self.broker.noTypes: + dbname += "_%s" % js["doctype"] + js_arr.append( + { + "_op_type": "update" if js.get("upsert") else "index", + "_index": dbname, + "_type": "_doc", + "_id": js["id"], + "doc" if js.get("upsert") else "_source": doc, + "doc_as_upsert": True, + } + ) + else: + js_arr.append( + { + "_op_type": "update" if js.get("upsert") else "index", + "_index": dbname, + "_type": js["doctype"], + "_id": js["id"], + "doc" if js.get("upsert") else "_source": doc, + "doc_as_upsert": True, + } + ) + try: + 
elasticsearch.helpers.bulk(self.broker.oDB, js_arr) + except Exception as err: + print("Warning: Could not bulk insert: %s" % err) + + +class KibbleOrganisation: + """ KibbleOrg with direct ElasticSearch access """ + + def __init__(self, broker, org): + """ Init an org, set up ElasticSearch for KibbleBits later on """ + + self.broker = broker + self.id = org + self.dbname = conf.get("elasticsearch", "database") + + def sources(self, sourceType=None, view=None): + """ Get all sources or sources of a specific type for an org """ + s = [] + # Search for all sources of this organisation + mustArray = [{"term": {"organisation": self.id}}] + if view: + res = self.broker.DB.get( + index=self.dbname, + doc_type="view", + id=view, + ) + if res: + mustArray.append({"terms": {"sourceID": res["_source"]["sourceList"]}}) + # If we want a specific source type, amend the search criteria + if sourceType: + mustArray.append({"term": {"type": sourceType}}) + # Run the search, fetch all results, 9999 max. TODO: Scroll??? 
+ res = self.broker.DB.search( + index=self.dbname, + doc_type="source", + size=9999, + body={"query": {"bool": {"must": mustArray}}, "sort": {"sourceURL": "asc"}}, + ) + + for hit in res["hits"]["hits"]: + if sourceType is None or hit["_source"]["type"] == sourceType: + s.append(hit["_source"]) + return s + + +class Broker: + """Master Kibble Broker Class for direct ElasticSearch access.""" + + def __init__(self): + conn_uri = conf.get("elasticsearch", "conn_uri") + parsed = urlparse(conf.get("elasticsearch", "conn_uri")) + self.dbname = conf.get("elasticsearch", "dbname") + + user = conf.get("elasticsearch", "user", fallback=None) + password = conf.get("elasticsearch", "password", fallback=None) + auth = (user, password) if user else None + + print(f"Connecting to ElasticSearch database at {conn_uri}") + es = elasticsearch.Elasticsearch( + [ + { + "host": parsed.hostname, + "port": parsed.port, + "use_ssl": conf.getboolean("elasticsearch", "ssl"), + "verify_certs": False, + "url_prefix": conf.get("elasticsearch", "uri"), + "http_auth": auth, + } + ], + max_retries=5, + retry_on_timeout=True, + ) + es_info = es.info() + print("Connected!") + self.DB = es + self.oDB = es # Original ES class, always. the .DB may change + self.bitClass = KibbleBit + # This bit is required since ES 6.x and above don't like document types + self.noTypes = int(es_info["version"]["number"].split(".")[0]) >= 6 + self.seven = int(es_info["version"]["number"].split(".")[0]) >= 7 + if self.noTypes: + print("This is a type-less DB, expanding database names instead.") + if self.seven: + print("We're using ES >= 7.x, NO DOC_TYPE!") + es = KibbleESWrapperSeven(es) + else: + es = KibbleESWrapper(es) + self.DB = es + if not es.indices.exists(index=self.dbname + "_api"): + raise SystemExit( + f"Could not find database group {self.dbname}_* in ElasticSearch!" 
+ ) + else: + print("This DB supports types, utilizing..") + if not es.indices.exists(index=self.dbname): + raise SystemExit( + f"Could not find database {self.dbname} in ElasticSearch!" + ) + apidoc = es.get(index=self.dbname, doc_type="api", id="current")["_source"] + apidoc_db_version = int(apidoc["dbversion"]) + + # We currently accept and know how to use DB versions 1 and 2. + if apidoc_db_version not in ACCEPTED_DB_VERSIONS: + if apidoc_db_version > KIBBLE_DB_VERSION: + raise SystemExit( + "The database '%s' uses a newer structure format (version %u) than the scanners " + "(version %u). Please upgrade your scanners.\n" + % (self.dbname, apidoc_db_version, KIBBLE_DB_VERSION) + ) + if apidoc_db_version < KIBBLE_DB_VERSION: + raise SystemExit( + "The database '%s' uses an older structure format (version %u) than the scanners " + "(version %u). Please upgrade your main Kibble server.\n" + % (self.dbname, apidoc_db_version, KIBBLE_DB_VERSION) + ) + + def organisations(self): + """ Return a list of all organisations """ + + # Run the search, fetch all orgs, 9999 max. TODO: Scroll??? 
+ res = self.DB.search( + index=self.dbname, + doc_type="organisation", + size=9999, + body={"query": {"match_all": {}}}, + ) + + for hit in res["hits"]["hits"]: + org = hit["_source"]["id"] + orgClass = KibbleOrganisation(self, org) + yield orgClass diff --git a/kibble/scanners/mapping.json b/kibble/scanners/mapping.json new file mode 100644 index 00000000..04ca3f59 --- /dev/null +++ b/kibble/scanners/mapping.json @@ -0,0 +1,455 @@ +{ + "mappings": { + "email": { + "properties": { + "@version": { + "type": "long" + }, + "address": { + "type": "string", + "index": "not_analyzed" + }, + "date": { + "type": "date", + "store": true, + "format": "yyyy/MM/dd HH:mm:ss" + }, + "hash": { + "type": "string", + "index": "not_analyzed" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sender": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "ts": { + "type": "long" + } + } + }, + "account": { + "properties": { + "cookie": { + "type": "string", + "index": "not_analyzed" + }, + "email": { + "type": "string", + "index": "not_analyzed" + }, + "fullname": { + "type": "string" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "orgs": { + "type": "string" + }, + "password": { + "type": "string", + "index": "not_analyzed" + }, + "request_id": { + "type": "string", + "index": "not_analyzed" + }, + "screenname": { + "type": "string", + "index": "not_analyzed" + }, + "tag": { + "type": "string", + "index": "not_analyzed" + }, + "verified": { + "type": "boolean" + } + } + }, + "code_commit": { + "properties": { + "@version": { + "type": "long" + }, + "author_email": { + "type": "string", + "index": "not_analyzed" + }, + "author_name": { + "type": "string", + "index": "not_analyzed" + }, + "committer_email": { + "type": "string", + "index": 
"not_analyzed" + }, + "committer_name": { + "type": "string", + "index": "not_analyzed" + }, + "date": { + "type": "date", + "store": true, + "format": "yyyy/MM/dd HH:mm:ss" + }, + "deletions": { + "type": "long" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "insertions": { + "type": "long" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "source": { + "type": "string", + "index": "not_analyzed" + }, + "ts": { + "type": "long" + }, + "tsday": { + "type": "long" + }, + "vcs": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "code_commit_unique": { + "properties": { + "@version": { + "type": "long" + }, + "author_email": { + "type": "string", + "index": "not_analyzed" + }, + "author_name": { + "type": "string", + "index": "not_analyzed" + }, + "committer_email": { + "type": "string", + "index": "not_analyzed" + }, + "committer_name": { + "type": "string", + "index": "not_analyzed" + }, + "date": { + "type": "date", + "store": true, + "format": "yyyy/MM/dd HH:mm:ss" + }, + "deletions": { + "type": "long" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "insertions": { + "type": "long" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "source": { + "type": "string", + "index": "not_analyzed" + }, + "ts": { + "type": "long" + }, + "tsday": { + "type": "long" + }, + "vcs": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "org": { + "properties": { + "admins": { + "type": "string" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "name": { + "type": "string", + "index": "not_analyzed" + }, + "request_id": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "mailstats": { + "properties": { + "authors": { + "type": "long" + }, + "date": { + "type": "date", + "store": true, + "format": 
"yyyy/MM/dd HH:mm:ss" + }, + "emails": { + "type": "long" + }, + "hash": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "topics": { + "type": "long" + } + } + }, + "mailtop": { + "properties": { + "date": { + "type": "date", + "store": true, + "format": "yyyy/MM/dd HH:mm:ss" + }, + "emails": { + "type": "long" + }, + "hash": { + "type": "string", + "index": "not_analyzed" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "shash": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "subject": { + "type": "string" + }, + "ts": { + "type": "long" + } + } + }, + "source": { + "properties": { + "default_branch": { + "type": "string" + }, + "exception": { + "type": "string" + }, + "good": { + "type": "boolean" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "sourceURL": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sync": { + "type": "double" + }, + "tag": { + "type": "string", + "index": "not_analyzed" + }, + "type": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "person": { + "properties": { + "@version": { + "type": "long" + }, + "address": { + "type": "string", + "index": "not_analyzed" + }, + "email": { + "type": "string", + "index": "not_analyzed" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "name": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "issue": { + "properties": { + "assignee": { + "type": "string", + "index": "not_analyzed" + }, + "changeDate": { + "type": "date", + "store": true, + "format": "yyyy/MM/dd HH:mm:ss" + }, + 
"closed": { + "type": "double" + }, + "closedDate": { + "type": "date", + "format": "yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis" + }, + "comments": { + "type": "long" + }, + "created": { + "type": "double" + }, + "createdDate": { + "type": "date", + "format": "yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis" + }, + "id": { + "type": "string", + "index": "not_analyzed" + }, + "issueCloser": { + "type": "string", + "index": "not_analyzed" + }, + "issueCreator": { + "type": "string", + "index": "not_analyzed" + }, + "key": { + "type": "string", + "index": "not_analyzed" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "status": { + "type": "string", + "index": "not_analyzed" + }, + "title": { + "type": "string", + "index": "not_analyzed" + }, + "url": { + "type": "string", + "index": "not_analyzed" + } + } + }, + "evolution": { + "properties": { + "blank": { + "type": "long" + }, + "comments": { + "type": "long" + }, + "cost": { + "type": "double" + }, + "loc": { + "type": "long" + }, + "organisation": { + "type": "string", + "index": "not_analyzed" + }, + "sourceID": { + "type": "string", + "index": "not_analyzed" + }, + "time": { + "type": "double" + }, + "years": { + "type": "double" + } + } + } + } + +} diff --git a/kibble/scanners/scanners/__init__.py b/kibble/scanners/scanners/__init__.py new file mode 100644 index 00000000..771ba57d --- /dev/null +++ b/kibble/scanners/scanners/__init__.py @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This file contains, in execution order, a list of the available +scanners that Kibble has. +""" + +import importlib + +# Define, in order of priority, all scanner plugins we have +__all__ = [ + "git-sync", # This needs to precede other VCS scanners! + "git-census", + "git-sloc", + "git-evolution", + "jira", + "ponymail", + "ponymail-tone", + "ponymail-kpe", + "pipermail", + "github-issues", + "bugzilla", + "gerrit", + "jenkins", + "buildbot", + "travis", + "discourse", +] + +# Import each plugin into a hash called 'scanners' +scanners = {} + +for p in __all__: + scanner = importlib.import_module("kibble.scanners.scanners.%s" % p) + scanners[p] = scanner + # This should ideally be pprint, meh + print( + "[core]: Loaded plugins/scanners/%s v/%s (%s)" + % (p, scanner.version, scanner.title) + ) + + +def enumerate(): + """ Returns the scanners as a dictionary, sorted by run-order """ + for p in __all__: + yield p, scanners[p] diff --git a/kibble/scanners/scanners/bugzilla.py b/kibble/scanners/scanners/bugzilla.py new file mode 100644 index 00000000..a6ec2381 --- /dev/null +++ b/kibble/scanners/scanners/bugzilla.py @@ -0,0 +1,465 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" This is the BugZilla scanner plugin for Kibble """ + +import hashlib +import json +import re +import time +import urllib +from threading import Lock, Thread + +from kibble.scanners.utils import jsonapi + +title = "Scanner for BugZilla" +version = "0.1.0" + + +def accepts(source): + """ Determine if this is a BugZilla source """ + if source["type"] == "bugzilla": + return True + if source["type"] == "issuetracker": + bz = re.match( + r"(https?://\S+?)(/jsonrpc\.cgi)?[\s:?]+(.+)", source["sourceURL"] + ) + if bz: + return True + return False + + +def get_time(string): + return time.mktime( + time.strptime(re.sub(r"[zZ]", "", str(string)), "%Y-%m-%dT%H:%M:%S") + ) + + +def assigned(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "assignee": + return True + return False + + +def wfi(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "status" and item["toString"] == "Waiting for Infra": + return True + return False + + +def wfu(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "status" and item["toString"] == "Waiting for user": + return True + return False + + +def moved(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "Key" and item["toString"].find("INFRA-") != -1: + return True + return False + + +def was_closed(js): + if "changelog" in js: + cjs = js["changelog"]["histories"] + for citem in cjs: + if "items" in citem: + for item in citem["items"]: + if item["field"] == "status" and ( + item["toString"] == "Closed" or item["toString"] == "Resolved" + ): + 
return True, citem["author"] + else: + if "items" in js: + for item in js["items"]: + if item["field"] == "status" and item["toString"] == "Closed": + return True, None + return False, None + + +def resolved(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "resolution" and ( + item["toString"] != "Pending Closed" + and item["toString"] != "Unresolved" + ): + return True + return False + + +def pchange(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "priority": + return True + return False + + +def scan_ticket(bug, kibble_bit, source, open_tickets, u, dom): + try: + key = bug["id"] + dhash = hashlib.sha224( + ("%s-%s-%s" % (source["organisation"], source["sourceURL"], key)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + found = kibble_bit.exists("issue", dhash) + parseIt = False + if not found: + parseIt = True + else: + ticket = kibble_bit.get("issue", dhash) + if ticket["status"] == "closed" and key in open_tickets: + kibble_bit.pprint("Ticket was reopened, reparsing") + parseIt = True + elif ticket["status"] == "open" and not key in open_tickets: + kibble_bit.pprint("Ticket was recently closed, parsing it") + parseIt = True + else: + pass + # print("Ticket hasn't changed, ignoring...") + + if parseIt: + kibble_bit.pprint("Parsing data from BugZilla for #%s" % key) + + params = {"ids": [int(key)], "limit": 0} + if ( + source["creds"] + and "username" in source["creds"] + and source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + params["Bugzilla_login"] = source["creds"]["username"] + params["Bugzilla_password"] = source["creds"]["password"] + ticketsURL = "%s?method=Bug.get¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + + js = jsonapi.get(ticketsURL) + js = js["result"]["bugs"][0] + creator = {"name": bug["creator"], "email": js["creator"]} + closer = {} + cd = get_time(js["creation_time"]) + rd = None + status = "open" + if js["status"] in ["CLOSED", 
"RESOLVED"]: + status = "closed" + kibble_bit.pprint("%s was closed, finding out who did that" % key) + ticketsURL = "%s?method=Bug.history¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + hjs = jsonapi.get(ticketsURL) + history = hjs["result"]["bugs"][0]["history"] + for item in history: + for change in item["changes"]: + if ( + change["field_name"] == "status" + and "added" in change + and change["added"] in ["CLOSED", "RESOLVED"] + ): + rd = get_time(item["when"]) + closer = {"name": item["who"], "email": item["who"]} + break + kibble_bit.pprint("Counting comments for %s..." % key) + ticketsURL = "%s?method=Bug.comments¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + hjs = jsonapi.get(ticketsURL) + comments = len(hjs["result"]["bugs"][str(key)]["comments"]) + + title = bug["summary"] + del params["ids"] + if closer: + + pid = hashlib.sha1( + ("%s%s" % (source["organisation"], closer["email"])).encode( + "ascii", errors="replace" + ) + ).hexdigest() + found = kibble_bit.exists("person", pid) + if not found: + params["names"] = [closer["email"]] + ticketsURL = "%s?method=User.get¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + + try: + ujs = jsonapi.get(ticketsURL) + displayName = ujs["result"]["users"][0]["real_name"] + except: # pylint: disable=bare-except # pylint: disable=bare-except + displayName = closer["email"] + if displayName and len(displayName) > 0: + # Add to people db + + jsp = { + "name": displayName, + "email": closer["email"], + "organisation": source["organisation"], + "id": pid, + } + # print("Updating person DB for closer: %s (%s)" % (displayName, closerEmail)) + kibble_bit.index("person", pid, jsp) + + if creator: + pid = hashlib.sha1( + ("%s%s" % (source["organisation"], creator["email"])).encode( + "ascii", errors="replace" + ) + ).hexdigest() + found = kibble_bit.exists("person", pid) + if not found: + if not creator["name"]: + params["names"] = [creator["email"]] + ticketsURL = 
"%s?method=User.get¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + try: + ujs = jsonapi.get(ticketsURL) + creator["name"] = ujs["result"]["users"][0]["real_name"] + except: # pylint: disable=bare-except # pylint: disable=bare-except + creator["name"] = creator["email"] + if creator["name"] and len(creator["name"]) > 0: + # Add to people db + + jsp = { + "name": creator["name"], + "email": creator["email"], + "organisation": source["organisation"], + "id": pid, + } + kibble_bit.index("person", pid, jsp) + + jso = { + "id": dhash, + "key": key, + "organisation": source["organisation"], + "sourceID": source["sourceID"], + "url": "%s/show_bug.cgi?id=%s" % (dom, key), + "status": status, + "created": cd, + "closed": rd, + "issuetype": "issue", + "issueCloser": closer["email"] if "email" in closer else None, + "createdDate": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(cd)), + "closedDate": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(rd)) + if rd + else None, + "changeDate": time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime(rd if rd else cd) + ), + "assignee": None, + "issueCreator": creator["email"], + "comments": comments, + "title": title, + } + kibble_bit.append("issue", jso) + time.sleep(0.5) # BugZilla is notoriously slow. 
Maybe remove this later + return True + except Exception as err: + kibble_bit.pprint(err) + return False + + +class BzThread(Thread): + def __init__(self, KibbleBit, source, block, pt, ot, u, dom): + super().__init__() + self.KibbleBit = KibbleBit + self.source = source + self.block = block + self.pendingTickets = pt + self.openTickets = ot + self.u = u + self.dom = dom + + def run(self): + bad_ones = 0 + + while len(self.pendingTickets) > 0 and bad_ones <= 50: + if len(self.pendingTickets) % 10 == 0: + self.KibbleBit.pprint( + "%u elements left to count" % len(self.pendingTickets) + ) + self.block.acquire() + try: + rl = self.pendingTickets.pop(0) + except Exception: # list empty, likely + self.block.release() + return + if not rl: + self.block.release() + return + self.block.release() + if not scan_ticket( + rl, self.KibbleBit, self.source, self.openTickets, self.u, self.dom + ): + self.KibbleBit.pprint("Ticket %s seems broken, skipping" % rl["id"]) + bad_ones += 1 + if bad_ones > 50: + self.KibbleBit.pprint("Too many errors, bailing!") + self.source["steps"]["issues"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": False, + } + self.KibbleBit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + url = source["sourceURL"] + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Parsing BugZilla changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + bz = re.match(r"(https?://\S+?)(/jsonrpc\.cgi)?[\s:?]+(.+)", url) + if bz: + if ( + source["creds"] + and "username" in source["creds"] + and source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + creds = "%s:%s" % (source["creds"]["username"], source["creds"]["password"]) + pending_tickets = [] + open_tickets = [] + + # Get base URL, list and domain to parse + dom = bz.group(1) + dom = 
re.sub(r"/+$", "", dom) + u = "%s/jsonrpc.cgi" % dom + instance = bz.group(3) + + params = { + "product": [instance], + "status": [ + "RESOLVED", + "CLOSED", + "NEW", + "UNCOMFIRMED", + "ASSIGNED", + "REOPENED", + "VERIFIED", + ], + "include_fields": ["id", "creation_time", "status", "summary", "creator"], + "limit": 10000, + "offset": 1, + } + # If * is requested, just omit the product name + if instance == "*": + params = { + "status": [ + "RESOLVED", + "CLOSED", + "NEW", + "UNCOMFIRMED", + "ASSIGNED", + "REOPENED", + "VERIFIED", + ], + "include_fields": [ + "id", + "creation_time", + "status", + "summary", + "creator", + ], + "limit": 10000, + "offset": 1, + } + + tickets_url = "%s?method=Bug.search¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + + while True: + try: + js = jsonapi.get(tickets_url, auth=creds) + except: # pylint: disable=bare-except # pylint: disable=bare-except + kibble_bit.pprint("Couldn't fetch more tickets, bailing") + break + + if len(js["result"]["bugs"]) > 0: + kibble_bit.pprint( + "%s: Found %u tickets..." + % ( + source["sourceURL"], + ((params.get("offset", 1) - 1) + len(js["result"]["bugs"])), + ) + ) + for bug in js["result"]["bugs"]: + pending_tickets.append(bug) + if bug["status"] not in ["RESOLVED", "CLOSED"]: + open_tickets.append(bug["id"]) + params["offset"] += 10000 + tickets_url = "%s?method=Bug.search¶ms=[%s]" % ( + u, + urllib.parse.quote(json.dumps(params)), + ) + else: + kibble_bit.pprint("No more tickets left to scan") + break + + kibble_bit.pprint( + "Found %u open tickets, %u closed." 
+ % (len(open_tickets), len(pending_tickets) - len(open_tickets)) + ) + + block = Lock() + threads = [] + # TODO: Fix this loop + for i in range(0, 4): + t = BzThread( + kibble_bit, source, block, pending_tickets, open_tickets, u, dom + ) + threads.append(t) + t.start() + + for t in threads: + t.join() + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Issue tracker (BugZilla) successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/buildbot.py b/kibble/scanners/scanners/buildbot.py new file mode 100644 index 00000000..194206bc --- /dev/null +++ b/kibble/scanners/scanners/buildbot.py @@ -0,0 +1,274 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the Kibble Buildbot scanner plugin. 
+""" + +import datetime +import hashlib +import re +import threading +import time + +from kibble.scanners.utils import jsonapi + +title = "Scanner for Buildbot" +version = "0.1.0" + + +def accepts(source): + """ Determines whether we want to handle this source """ + if source["type"] == "buildbot": + return True + return False + + +def scan_job(KibbleBit, source, job, creds): + """ Scans a single job for activity """ + dhash = hashlib.sha224( + ("%s-%s-%s" % (source["organisation"], source["sourceID"], job)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + doc = None + found = KibbleBit.exists("cijob", dhash) + + jobURL = "%s/json/builders/%s/builds/_all" % (source["sourceURL"], job) + KibbleBit.pprint(jobURL) + jobjson = jsonapi.get(jobURL, auth=creds) + + # If valid JSON, ... + if jobjson: + for buildno, data in jobjson.items(): + buildhash = hashlib.sha224( + ( + "%s-%s-%s-%s" + % (source["organisation"], source["sourceID"], job, buildno) + ).encode("ascii", errors="replace") + ).hexdigest() + builddoc = None + try: + builddoc = KibbleBit.get("ci_build", buildhash) + except: # pylint: disable=bare-except # pylint: disable=bare-except + pass + + # If this build already completed, no need to parse it again + if builddoc and builddoc.get("completed", False): + continue + + KibbleBit.pprint( + "[%s-%s] This is new or pending, analyzing..." 
% (job, buildno) + ) + + completed = "currentStep" in data + + # Get build status (success, failed, canceled etc) + status = "building" + if "successful" in data.get("text", []): + status = "success" + if "failed" in data.get("text", []): + status = "failed" + if "exception" in data.get("text", []): + status = "aborted" + + DUR = 0 + # Calc when the build finished + if completed and len(data.get("times", [])) == 2 and data["times"][1]: + FIN = data["times"][1] + DUR = FIN - data["times"][0] + else: + FIN = 0 + + doc = { + # Build specific data + "id": buildhash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(FIN)), + "buildID": buildno, + "completed": completed, + "duration": DUR * 1000, # Buildbot does seconds, not milliseconds + "job": job, + "jobURL": "%s/builders/%s" % (source["sourceURL"], job), + "status": status, + "started": int(data["times"][0]), + "ci": "buildbot", + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + KibbleBit.append("ci_build", doc) + # Yay, it worked! + return True + + # Boo, it failed! 
+ KibbleBit.pprint("Fetching job data failed!") + return False + + +class BuildbotThread(threading.Thread): + """ Generic thread class for scheduling multiple scans at once """ + + def __init__(self, block, KibbleBit, source, creds, jobs): + super().__init__() + self.block = block + self.KibbleBit = KibbleBit + self.creds = creds + self.source = source + self.jobs = jobs + + def run(self): + bad_ones = 0 + while len(self.jobs) > 0 and bad_ones <= 50: + self.block.acquire() + try: + job = self.jobs.pop(0) + except Exception: + self.block.release() + return + if not job: + self.block.release() + return + self.block.release() + if not scan_job(self.KibbleBit, self.source, job, self.creds): + self.KibbleBit.pprint("[%s] This borked, trying another one" % job) + bad_ones += 1 + if bad_ones > 100: + self.KibbleBit.pprint("Too many errors, bailing!") + self.source["steps"]["ci"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": False, + } + self.KibbleBit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + # Simple URL check + buildbot = re.match(r"(https?://.+)", source["sourceURL"]) + if buildbot: + + source["steps"]["ci"] = { + "time": time.time(), + "status": "Parsing Buildbot job changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + kibble_bit.pprint("Parsing Buildbot activity at %s" % source["sourceURL"]) + source["steps"]["ci"] = { + "time": time.time(), + "status": "Downloading changeset", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Buildbot may neeed credentials + creds = None + if ( + source["creds"] + and "username" in source["creds"] + and source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + creds = "%s:%s" % (source["creds"]["username"], source["creds"]["password"]) + + # Get the job list + s_url = 
source["sourceURL"] + kibble_bit.pprint("Getting job list...") + builders = jsonapi.get("%s/json/builders" % s_url, auth=creds) + + # Save queue snapshot + NOW = int(datetime.datetime.utcnow().timestamp()) + queuehash = hashlib.sha224( + ( + "%s-%s-queue-%s" + % (source["organisation"], source["sourceID"], int(time.time())) + ).encode("ascii", errors="replace") + ).hexdigest() + + # Scan queue items + blocked = 0 + stuck = 0 + queue_size = 0 + actual_queue_size = 0 + building = 0 + jobs = [] + + for builder, data in builders.items(): + jobs.append(builder) + if data["state"] == "building": + building += 1 + if data.get("pendingBuilds", 0) > 0: + # All queued items, even offline builders + actual_queue_size += data.get("pendingBuilds", 0) + # Only queues with an online builder (actually waiting stuff) + if data["state"] == "building": + queue_size += data.get("pendingBuilds", 0) + blocked += data.get("pendingBuilds", 0) # Blocked by running builds + # Stuck builds (iow no builder available) + if data["state"] == "offline": + stuck += data.get("pendingBuilds", 0) + + # Write up a queue doc + queuedoc = { + "id": queuehash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(NOW)), + "time": NOW, + "size": queue_size, + "blocked": blocked, + "stuck": stuck, + "building": building, + "ci": "buildbot", + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + kibble_bit.append("ci_queue", queuedoc) + + kibble_bit.pprint("Found %u builders in Buildbot" % len(jobs)) + + threads = [] + block = threading.Lock() + kibble_bit.pprint("Scanning jobs using 4 sub-threads") + for i in range(0, 4): + t = BuildbotThread(block, kibble_bit, source, creds, jobs) + threads.append(t) + t.start() + + for t in threads: + t.join() + + # We're all done, yaay + kibble_bit.pprint("Done scanning %s" % source["sourceURL"]) + + source["steps"]["ci"] = { + "time": time.time(), + "status": "Buildbot successfully scanned at " + 
+ time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/discourse.py b/kibble/scanners/scanners/discourse.py new file mode 100644 index 00000000..26fd9229 --- /dev/null +++ b/kibble/scanners/scanners/discourse.py @@ -0,0 +1,341 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the Kibble Discourse scanner plugin. 
+""" + +import datetime +import hashlib +import os +import re +import threading +import time + +from kibble.scanners.utils import jsonapi + +title = "Scanner for Discourse Forums" +version = "0.1.0" + + +def accepts(source): + """ Determines whether we want to handle this source """ + if source["type"] == "discourse": + return True + return False + + +def scan_job(kibble_bit, source, cat, creds): + """ Scans a single discourse category for activity """ + # Get $discourseURL/c/$catID + catURL = os.path.join(source["sourceURL"], "c/%s" % cat["id"]) + kibble_bit.pprint("Scanning Discourse category '%s' at %s" % (cat["slug"], catURL)) + + page = 0 + allUsers = {} + + # For each paginated result (up to page 100), check for changes + while page < 100: + pcatURL = "%s?page=%u" % (catURL, page) + catjson = jsonapi.get(pcatURL, auth=creds) + page += 1 + + if catjson: + + # If we hit an empty list (no more topics), just break the loop. + if not catjson["topic_list"]["topics"]: + break + + # First (if we have data), we should store the known users + # Since discourse hides the email (obviously!), we'll have to + # fake one to generate an account. + fakeDomain = "foo.discourse" + m = re.match(r"https?://([-a-zA-Z0-9.]+)", source["sourceURL"]) + if m: + fakeDomain = m.group(1) + for user in catjson["users"]: + # Fake email address, compute deterministic ID + email = "%s@%s" % (user["username"], fakeDomain) + dhash = hashlib.sha224( + ( + "%s-%s-%s" + % (source["organisation"], source["sourceURL"], email) + ).encode("ascii", errors="replace") + ).hexdigest() + + # Construct a very sparse user document + userDoc = { + "id": dhash, + "organisation": source["organisation"], + "name": user["username"], + "email": email, + } + + # Store user-ID-to-username mapping for later + allUsers[user["id"]] = userDoc + + # Store it (or, queue storage) unless it exists. + # We don't wanna override better data, so we check if + # it's there first. 
+ if not kibble_bit.exists("person", dhash): + kibble_bit.append("person", userDoc) + + # Now, for each topic, we'll store a topic document + for topic in catjson["topic_list"]["topics"]: + + # Calculate topic ID + dhash = hashlib.sha224( + ( + "%s-%s-topic-%s" + % (source["organisation"], source["sourceURL"], topic["id"]) + ).encode("ascii", errors="replace") + ).hexdigest() + + # Figure out when topic was created and updated + CreatedDate = datetime.datetime.strptime( + topic["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).timestamp() + if topic.get("last_posted_at"): + UpdatedDate = datetime.datetime.strptime( + topic["last_posted_at"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).timestamp() + else: + UpdatedDate = 0 + + # Determine whether we should scan this topic or continue to the next one. + # We'll do this by seeing if the topic already exists and has no changes or not. + if kibble_bit.exists("forum_topic", dhash): + fdoc = kibble_bit.get("forum_topic", dhash) + # If update in the old doc was >= current update timestamp, skip the topic + if fdoc["updated"] >= UpdatedDate: + continue + + # Assuming we need to scan this, start by making the base topic document + topicdoc = { + "id": dhash, + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "type": "discourse", + "category": cat["slug"], + "title": topic["title"], + "creator": allUsers[topic["posters"][0]["user_id"]]["id"], + "creatorName": allUsers[topic["posters"][0]["user_id"]]["name"], + "created": CreatedDate, + "createdDate": time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime(CreatedDate) + ), + "updated": UpdatedDate, + "solved": False, # Discourse doesn't have this notion, but other forums might. 
+ "posts": topic["posts_count"], + "views": topic["views"], + "url": source["sourceURL"] + + "/t/%s/%s" % (topic["slug"], topic["id"]), + } + + kibble_bit.append("forum_topic", topicdoc) + kibble_bit.pprint("%s is new or changed, scanning" % topicdoc["url"]) + + # Now grab all the individual replies/posts + # Remember to not have it count as a visit! + pURL = "%s?track_visit=false&forceLoad=true" % topicdoc["url"] + pjson = jsonapi.get(pURL, auth=creds) + + posts = pjson["post_stream"]["posts"] + + # For each post/reply, construct a forum_entry document + kibble_bit.pprint("%s has %u posts" % (pURL, len(posts))) + for post in posts: + phash = hashlib.sha224( + ( + "%s-%s-post-%s" + % (source["organisation"], source["sourceURL"], post["id"]) + ).encode("ascii", errors="replace") + ).hexdigest() + uname = ( + post.get("name", post["username"]) or post["username"] + ) # Hack to get longest non-zero value + + # Find the hash of the person who posted it + # We may know them, or we may have to store them. + # If we have better info now (full name), re-store + if ( + post["user_id"] in allUsers + and allUsers[post["user_id"]]["name"] == uname + ): + uhash = allUsers[post["user_id"]]["id"] + else: + # Same as before, fake email, store... 
+ email = "%s@%s" % (post["username"], fakeDomain)
+ uhash = hashlib.sha224(
+ (
+ "%s-%s-%s"
+ % (source["organisation"], source["sourceURL"], email)
+ ).encode("ascii", errors="replace")
+ ).hexdigest()
+
+ # Construct a very sparse user document
+ userDoc = {
+ "id": uhash,
+ "organisation": source["organisation"],
+ "name": uname,
+ "email": email,
+ }
+
+ # Store user-ID-to-username mapping for later.
+ # NOTE(review): must be keyed by this post's author id (post["user_id"]),
+ # matching the lookups above; the previous `user["id"]` referenced a stale
+ # variable from the earlier category-users loop.
+ allUsers[post["user_id"]] = userDoc
+
+ # Store it (or, queue storage)
+ kibble_bit.append("person", userDoc)
+
+ # Get post date
+ CreatedDate = datetime.datetime.strptime(
+ post["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ"
+ ).timestamp()
+
+ # Store the post/reply document
+ pdoc = {
+ "id": phash,
+ "sourceID": source["sourceID"],
+ "organisation": source["organisation"],
+ "type": "discourse",
+ "creator": uhash,
+ "created": CreatedDate,
+ "createdDate": time.strftime(
+ "%Y/%m/%d %H:%M:%S", time.gmtime(CreatedDate)
+ ),
+ "topic": dhash,
+ "post_id": post["id"],
+ "text": post["cooked"],
+ "url": topicdoc["url"],
+ }
+ kibble_bit.append("forum_post", pdoc)
+ else:
+ kibble_bit.pprint("Fetching discourse data failed!")
+ return False
+ return True
+
+
+class DiscourseThread(threading.Thread):
+ """ Generic thread class for scheduling multiple scans at once """
+
+ def __init__(self, block, KibbleBit, source, creds, jobs):
+ super().__init__()
+ self.block = block
+ self.KibbleBit = KibbleBit
+ self.creds = creds
+ self.source = source
+ self.jobs = jobs
+
+ def run(self):
+ bad_ones = 0
+ while len(self.jobs) > 0 and bad_ones <= 50:
+ self.block.acquire()
+ try:
+ job = self.jobs.pop(0)
+ except Exception:
+ self.block.release()
+ return
+ if not job:
+ self.block.release()
+ return
+ self.block.release()
+ if not scan_job(self.KibbleBit, self.source, job, self.creds):
+ self.KibbleBit.pprint(
+ "[%s] This borked, trying another one" % job["name"]
+ )
+ bad_ones += 1
+ if bad_ones > 10:
+ self.KibbleBit.pprint("Too many errors, bailing!")
+ 
self.source["steps"]["forum"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": False, + } + self.KibbleBit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + # Simple URL check + discourse = re.match(r"(https?://.+)", source["sourceURL"]) + if discourse: + + source["steps"]["forum"] = { + "time": time.time(), + "status": "Parsing Discourse topics...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + pendingJobs = [] + kibble_bit.pprint("Parsing Discourse activity at %s" % source["sourceURL"]) + source["steps"]["forum"] = { + "time": time.time(), + "status": "Downloading changeset", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Discourse may neeed credentials (if basic auth) + creds = None + if ( + source["creds"] + and "username" in source["creds"] + and source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + creds = "%s:%s" % (source["creds"]["username"], source["creds"]["password"]) + + # Get the list of categories + sURL = source["sourceURL"] + kibble_bit.pprint("Getting categories...") + catjs = jsonapi.get("%s/categories_and_latest" % sURL, auth=creds) + + # Directly assign the category list as pending jobs queue, ezpz. 
+ pendingJobs = catjs["category_list"]["categories"] + + kibble_bit.pprint("Found %u categories" % len(pendingJobs)) + + # Now fire off 4 threads to parse the categories + threads = [] + block = threading.Lock() + kibble_bit.pprint("Scanning jobs using 4 sub-threads") + for i in range(0, 4): + t = DiscourseThread(block, kibble_bit, source, creds, pendingJobs) + threads.append(t) + t.start() + + for t in threads: + t.join() + + # We're all done, yaay + kibble_bit.pprint("Done scanning %s" % source["sourceURL"]) + + source["steps"]["forum"] = { + "time": time.time(), + "status": "Discourse successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/gerrit.py b/kibble/scanners/scanners/gerrit.py new file mode 100644 index 00000000..4e4b2e11 --- /dev/null +++ b/kibble/scanners/scanners/gerrit.py @@ -0,0 +1,261 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import hashlib +import json +import re +import time + +import requests +from dateutil import parser + +title = "Scanner for Gerrit Code Review" +version = "0.1.1" + + +CHANGES_URL = "%s/changes/%s" +PROJECT_LIST_URL = "%s/projects/" +ACCOUNTS_URL = "%s/accounts/%d" +COMMIT_ID_RE = re.compile(" Change-Id: (.*)") + + +def accepts(source): + """ Do we accept this source?? """ + if source["type"] == "gerrit": + return True + return False + + +def getjson(response): + response.raise_for_status() + return json.loads(response.text[4:]) + + +def get(url, params=None): + resp = requests.get(url, params=params) + return getjson(resp) + + +def changes(base_url, params=None): + return get(CHANGES_URL % (base_url, ""), params=params) + + +def change_details(base_url, change): + if isinstance(change, dict): + id = change["change_id"] + else: + id = change + + return get(CHANGES_URL % (base_url, id) + "/detail") + + +def get_commit_id(commit_message): + all = COMMIT_ID_RE.findall(commit_message) + if all: + return all[0] + return None + + +def get_all(base_url, f, params=None): + if params is None: + params = {} + acc = [] + + while True: + items = f(base_url, params=params) + if not items: + break + + acc.extend(items) + params.update({"start": len(acc)}) + + return acc + + +def format_date(d, epoch=False): + if not d: + return + parsed = parser.parse(d) + + if epoch: + return time.mktime(parsed.timetuple()) + + return time.strftime("%Y/%m/%d %H:%M:%S", parsed.timetuple()) + + +def make_hash(repo, change): + return hashlib.sha224( + ( + "%s-%s-%s" % (repo["organisation"], repo["sourceID"], change["change_id"]) + ).encode("ascii", errors="replace") + ).hexdigest() + + +def is_closed(change): + return change["status"] == "MERGED" or change["status"] == "ABANDONED" + + +def make_issue(repo, base_url, change): + key = change["change_id"] + dhash = make_hash(repo, change) + + closed_date = None + if is_closed(change): + closed_date = change["updated"] + + if "email" not in 
change["owner"]: + change["owner"]["email"] = "%u@invalid.gerrit" % change["owner"]["_account_id"] + owner_email = change["owner"]["email"] + + messages = [] + for message in change.get("messages", []): + messages.append(message.get("message", "")) + + return { + "id": dhash, + "key": key, + "organisation": repo["organisation"], + "sourceID": repo["sourceID"], + "url": base_url + "/#/q/" + key, + "status": change["status"], + "created": format_date(change["created"], epoch=True), + "closed": format_date(closed_date, epoch=True), + "issueCloser": owner_email, + "createdDate": format_date(change["created"]), + "closedDate": format_date(closed_date), + "changeDate": format_date(closed_date if closed_date else change["created"]), + "assignee": owner_email, + "issueCreator": owner_email, + "comments": len(messages), + "title": change["subject"], + } + + +def make_person(repo, raw_person): + email = raw_person["email"] + id = hashlib.sha1( + ("%s%s" % (repo["organisation"], email)).encode("ascii", errors="replace") + ).hexdigest() + return { + "email": email, + "id": id, + "organisation": repo["organisation"], + "name": raw_person["name"] + if "name" in raw_person + else "%u" % raw_person["_account_id"], + } + + +def update_issue(kibble_bit, issue): + id = issue["id"] + kibble_bit.pprint("Updating issue: " + id) + kibble_bit.index("issue", id, issue) + + +def update_person(kibble_bit, person): + kibble_bit.pprint("Updating person: " + person["name"] + " - " + person["email"]) + kibble_bit.index("person", person["id"], {"doc": person, "doc_as_upsert": True}) + + +def status_changed(stored_change, change): + if not stored_change or not change: + return True + return stored_change["status"] != change["status"] + + +def scan(kibble_bit, source): + source["steps"]["issues"] = { + "time": time.time(), + "status": "Analyzing Gerrit tickets...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + url = source["sourceURL"] + # Try matching 
foo.bar/r/project/subfoo + m = re.match(r"(.+://.+?/r)/(.+)", url) + if m: + base_url = m.group(1) + project_name = m.group(2) + # Fall back to old splitty split + else: + url = re.sub(r"^git://", "http://", url) + source_parts = url.split("/") + project_name = source_parts[-1] + base_url = "/".join(source_parts[:-1]) # remove the trailing /blah/ + + # TODO: figure out branch from current checkout + q = ( + '(is:open OR is:new OR is:closed OR is:merged OR is:abandoned) AND project:"%s"' + % project_name + ) + all_changes = get_all( + base_url, changes, {"q": q, "o": ["LABELS", "DETAILED_ACCOUNTS"]} + ) + + print("Found " + str(len(all_changes)) + " changes for project: " + project_name) + + people = {} + for change in all_changes: + try: + # TODO: check if needs updating here before getting details + dhash = make_hash(source, change) + + stored_change = None + if kibble_bit.exists("issue", dhash): + stored_change = kibble_bit.get("issue", dhash) + + if not status_changed(stored_change, change): + # print("change %s seen already and status unchanged. Skipping." 
% + # change['change_id']) + continue + + details = change_details(base_url, change) + + issue_doc = make_issue(source, base_url, details) + update_issue(kibble_bit, issue_doc) + + labels = details["labels"] + change_people = [] + + if "owner" in details: + change_people.append(details["owner"]) + if "Module-Owner" in labels and "all" in labels["Module-Owner"]: + change_people.extend(labels["Module-Owner"]["all"]) + if "Code-Review" in labels and "all" in labels["Code-Review"]: + change_people.extend(labels["Code-Review"]["all"]) + if "Verified" in labels and "all" in labels["Verified"]: + change_people.extend(labels["Verified"]["all"]) + + print(change["change_id"] + " -> " + str(len(change_people)) + " people.") + + for person in change_people: + if "email" in person and person["email"] not in people: + people[person["email"]] = person + update_person(kibble_bit, make_person(source, person)) + + except requests.HTTPError as e: + print(e) + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Done analyzing tickets!", + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/git-census.py b/kibble/scanners/scanners/git-census.py new file mode 100644 index 00000000..b105fcc3 --- /dev/null +++ b/kibble/scanners/scanners/git-census.py @@ -0,0 +1,326 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import hashlib +import os +import re +import subprocess +import tempfile +import time + +from kibble.configuration import conf + +title = "Census Scanner for Git" +version = "0.1.0" + + +def accepts(source): + """ Do we accept this source?? """ + if source["type"] == "git": + return True + # There are cases where we have a github repo, but don't wanna analyze the code, just issues + if source["type"] == "github" and not source.get("issuesonly", False): + return True + return False + + +def scan(kibble_bit, source): + """ Conduct a census scan """ + people = {} + idseries = {} + lcseries = {} + alcseries = {} + ctseries = {} + atseries = {} + + rid = source["sourceID"] + url = source["sourceURL"] + rootpath = "%s/%s/git" % ( + conf.get("scanner", "scratchdir"), + source["organisation"], + ) + gpath = os.path.join(rootpath, rid) + + if "steps" in source and source["steps"]["sync"]["good"] and os.path.exists(gpath): + source["steps"]["census"] = { + "time": time.time(), + "status": "Census count started at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": True, + "good": True, + } + kibble_bit.update_source(source) + gname = rid + modificationDates = {} + # Did we do a census before? + if "census" in source and source["census"] > 0: + # Go back 2 months, meh... 
+ ts = source["census"] - (62 * 86400) + pd = time.gmtime(ts) + date = time.strftime("%Y-%b-%d 0:00", pd) + inp = subprocess.check_output( + 'git --git-dir %s/.git log --after="%s" --all "--pretty=format:::%%H|%%ce|%%cn|%%ae|%%an|%%ct" --numstat' + % (gpath, date), + shell=True, + ) + else: + inp = subprocess.check_output( + 'git --git-dir %s/.git log --all "--pretty=format:::%%H|%%ce|%%cn|%%ae|%%an|%%ct" --numstat' + % gpath, + shell=True, + ) + tmp = tempfile.NamedTemporaryFile(mode="w+b", buffering=1, delete=False) + tmp.write(inp) + tmp.flush() + tmp.close() + with open(tmp.name, mode="r", encoding="utf-8", errors="replace") as f: + inp = f.read() + f.close() + os.unlink(tmp.name) + kibble_bit.pprint("Parsing log for %s (%s)..." % (rid, url)) + for m in re.finditer( + r":([a-f0-9]+)\|([^\r\n|]+)\|([^\r\n|]+)\|([^\r\n|]+)\|([^\r\n|]+)\|([\d+]+)\r?\n([^:]+?):", + inp, + flags=re.MULTILINE, + ): + if m: + ch = m.group(1) + ce = m.group(2) + cn = m.group(3) + ae = m.group(4) + an = m.group(5) + ct = int(m.group(6)) + diff = m.group(7) + insert = 0 + delete = 0 + files_touched = set() + # Diffs + for l in re.finditer( + r"(\d+)[ \t]+(\d+)[ \t]+([^\r\n]+)", diff, flags=re.MULTILINE + ): + insert += int(l.group(1)) + delete += int(l.group(2)) + filename = l.group(3) + if filename: + files_touched.update([filename]) + if ( + filename + and len(filename) > 0 + and ( + not filename in modificationDates + or modificationDates[filename]["timestamp"] < ct + ) + ): + modificationDates[filename] = { + "hash": ch, + "filename": filename, + "timestamp": ct, + "created": ct + if ( + not filename in modificationDates + or not "created" in modificationDates[filename] + or modificationDates[filename]["created"] > ct + ) + else modificationDates[filename]["created"], + "author_email": ae, + "committer_email": ce, + } + if insert > 100000000: + insert = 0 + if delete > 100000000: + delete = 0 + if delete > 1000000 or insert > 1000000: + kibble_bit.pprint( + "gigantic diff for %s 
(%s), ignoring" + % (gpath, source["sourceURL"]) + ) + if gname not in idseries: + idseries[gname] = {} + if gname not in lcseries: + lcseries[gname] = {} + if gname not in alcseries: + alcseries[gname] = {} + if gname not in ctseries: + ctseries[gname] = {} + if gname not in atseries: + atseries[gname] = {} + ts = ct - (ct % 86400) + if ts not in idseries[gname]: + idseries[gname][ts] = [0, 0] + + idseries[gname][ts][0] += insert + idseries[gname][ts][1] += delete + + if ts not in lcseries[gname]: + lcseries[gname][ts] = {} + if ts not in alcseries[gname]: + alcseries[gname][ts] = {} + if ce not in lcseries[gname][ts]: + lcseries[gname][ts][ce] = [0, 0] + lcseries[gname][ts][ce][0] += insert + lcseries[gname][ts][ce][1] = lcseries[gname][ts][ce][0] + delete + + if ae not in alcseries[gname][ts]: + alcseries[gname][ts][ae] = [0, 0] + alcseries[gname][ts][ae][0] += insert + alcseries[gname][ts][ae][1] = alcseries[gname][ts][ae][0] + delete + + if ts not in ctseries[gname]: + ctseries[gname][ts] = {} + if ts not in atseries[gname]: + atseries[gname][ts] = {} + + if ce not in ctseries[gname][ts]: + ctseries[gname][ts][ce] = 0 + ctseries[gname][ts][ce] += 1 + + if ae not in atseries[gname][ts]: + atseries[gname][ts][ae] = 0 + atseries[gname][ts][ae] += 1 + + # Committer + if ce not in people or len(people[ce]["name"]) < len(cn): + people[ce] = people[ce] if ce in people else {"projects": [gname]} + people[ce]["name"] = cn + if gname not in people[ce]["projects"]: + people[ce]["projects"].append(gname) + + # Author + if ae not in people or len(people[ae]["name"]) < len(an): + people[ae] = people[ae] if ae in people else {"projects": [gname]} + people[ae]["name"] = an + if gname not in people[ae]["projects"]: + people[ae]["projects"].append(gname) + + # Make a list of changed files, max 1024 + filelist = list(files_touched) + filelist = filelist[:1023] + + # ES commit documents + tsd = ts - (ts % 86400) + js = { + "id": rid + "/" + ch, + "sourceID": rid, + "sourceURL": 
source["sourceURL"], + "organisation": source["organisation"], + "ts": ct, + "tsday": tsd, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(ct)), + "committer_name": cn, + "committer_email": ce, + "author_name": an, + "author_email": ae, + "insertions": insert, + "deletions": delete, + "vcs": "git", + "files_changed": filelist, + } + jsx = { + "id": ch, + "organisation": source["organisation"], + "sourceID": source[ + "sourceID" + ], # Only ever the last source with this + "ts": ct, + "tsday": tsd, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(ct)), + "committer_name": cn, + "committer_email": ce, + "author_name": an, + "author_email": ae, + "insertions": insert, + "deletions": delete, + "repository": rid, # This will always ever only be the last repo that had it! + "vcs": "git", + "files_changed": filelist, + } + kibble_bit.append( + "person", + { + "upsert": True, + "name": cn, + "email": ce, + "address": ce, + "organisation": source["organisation"], + "id": hashlib.sha1( + ("%s%s" % (source["organisation"], ce)).encode( + "ascii", errors="replace" + ) + ).hexdigest(), + }, + ) + kibble_bit.append( + "person", + { + "upsert": True, + "name": an, + "email": ae, + "address": ae, + "organisation": source["organisation"], + "id": hashlib.sha1( + ("%s%s" % (source["organisation"], ae)).encode( + "ascii", errors="replace" + ) + ).hexdigest(), + }, + ) + kibble_bit.append("code_commit", js) + kibble_bit.append("code_commit_unique", jsx) + + changed = True # Do file changes?? 
Might wanna make this optional + if changed: + kibble_bit.pprint("Scanning file changes for %s" % source["sourceURL"]) + for filename in modificationDates: + fid = hashlib.sha1( + ("%s/%s" % (source["sourceID"], filename)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + jsfe = { + "upsert": True, + "id": fid, + "organisation": source["organisation"], + "sourceID": source["sourceID"], + "ts": modificationDates[filename]["timestamp"], + "date": time.strftime( + "%Y/%m/%d %H:%M:%S", + time.gmtime(modificationDates[filename]["timestamp"]), + ), + "committer_email": modificationDates[filename]["committer_email"], + "author_email": modificationDates[filename]["author_email"], + "hash": modificationDates[filename]["hash"], + "created": modificationDates[filename]["created"], + "createdDate": time.strftime( + "%Y/%m/%d %H:%M:%S", + time.gmtime(modificationDates[filename]["created"]), + ), + } + found = kibble_bit.exists("file_history", fid) + if found: + del jsfe["created"] + del jsfe["createdDate"] + kibble_bit.append("file_history", jsfe) + + source["steps"]["census"] = { + "time": time.time(), + "status": "Census count completed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": False, + "good": True, + } + source["census"] = time.time() + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/git-evolution.py b/kibble/scanners/scanners/git-evolution.py new file mode 100644 index 00000000..1e6ad16f --- /dev/null +++ b/kibble/scanners/scanners/git-evolution.py @@ -0,0 +1,258 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" Git Evolution scanner """ +import calendar +import datetime +import hashlib +import os +import subprocess +import time + +from kibble.configuration import conf +from kibble.scanners.utils import sloc + +title = "Git Evolution Scanner" +version = "0.1.0" + + +def accepts(source): + """ Do we accept this source? """ + if source["type"] == "git": + return True + # There are cases where we have a github repo, but don't wanna analyze the code, just issues + if source["type"] == "github" and not source.get("issuesonly", False): + return True + return False + + +def get_first_ref(gpath): + try: + return subprocess.check_output( + "cd %s && git log `git rev-list --max-parents=0 HEAD` --pretty=format:%%ct" + % gpath, + shell=True, + ) + except: # pylint: disable=bare-except # pylint: disable=bare-except + print("Could not get first ref, exiting!") + return None + + +def acquire(kibble_bit, source): + source["steps"]["evolution"] = { + "time": time.time(), + "status": "Evolution scan started at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + +def release(kibble_bit, source, status, exception=None, good=False): + source["steps"]["evolution"] = { + "time": time.time(), + "status": status, + "running": False, + "good": good, + } + + if exception: + source["steps"]["evolution"].update({"exception": exception}) + kibble_bit.update_source(source) + + +def check_branch(gpath, date, branch): + try: + subprocess.check_call( + 'cd %s && git rev-list -n 1 --before="%s" %s' % 
(gpath, date, branch), + shell=True, + ) + return True + except: # pylint: disable=bare-except # pylint: disable=bare-except + return False + + +def checkout(gpath, date, branch): + # print("Ready to cloc...checking out %s " % date) + try: + ref = ( + subprocess.check_output( + 'cd %s && git rev-list -n 1 --before="%s" "%s"' % (gpath, date, branch), + shell=True, + stderr=subprocess.STDOUT, + ) + .decode("ascii", "replace") + .strip() + ) + subprocess.check_output( + "cd %s && git checkout %s -- " % (gpath, ref), + shell=True, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as err: + print(err.output) + + +def find_branch(date, gpath): + try: + os.chdir(gpath) + subprocess.check_call( + 'cd %s && git rev-list -n 1 --before="%s" master' % (gpath, date), + shell=True, + stderr=subprocess.DEVNULL, + ) + return "master" + except: # pylint: disable=bare-except # pylint: disable=bare-except + os.chdir(gpath) + try: + return ( + subprocess.check_output( + "cd %s && git rev-parse --abbrev-ref HEAD" % gpath, + shell=True, + stderr=subprocess.DEVNULL, + ) + .decode("ascii", "replace") + .strip() + .strip("* ") + ) + except: # pylint: disable=bare-except # pylint: disable=bare-except + # print("meh! 
no branch")
+ return None
+
+
+def scan(kibble_bit, source):
+
+ rid = source["sourceID"]
+ rootpath = "%s/%s/git" % (
+ conf.get("scanner", "scratchdir"),
+ source["organisation"],
+ )
+ gpath = os.path.join(rootpath, rid)
+
+ gname = source["sourceID"]
+ kibble_bit.pprint("Doing evolution scan of %s" % gname)
+
+ inp = get_first_ref(gpath)
+ if inp:
+ ts = int(inp.split()[0])
+ ts -= ts % 86400
+ date = time.strftime("%Y-%b-%d 0:00", time.gmtime(ts))
+
+ # print("Starting from %s" % date)
+ now = time.time()
+
+ rid = source["sourceID"]
+ url = source["sourceURL"]
+ rootpath = "%s/%s/git" % (
+ conf.get("scanner", "scratchdir"),
+ source["organisation"],
+ )
+ gpath = os.path.join(rootpath, rid)
+
+ if source["steps"]["sync"]["good"] and os.path.exists(gpath):
+ acquire(kibble_bit, source)
+ branch = find_branch(date, gpath)
+
+ if not branch:
+ # release() requires kibble_bit as its first argument
+ release(
+ kibble_bit, source,
+ "Could not do evolutionary scan of code",
+ "No default branch was found in this repository",
+ )
+ return
+
+ branch_exists = check_branch(gpath, date, branch)
+
+ if not branch_exists:
+ kibble_bit.pprint("Not trunk either (bad repo?), skipping")
+ # release() requires kibble_bit as its first argument
+ release(
+ kibble_bit, source,
+ "Could not do evolutionary scan of code",
+ "No default branch was found in this repository",
+ )
+ return
+
+ try:
+
+ d = time.gmtime(now)
+ year = d[0]
+ quarter = d[1] - (d[1] % 3)
+ if quarter <= 0:
+ quarter += 12
+ year -= 1
+ while now > ts:
+ pd = (
+ datetime.datetime(year, quarter, 1)
+ .replace(tzinfo=datetime.timezone.utc)
+ .timetuple()
+ )
+ date = time.strftime("%Y-%b-%d 0:00", pd)
+ unix = calendar.timegm(pd)
+
+ # Skip the dates we've already processed
+ dhash = hashlib.sha224(
+ (source["sourceID"] + date).encode("ascii", "replace")
+ ).hexdigest()
+ found = kibble_bit.exists("evolution", dhash)
+ if not found:
+ checkout(gpath, date, branch)
+ kibble_bit.pprint(
+ "Running cloc on %s (%s) at %s"
+ % (gname, source["sourceURL"], date)
+ )
+ languages, codecount, comment, blank, years, cost = sloc.count(
+ gpath
+ ) 
+ js = { + "time": unix, + "sourceID": source["sourceID"], + "sourceURL": source["sourceURL"], + "organisation": source["organisation"], + "loc": codecount, + "comments": comment, + "blank": blank, + "years": years, + "cost": cost, + "languages": languages, + } + kibble_bit.index("evolution", dhash, js) + quarter -= 3 + if quarter <= 0: + quarter += 12 + year -= 1 + + # decrease month by 3 + now = time.mktime(datetime.date(year, quarter, 1).timetuple()) + except Exception as e: + kibble_bit.pprint(e) + release( + kibble_bit, + source, + "Evolution scan failed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + str(e), + ) + return + + release( + kibble_bit, + source, + "Evolution scan completed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + good=True, + ) diff --git a/kibble/scanners/scanners/git-sloc.py b/kibble/scanners/scanners/git-sloc.py new file mode 100644 index 00000000..fcc40092 --- /dev/null +++ b/kibble/scanners/scanners/git-sloc.py @@ -0,0 +1,90 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Source Lines of Code counter for Git. 
+""" + +import os +import subprocess +import time + +from kibble.configuration import conf +from kibble.scanners.utils import git, sloc + +title = "SloC Counter for Git" +version = "0.1.0" + + +def accepts(source): + """ Do we accept this source? """ + if source["type"] == "git": + return True + # There are cases where we have a github repo, but don't wanna analyze the code, just issues + if source["type"] == "github" and not source.get("issuesonly", False): + return True + return False + + +def scan(kibble_bit, source): + + rid = source["sourceID"] + url = source["sourceURL"] + rootpath = "%s/%s/git" % ( + conf.get("scanner", "scratchdir"), + source["organisation"], + ) + gpath = os.path.join(rootpath, rid) + + if source["steps"]["sync"]["good"] and os.path.exists(gpath): + source["steps"]["count"] = { + "time": time.time(), + "status": "SLoC count started at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + try: + branch = git.default_branch(source, gpath) + subprocess.call("cd %s && git checkout %s" % (gpath, branch), shell=True) + except: # pylint: disable=bare-except # pylint: disable=bare-except + kibble_bit.pprint("SLoC counter failed to find main branch for %s!!" 
% url) + return False + + kibble_bit.pprint("Running SLoC count for %s" % url) + languages, codecount, comment, blank, years, cost = sloc.count(gpath) + + sloc_ = { + "sourceID": source["sourceID"], + "loc": codecount, + "comments": comment, + "blanks": blank, + "years": years, + "cost": cost, + "languages": languages, + } + source["sloc"] = sloc_ + source["steps"]["count"] = { + "time": time.time(), + "status": "SLoC count completed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/git-sync.py b/kibble/scanners/scanners/git-sync.py new file mode 100644 index 00000000..d590cc75 --- /dev/null +++ b/kibble/scanners/scanners/git-sync.py @@ -0,0 +1,174 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import subprocess +import time + +from kibble.configuration import conf +from kibble.scanners.utils import git + +title = "Sync plugin for Git repositories" +version = "0.1.2" + + +def accepts(source): + """ Do we accept this source? 
""" + if source["type"] == "git": + return True + # There are cases where we have a github repo, but don't wanna analyze the code, just issues + if source["type"] == "github" and source.get("issuesonly", False) is False: + return True + return False + + +def scan(kibble_bit, source): + + # Get some vars, construct a data path for the repo + path = source["sourceID"] + url = source["sourceURL"] + rootpath = "%s/%s/git" % ( + conf.get("scanner", "scratchdir"), + source["organisation"], + ) + + # If the root path does not exist, try to make it recursively. + if not os.path.exists(rootpath): + try: + os.makedirs(rootpath, exist_ok=True) + print("Created root path %s" % rootpath) + except: # pylint: disable=bare-except # pylint: disable=bare-except + source["steps"]["sync"] = { + "time": time.time(), + "status": "Could not create root scratch dir - permision denied?", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + # This is were the repo should be cloned + datapath = os.path.join(rootpath, path) + + kibble_bit.pprint("Checking out %s as %s" % (url, path)) + + try: + source["steps"]["sync"] = { + "time": time.time(), + "status": "Fetching code data from source location...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # If we already checked this out earlier, just sync it. + if os.path.exists(datapath): + kibble_bit.pprint("Repo %s exists, fetching changes..." % datapath) + + # Do we have a default branch here? 
+ branch = git.default_branch(source, datapath) + if len(branch) == 0: + source["default_branch"] = branch + source["steps"]["sync"] = { + "time": time.time(), + "status": "Could not sync with source", + "exception": "No default branch was found in this repository", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + kibble_bit.pprint( + "No default branch found for %s (%s)" + % (source["sourceID"], source["sourceURL"]) + ) + return + + kibble_bit.pprint("Using branch %s" % branch) + # Try twice checking out the main branch and fetching changes. + # Sometimes we need to clean up after older scanners, which is + # why we try twice. If first attempt fails, clean up and try again. + for n in range(0, 2): + try: + subprocess.check_output( + "GIT_TERMINAL_PROMPT=0 cd %s && git checkout %s && git fetch --all && git merge -X theirs --no-edit" + % (datapath, branch), + shell=True, + stderr=subprocess.STDOUT, + ) + break + except subprocess.CalledProcessError as err: + e = str(err.output).lower() + # We're interested in merge conflicts, which we can resolve through trickery. + if n > 0 or not ( + "resolve" in e or "merge" in e or "overwritten" in e + ): + # This isn't a merge conflict, pass it to outer func + raise err + # Switch to first commit + fcommit = subprocess.check_output( + "cd %s && git rev-list --max-parents=0 --abbrev-commit HEAD" + % datapath, + shell=True, + stderr=subprocess.STDOUT, + ) + fcommit = fcommit.decode("ascii").strip() + subprocess.check_call( + "cd %s && git reset --hard %s" % (datapath, fcommit), + shell=True, + stderr=subprocess.STDOUT, + ) + try: + subprocess.check_call( + "cd %s && git clean -xfd" % datapath, + shell=True, + stderr=subprocess.STDOUT, + ) + except: # pylint: disable=bare-except # pylint: disable=bare-except + pass + # This is a new repo, clone it! + else: + kibble_bit.pprint("%s is new, cloning...!" 
% datapath) + subprocess.check_output( + "GIT_TERMINAL_PROMPT=0 cd %s && git clone %s %s" + % (rootpath, url, path), + shell=True, + stderr=subprocess.STDOUT, + ) + + except subprocess.CalledProcessError as err: + kibble_bit.pprint("Repository sync failed (no master?)") + kibble_bit.pprint(str(err.output)) + source["steps"]["sync"] = { + "time": time.time(), + "status": "Sync failed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": False, + "good": False, + "exception": str(err.output), + } + kibble_bit.update_source(source) + return + + # All good, yay! + source["steps"]["sync"] = { + "time": time.time(), + "status": "Source code fetched successfully at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/github-issues.py b/kibble/scanners/scanners/github-issues.py new file mode 100644 index 00000000..95ddf942 --- /dev/null +++ b/kibble/scanners/scanners/github-issues.py @@ -0,0 +1,247 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+
+import hashlib
+import re
+import time
+
+import requests
+from dateutil import parser
+
+from kibble.scanners.utils import github
+
+title = "Scanner for GitHub Issues"
+version = "0.1.0"
+
+
+def accepts(source):
+    """ Return true if this is a github repo """
+    if source["type"] == "github":
+        return True
+    if source["type"] == "git" and re.match(
+        r"https://(?:www\.)?github.com/", source["sourceURL"]
+    ):
+        return True
+    return False
+
+
+def format_date(d, epoch=False):
+    if not d:
+        return
+    parsed = parser.parse(d)
+
+    if epoch:
+        return time.mktime(parsed.timetuple())
+
+    return time.strftime("%Y/%m/%d %H:%M:%S", parsed.timetuple())
+
+
+def make_hash(source, issue):
+    return hashlib.sha224(
+        (
+            "%s-%s-%s" % (source["organisation"], source["sourceID"], str(issue["id"]))
+        ).encode("ascii", errors="replace")
+    ).hexdigest()
+
+
+def make_issue(source, issue, people):
+
+    key = str(issue["number"])
+    dhash = make_hash(source, issue)
+
+    closed_date = issue.get("closed_at", None)
+
+    owner_email = people[issue["user"]["login"]]["email"]
+
+    issue_closer = owner_email
+    if "closed_by" in issue:
+        issue_closer = people[issue["closed_by"]["login"]]["email"]
+    # Is this an issue or a pull request?
+ itype = "issue" + if "pull_request" in issue: + itype = "pullrequest" + labels = [] + for l in issue.get("labels", []): + labels.append(l["name"]) + return { + "id": dhash, + "key": key, + "issuetype": itype, + "organisation": source["organisation"], + "sourceID": source["sourceID"], + "url": issue["html_url"], + "status": issue["state"], + "labels": labels, + "created": format_date(issue["created_at"], epoch=True), + "closed": format_date(closed_date, epoch=True), + "issueCloser": issue_closer, + "createdDate": format_date(issue["created_at"]), + "closedDate": format_date(closed_date), + "changeDate": format_date(closed_date if closed_date else issue["updated_at"]), + "assignee": owner_email, + "issueCreator": owner_email, + "comments": issue["comments"], + "title": issue["title"], + } + + +def make_person(source, issue, raw_person): + email = raw_person["email"] + if not email: + email = "%s@invalid.github.com" % issue["user"]["login"] + + name = raw_person["name"] + if not name: + name = raw_person["login"] + + id = hashlib.sha1( + ("%s%s" % (source["organisation"], email)).encode("ascii", errors="replace") + ).hexdigest() + + return { + "email": email, + "id": id, + "organisation": source["organisation"], + "name": name, + } + + +def status_changed(stored_issue, issue): + return stored_issue["status"] != issue["status"] + + +def update_issue(kibble_bit, issue): + kibble_bit.append("issue", issue) + + +def update_person(kibble_bit, person): + person["upsert"] = True + kibble_bit.append("person", person) + + +def scan(kibble_bit, source, first_attempt=True): + auth = None + people = {} + if "creds" in source: + kibble_bit.pprint("Using auth for repo %s" % source["sourceURL"]) + creds = source["creds"] + if creds and "username" in creds: + auth = (creds["username"], creds["password"]) + TL = github.get_tokens_left(auth=auth) + kibble_bit.pprint("Scanning for GitHub issues (%u tokens left on GitHub)" % TL) + # Have we scanned before? 
If so, only do a 3 month scan here. + done_before = False + if source.get("steps") and source["steps"].get("issues"): + done_before = True + source["steps"]["issues"] = { + "time": time.time(), + "status": "Issue scan started at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": True, + "good": True, + } + kibble_bit.update_source(source) + try: + if done_before: + since = time.strftime( + "%Y-%m-%dT%H:%M:%SZ", time.gmtime(time.time() - (3 * 30 * 86400)) + ) + kibble_bit.pprint("Fetching changes since %s" % since) + issues = github.get_all( + source, + github.issues, + params={"filter": "all", "state": "all", "since": since}, + auth=auth, + ) + else: + issues = github.get_all( + source, + github.issues, + params={"filter": "all", "state": "all"}, + auth=auth, + ) + kibble_bit.pprint( + "Fetched %s issues for %s" % (str(len(issues)), source["sourceURL"]) + ) + + for issue in issues: + + if issue["user"]["login"] not in people: + person = make_person( + source, issue, github.user(issue["user"]["url"], auth=auth) + ) + people[issue["user"]["login"]] = person + update_person(kibble_bit, person) + + if "closed_by" in issue and not issue["closed_by"]["login"] in people: + closer = make_person( + source, issue, github.user(issue["closed_by"]["url"], auth=auth) + ) + people[issue["closed_by"]["login"]] = closer + update_person(kibble_bit, closer) + + doc = make_issue(source, issue, people) + dhash = doc["id"] + if kibble_bit.exists("issue", dhash): + es_doc = kibble_bit.get("issue", dhash) + if not status_changed(es_doc, doc): + # KibbleBit.pprint("change %s seen already and status unchanged. Skipping." 
% issue['id']) + continue + + update_issue(kibble_bit, doc) + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Issue scan completed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()), + "running": False, + "good": True, + } + kibble_bit.update_source(source) + + except requests.HTTPError as e: + # If we errored out because of rate limiting, retry later, otherwise bail + if first_attempt: + sleeps = 0 + if github.get_tokens_left(auth=auth) < 10: + kibble_bit.pprint("Hit rate limits, trying to sleep it off!") + while github.get_tokens_left(auth=auth) < 10: + sleeps += 1 + if sleeps > 24: + kibble_bit.pprint( + "Slept for too long without finding a reset rate limit, giving up!" + ) + break + time.sleep(300) # Sleep 5 min, then check again.. + # If we have tokens, try one more time... + if github.get_tokens_left(auth=auth) > 10: + scan( + kibble_bit, source, False + ) # If this one fails, bail completely + return + + kibble_bit.pprint("HTTP Error, rate limit exceeded?") + source["steps"]["issues"] = { + "time": time.time(), + "status": "Issue scan failed at " + + time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()) + + ": " + + e.response.text, + "running": False, + "good": False, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/github-stats.py b/kibble/scanners/scanners/github-stats.py new file mode 100644 index 00000000..aa7ec93e --- /dev/null +++ b/kibble/scanners/scanners/github-stats.py @@ -0,0 +1,137 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import hashlib +import re +import time + +from kibble.scanners.utils import github + +title = "Traffic statistics plugin for GitHub repositories" +version = "0.1.0" + + +def accepts(source): + """ Do we accept this source? """ + if source["type"] == "github": + return True + return False + + +def get_time(string): + """ Convert GitHub timestamp to epoch """ + return time.mktime( + time.strptime(re.sub(r"Z", "", str(string)), "%Y-%m-%dT%H:%M:%S") + ) + + +def scan(kibble_bit, source): + + # Get some vars, construct a data path for the repo + url = source["sourceURL"] + + auth = None + if "creds" in source: + kibble_bit.pprint("Using auth for repo %s" % source["sourceURL"]) + creds = source["creds"] + if creds and "username" in creds: + auth = (creds["username"], creds["password"]) + else: + kibble_bit.pprint( + "GitHub stats requires auth, none provided. Ignoring this repo." 
+        )
+        return
+    try:
+        source["steps"]["stats"] = {
+            "time": time.time(),
+            "status": "Fetching statistics from source location...",
+            "running": True,
+            "good": True,
+        }
+        kibble_bit.update_source(source)
+
+        # Get views
+        views = github.views(url, auth)
+        if "views" in views:
+            for el in views["views"]:
+                ts = get_time(el["timestamp"])
+                shash = hashlib.sha224(
+                    (
+                        "%s-%s-%s-views"
+                        % (source["organisation"], url, el["timestamp"])
+                    ).encode("ascii", errors="replace")
+                ).hexdigest()
+                bit = {
+                    "organisation": source["organisation"],
+                    "sourceURL": source["sourceURL"],
+                    "sourceID": source["sourceID"],
+                    "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(ts)),
+                    "count": el["count"],
+                    "uniques": el["uniques"],
+                    "ghtype": "views",
+                    "id": shash,
+                }
+                kibble_bit.append("ghstats", bit)
+
+        # Get clones
+        clones = github.clones(url, auth)
+        if "clones" in clones:
+            for el in clones["clones"]:
+                ts = get_time(el["timestamp"])
+                shash = hashlib.sha224(
+                    (
+                        "%s-%s-%s-clones"
+                        % (source["organisation"], url, el["timestamp"])
+                    ).encode("ascii", errors="replace")
+                ).hexdigest()
+                bit = {
+                    "organisation": source["organisation"],
+                    "sourceURL": source["sourceURL"],
+                    "sourceID": source["sourceID"],
+                    "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(ts)),
+                    "count": el["count"],
+                    "uniques": el["uniques"],
+                    "ghtype": "clones",
+                    "id": shash,
+                }
+                kibble_bit.append("ghstats", bit)
+
+        # Get referrers
+        refs = github.referrers(url, auth)
+        if refs:
+            for el in refs:
+                el["timestamp"] = time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime())
+                ts = get_time(el["timestamp"])
+                shash = hashlib.sha224(
+                    (
+                        "%s-%s-%s-refs" % (source["organisation"], url, el["timestamp"])
+                    ).encode("ascii", errors="replace")
+                ).hexdigest()
+                bit = {
+                    "organisation": source["organisation"],
+                    "sourceURL": source["sourceURL"],
+                    "sourceID": source["sourceID"],
+                    "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(ts)),
+                    "count": el["count"],
+                    "uniques": el["uniques"],
+                    
"ghtype": "referrers", + "id": shash, + } + kibble_bit.append("ghstats", bit) + except: # pylint: disable=bare-except # pylint: disable=bare-except + pass + # All done! diff --git a/kibble/scanners/scanners/jenkins.py b/kibble/scanners/scanners/jenkins.py new file mode 100644 index 00000000..b1e47071 --- /dev/null +++ b/kibble/scanners/scanners/jenkins.py @@ -0,0 +1,351 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the Kibble Jenkins scanner plugin. 
+""" + +import datetime +import hashlib +import re +import threading +import time +import urllib.parse + +from kibble.scanners.utils import jsonapi + +title = "Scanner for Jenkins CI" +version = "0.1.0" + + +def accepts(source): + """ Determines whether we want to handle this source """ + if source["type"] == "jenkins": + return True + return False + + +def scan_job(kibble_bit, source, job, creds): + """ Scans a single job for activity """ + NOW = int(datetime.datetime.utcnow().timestamp()) + jname = job["name"] + if job.get("folder"): + jname = job.get("folder") + "-" + job["name"] + dhash = hashlib.sha224( + ("%s-%s-%s" % (source["organisation"], source["sourceURL"], jname)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + + # Get $jenkins/job/$job-name/json... + job_url = ( + "%s/api/json?depth=2&tree=builds[number,status,timestamp,id,result,duration]" + % job["fullURL"] + ) + kibble_bit.pprint(job_url) + jobjson = jsonapi.get(job_url, auth=creds) + + # If valid JSON, ... + if jobjson: + for build in jobjson.get("builds", []): + buildhash = hashlib.sha224( + ( + "%s-%s-%s-%s" + % (source["organisation"], source["sourceURL"], jname, build["id"]) + ).encode("ascii", errors="replace") + ).hexdigest() + builddoc = None + try: + builddoc = kibble_bit.get("ci_build", buildhash) + except: # pylint: disable=bare-except # pylint: disable=bare-except + pass + + # If this build already completed, no need to parse it again + if builddoc and builddoc.get("completed", False): + continue + + kibble_bit.pprint( + "[%s-%s] This is new or pending, analyzing..." 
% (jname, build["id"]) + ) + + completed = bool(build["result"]) + + # Estimate time spent in queue + queuetime = 0 + TS = int(build["timestamp"] / 1000) + if builddoc: + queuetime = builddoc.get("queuetime", 0) + if not completed: + queuetime = NOW - TS + + # Get build status (success, failed, canceled etc) + status = "building" + if build["result"] in ["SUCCESS", "STABLE"]: + status = "success" + if build["result"] in ["FAILURE", "UNSTABLE"]: + status = "failed" + if build["result"] in ["ABORTED"]: + status = "aborted" + + # Calc when the build finished (jenkins doesn't show this) + if completed: + FIN = int(build["timestamp"] + build["duration"]) / 1000 + else: + FIN = 0 + + doc = { + # Build specific data + "id": buildhash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(FIN)), + "buildID": build["id"], + "completed": completed, + "duration": build["duration"], + "job": jname, + "job_url": job_url, + "status": status, + "started": int(build["timestamp"] / 1000), + "ci": "jenkins", + "queuetime": queuetime, + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + kibble_bit.append("ci_build", doc) + # Yay, it worked! + return True + + # Boo, it failed! 
+ kibble_bit.pprint("Fetching job data failed!") + return False + + +class Jenkinsthread(threading.Thread): + """ Generic thread class for scheduling multiple scans at once """ + + def __init__(self, block, KibbleBit, source, creds, jobs): + super().__init__() + self.block = block + self.KibbleBit = KibbleBit + self.creds = creds + self.source = source + self.jobs = jobs + + def run(self): + bad_ones = 0 + while len(self.jobs) > 0 and bad_ones <= 50: + self.block.acquire() + try: + job = self.jobs.pop(0) + except Exception as err: + print(f"An error occurred: {err}") + self.block.release() + return + if not job: + self.block.release() + return + self.block.release() + jfolder = job.get("folder") + ssource = dict(self.source) + if jfolder: + ssource["sourceURL"] += "/job/" + jfolder + if not scan_job(self.KibbleBit, ssource, job, self.creds): + self.KibbleBit.pprint( + "[%s] This borked, trying another one" % job["name"] + ) + bad_ones += 1 + if bad_ones > 100: + self.KibbleBit.pprint("Too many errors, bailing!") + self.source["steps"]["issues"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": False, + } + self.KibbleBit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + # Simple URL check + jenkins = re.match(r"(https?://.+)", source["sourceURL"]) + if jenkins: + + source["steps"]["jenkins"] = { + "time": time.time(), + "status": "Parsing Jenkins job changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + kibble_bit.pprint("Parsing Jenkins activity at %s" % source["sourceURL"]) + source["steps"]["issues"] = { + "time": time.time(), + "status": "Downloading changeset", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Jenkins may neeed credentials + creds = None + if ( + source["creds"] + and "username" in source["creds"] + and 
source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + creds = "%s:%s" % (source["creds"]["username"], source["creds"]["password"]) + + # Get the job list + s_url = source["sourceURL"] + kibble_bit.pprint("Getting job list...") + jobsjs = jsonapi.get( + "%s/api/json?tree=jobs[name,color]&depth=1" % s_url, auth=creds + ) + + # Get the current queue + kibble_bit.pprint("Getting job queue...") + queuejs = jsonapi.get("%s/queue/api/json?depth=1" % s_url, auth=creds) + + # Save queue snapshot + NOW = int(datetime.datetime.utcnow().timestamp()) + queuehash = hashlib.sha224( + ( + "%s-%s-queue-%s" + % (source["organisation"], source["sourceURL"], int(time.time())) + ).encode("ascii", errors="replace") + ).hexdigest() + + # Scan queue items + blocked = 0 + stuck = 0 + totalqueuetime = 0 + items = queuejs.get("items", []) + + for item in items: + if item["blocked"]: + blocked += 1 + if item["stuck"]: + stuck += 1 + if "inQueueSince" in item: + totalqueuetime += NOW - int(item["inQueueSince"] / 1000) + + avgqueuetime = totalqueuetime / max(1, len(items)) + + # Count how many jobs are building, find any folders... 
+ actual_jobs, building = get_all_jobs( + kibble_bit, source, jobsjs.get("jobs", []), creds + ) + + # Write up a queue doc + queuedoc = { + "id": queuehash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(NOW)), + "time": NOW, + "building": building, + "size": len(items), + "blocked": blocked, + "stuck": stuck, + "avgwait": avgqueuetime, + "ci": "jenkins", + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + kibble_bit.append("ci_queue", queuedoc) + + pending_jobs = actual_jobs + kibble_bit.pprint("Found %u jobs in Jenkins" % len(pending_jobs)) + + threads = [] + block = threading.Lock() + kibble_bit.pprint("Scanning jobs using 4 sub-threads") + for i in range(0, 4): + t = Jenkinsthread(block, kibble_bit, source, creds, pending_jobs) + threads.append(t) + t.start() + + for t in threads: + t.join() + + # We're all done, yaay + kibble_bit.pprint("Done scanning %s" % source["sourceURL"]) + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Jenkins successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) + + +def get_all_jobs(kibble_bit, source, joblist, creds): + real_jobs = [] + building = 0 + for job in joblist: + # Is this a job folder? + jclass = job.get("_class") + if jclass in [ + "jenkins.branch.OrganizationFolder", + "org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject", + ]: + kibble_bit.pprint("%s is a jobs folder, expanding..." 
% job["name"]) + cs_url = "%s/job/%s" % ( + source["sourceURL"], + urllib.parse.quote(job["name"].replace("/", "%2F")), + ) + try: + child_jobs = jsonapi.get( + "%s/api/json?tree=jobs[name,color]&depth=1" % cs_url, auth=creds + ) + csource = dict(source) + csource["sourceURL"] = cs_url + if not csource.get("folder"): + csource["folder"] = job["name"] + else: + csource["folder"] += "-" + job["name"] + cjobs, cbuilding = get_all_jobs( + kibble_bit, csource, child_jobs.get("jobs", []), creds + ) + building += cbuilding + for cjob in cjobs: + real_jobs.append(cjob) + except: # pylint: disable=bare-except # pylint: disable=bare-except + kibble_bit.pprint("Couldn't get child jobs, bailing") + print("%s/api/json?tree=jobs[name,color]&depth=1" % cs_url) + # Or standard job? + else: + # Is it building? + if "anime" in job.get( + "color", "" + ): # a running job will have foo_anime as color + building += 1 + job["fullURL"] = "%s/job/%s" % ( + source["sourceURL"], + urllib.parse.quote(job["name"].replace("/", "%2F")), + ) + job["folder"] = source.get("folder") + real_jobs.append(job) + return real_jobs, building diff --git a/kibble/scanners/scanners/jira.py b/kibble/scanners/scanners/jira.py new file mode 100644 index 00000000..dedd84c6 --- /dev/null +++ b/kibble/scanners/scanners/jira.py @@ -0,0 +1,461 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+This is the Kibble JIRA scanner plugin.
+"""
+
+import hashlib
+import re
+import threading
+import time
+
+import requests.exceptions
+
+from kibble.scanners.utils import jsonapi
+
+title = "Scanner for Atlassian JIRA"
+version = "0.1.0"
+
+
+def accepts(source):
+    """ Determines whether we want to handle this source """
+    if source["type"] == "jira":
+        return True
+    if source["type"] == "issuetracker":
+        jira = re.match(r"(https?://.+)/browse/([A-Z0-9]+)", source["sourceURL"])
+        if jira:
+            return True
+    return False
+
+
+def get_time(string):
+    return time.mktime(
+        time.strptime(re.sub(r"\..*", "", str(string)), "%Y-%m-%dT%H:%M:%S")
+    )
+
+
+def assigned(js):
+    if "items" in js:
+        for item in js["items"]:
+            if item["field"] == "assignee":
+                return True
+    return False
+
+
+def wfi(js):
+    if "items" in js:
+        for item in js["items"]:
+            if item["field"] == "status" and item["toString"] == "Waiting for Infra":
+                return True
+    return False
+
+
+def wfu(js):
+    if "items" in js:
+        for item in js["items"]:
+            if item["field"] == "status" and item["toString"] == "Waiting for user":
+                return True
+    return False
+
+
+def moved(js):
+    if "items" in js:
+        for item in js["items"]:
+            if item["field"] == "Key" and item["toString"].find("INFRA-") != -1:
+                return True
+    return False
+
+
+def wasclosed(js):
+    if "changelog" in js:
+        cjs = js["changelog"]["histories"]
+        for citem in cjs:
+            if "items" in citem:
+                for item in citem["items"]:
+                    if item["field"] == "status" and (
+                        item["toString"].lower().find("closed") != -1
+                        or item["toString"].lower().find("resolved") != -1
+                    ):
+                        
return True, citem.get("author", {}) + else: + if "items" in js: + for item in js["items"]: + if item["field"] == "status" and ( + item["toString"].find("Closed") != -1 + ): + return True, None + return False, None + + +def resolved(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "resolution" and ( + item["toString"] != "Pending Closed" + and item["toString"] != "Unresolved" + ): + return True + return False + + +def pchange(js): + if "items" in js: + for item in js["items"]: + if item["field"] == "priority": + return True + return False + + +def scan_ticket(kibble_bit, key, u, source, creds, open_tickets): + """ Scans a single ticket for activity and people """ + + dhash = hashlib.sha224( + ("%s-%s-%s" % (source["organisation"], source["sourceURL"], key)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + parse_it = False + + # the 'domain' var we try to figure out here is used + # for faking email addresses and keep them unique, + # in case JIRA has email visibility turned off. + domain = "jira" + m = re.search(r"https?://([^/]+)", u) + if m: + domain = m.group(1) + + found = kibble_bit.exists("issue", dhash) + if not found: + kibble_bit.pprint("[%s] We've never seen this ticket before, parsing..." % key) + parse_it = True + else: + ticket = kibble_bit.get("issue", dhash) + if ticket["status"] == "closed" and key in open_tickets: + kibble_bit.pprint("[%s] Ticket was reopened, reparsing" % key) + parse_it = True + elif ticket["status"] == "open" and not key in open_tickets: + kibble_bit.pprint("[%s] Ticket was recently closed, parsing it" % key) + parse_it = True + else: + if ( + ticket["issueCreator"] == "unknown@kibble" + or ticket["issueCloser"] == "unknown@kibble" + ): # Gotta redo these! + parse_it = True + kibble_bit.pprint( + "[%s] Ticket contains erroneous data from a previous scan, reparsing" + % key + ) + # This is just noise! + # KibbleBit.pprint("[%s] Ticket hasn't changed, ignoring..." 
% key) + + if parse_it: + kibble_bit.pprint("[%s] Parsing data from JIRA at %s..." % (key, domain)) + query_url = ( + "%s/rest/api/2/issue/%s?fields=creator,reporter,status,issuetype,summary,assignee,resolutiondate,created,priority,changelog,comment,resolution,votes&expand=changelog" + % (u, key) + ) + jira_url = "%s/browse/%s" % (u, key) + try: + tjson = jsonapi.get(query_url, auth=creds) + if not tjson: + kibble_bit.pprint("%s does not exist (404'ed)" % key) + return False + except requests.exceptions.ConnectionError as err: + kibble_bit.pprint(f"Connection error: {err}, skipping this ticket for now!") + return False + st, closer = wasclosed(tjson) + if st and not closer: + kibble_bit.pprint("Closed but no closer??") + closer_email = None + status = "closed" if st else "open" + + # Make sure we actually have field data to work with + if not tjson.get("fields") or not tjson["fields"].get("created"): + kibble_bit.pprint( + "[%s] JIRA response is missing field data, ignoring ticket." % key + ) + return False + + cd = get_time(tjson["fields"]["created"]) + rd = ( + get_time(tjson["fields"]["resolutiondate"]) + if "resolutiondate" in tjson["fields"] and tjson["fields"]["resolutiondate"] + else None + ) + comments = 0 + if "comment" in tjson["fields"] and tjson["fields"]["comment"]: + comments = tjson["fields"]["comment"]["total"] + assignee = ( + tjson["fields"]["assignee"].get( + "emailAddress", # Try email, fall back to username + tjson["fields"]["assignee"].get("name"), + ) + if tjson["fields"].get("assignee") + else None + ) + creator = ( + tjson["fields"]["reporter"].get( + "emailAddress", # Try email, fall back to username + tjson["fields"]["reporter"].get("name"), + ) + if tjson["fields"].get("reporter") + else None + ) + title = tjson["fields"]["summary"] + if closer: + # print("Parsing closer") + closer_email = ( + closer.get("emailAddress", closer.get("name")) + .replace(" dot ", ".", 10) + .replace(" at ", "@", 1) + ) + if "@" not in closer_email: + 
closer_email = "%s@%s" % (closer_email, domain) + display_name = closer.get("displayName", "Unkown") + if display_name and len(display_name) > 0: + # Add to people db + pid = hashlib.sha1( + ("%s%s" % (source["organisation"], closer_email)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + jsp = { + "name": display_name, + "email": closer_email, + "organisation": source["organisation"], + "id": pid, + "upsert": True, + } + kibble_bit.append("person", jsp) + + if creator: + creator = creator.replace(" dot ", ".", 10).replace(" at ", "@", 1) + if "@" not in creator: + creator = "%s@%s" % (creator, domain) + display_name = ( + tjson["fields"]["reporter"]["displayName"] + if tjson["fields"]["reporter"] + else None + ) + if display_name and len(display_name) > 0: + # Add to people db + pid = hashlib.sha1( + ("%s%s" % (source["organisation"], creator)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + jsp = { + "name": display_name, + "email": creator, + "organisation": source["organisation"], + "id": pid, + "upsert": True, + } + kibble_bit.append("person", jsp) + if assignee and not "@" in assignee: + assignee = "%s@%s" % (assignee, domain) + jso = { + "id": dhash, + "key": key, + "organisation": source["organisation"], + "sourceID": source["sourceID"], + "url": jira_url, + "status": status, + "created": cd, + "closed": rd, + "issuetype": "issue", + "issueCloser": closer_email, + "createdDate": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(cd)), + "closedDate": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(rd)) + if rd + else None, + "changeDate": time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime(rd if rd else cd) + ), + "assignee": assignee, + "issueCreator": creator, + "comments": comments, + "title": title, + } + kibble_bit.append("issue", jso) + return True + + +# +# except Exception as err: +# KibbleBit.pprint(err) +# return False + + +class JiraThread(threading.Thread): + def __init__(self, block, kibble_bit, source, creds, pt, ot): + 
super().__init__() + self.block = block + self.KibbleBit = kibble_bit + self.creds = creds + self.source = source + self.pendingTickets = pt + self.openTickets = ot + + def run(self): + bad_ones = 0 + while len(self.pendingTickets) > 0 and bad_ones <= 50: + # print("%u elements left to count" % len(pendingTickets)) + self.block.acquire() + try: + rl = self.pendingTickets.pop(0) + except Exception as err: + print(f"An error occured: {err}") + self.block.release() + return + if not rl: + self.block.release() + return + self.block.release() + if not scan_ticket( + self.KibbleBit, rl[0], rl[1], rl[2], self.creds, self.openTickets + ): + self.KibbleBit.pprint("[%s] This borked, trying another one" % rl[0]) + bad_ones += 1 + if bad_ones > 100: + self.KibbleBit.pprint("Too many errors, bailing!") + self.source["steps"]["issues"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": False, + } + self.KibbleBit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + jira = re.match(r"(https?://.+)/browse/([A-Z0-9]+)", source["sourceURL"]) + if jira: + + # JIRA NEEDS credentials to do a proper scan! + creds = None + if ( + source["creds"] + and "username" in source["creds"] + and source["creds"]["username"] + and len(source["creds"]["username"]) > 0 + ): + creds = "%s:%s" % (source["creds"]["username"], source["creds"]["password"]) + if not creds: + kibble_bit.pprint( + "JIRA at %s requires authentication, but none was found! Bailing." 
+ % source["sourceURL"] + ) + source["steps"]["issues"] = { + "time": time.time(), + "status": "Parsing JIRA changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + return + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Parsing JIRA changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + pending_tickets = [] + kibble_bit.pprint("Parsing JIRA activity at %s" % source["sourceURL"]) + source["steps"]["issues"] = { + "time": time.time(), + "status": "Downloading changeset", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Get base URL, list and domain to parse + u = jira.group(1) + instance = jira.group(2) + last_ticket = 0 + latest_url = ( + "%s/rest/api/2/search?jql=project=%s+order+by+createdDate+DESC&fields=id,key&maxResults=1" + % (u, instance) + ) + js = None + + js = jsonapi.get(latest_url, auth=creds) + if "issues" in js and len(js["issues"]) == 1: + key = js["issues"][0]["key"] + m = re.search(r"-(\d+)$", key) + if m: + last_ticket = int(m.group(1)) + + open_tickets = [] + start_at = 0 + bad_tries = 0 + while bad_tries < 10: + open_url = ( + "%s/rest/api/2/search?jql=project=%s+and+status=open+order+by+createdDate+ASC&fields=id,key&maxResults=100&startAt=%u" + % (u, instance, start_at) + ) + # print(openURL) + try: + ojs = jsonapi.get(open_url, auth=creds) + if "issues" not in ojs or len(ojs["issues"]) == 0: + break + for item in ojs["issues"]: + open_tickets.append(item["key"]) + kibble_bit.pprint("Found %u open tickets" % len(open_tickets)) + start_at += 100 + except: # pylint: disable=bare-except # pylint: disable=bare-except + kibble_bit.pprint("JIRA borked, retrying") + bad_tries += 1 + kibble_bit.pprint("Found %u open tickets" % len(open_tickets)) + + for i in reversed(range(1, last_ticket + 1)): + key = "%s-%u" % (instance, i) + pending_tickets.append([key, u, source]) + + threads = [] + block = threading.Lock() + 
kibble_bit.pprint("Scanning tickets using 4 sub-threads") + for i in range(0, 4): + t = JiraThread( + block, kibble_bit, source, creds, pending_tickets, open_tickets + ) + threads.append(t) + t.start() + + for t in threads: + t.join() + + kibble_bit.pprint("Done scanning %s" % source["sourceURL"]) + + source["steps"]["issues"] = { + "time": time.time(), + "status": "Issue tracker (JIRA) successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/pipermail.py b/kibble/scanners/scanners/pipermail.py new file mode 100644 index 00000000..bf344f9a --- /dev/null +++ b/kibble/scanners/scanners/pipermail.py @@ -0,0 +1,290 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import datetime +import email.errors +import email.header +import email.utils +import hashlib +import mailbox +import os +import re +import time + +from kibble.scanners.utils import urlmisc + +title = "Scanner for GNU Mailman Pipermail" +version = "0.1.0" + + +def accepts(source): + """ Whether or not we think this is pipermail """ + if source["type"] == "pipermail": + return True + if source["type"] == "mail": + url = source["sourceURL"] + pipermail = re.match(r"(https?://.+/(archives|pipermail)/.+?)/?$", url) + if pipermail: + return True + return False + + +def scan(kibble_bit, source): + url = source["sourceURL"] + pipermail = re.match(r"(https?://.+/(archives|pipermail)/.+?)/?$", url) + if pipermail: + kibble_bit.pprint("Scanning Pipermail source %s" % url) + skipped = 0 + + source["steps"]["mail"] = { + "time": time.time(), + "status": "Downloading Pipermail statistics", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + dt = time.gmtime(time.time()) + first_year = 1970 + year = dt[0] + month = dt[1] + if month <= 0: + month += 12 + year -= 1 + months = 0 + + knowns = {} + + # While we have older archives, continue to parse + month_names = [ + "December", + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ] + while first_year <= year: + gzurl = "%s/%04u-%s.txt.gz" % (url, year, month_names[month]) + pd = datetime.date(year, month, 1).timetuple() + dhash = hashlib.sha224( + ("%s %s" % (source["organisation"], gzurl)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + found = kibble_bit.exists("mailstats", dhash) + if ( + months <= 1 or not found + ): # Always parse this month's stats and the previous month :) + months += 1 + mailFile = urlmisc.unzip(gzurl) + if mailFile: + try: + skipped = 0 + messages = mailbox.mbox(mailFile) + + rawtopics = {} + posters = {} + no_posters = 0 + emails = 0 + senders = {} + for message in 
messages: + emails += 1 + sender = message["from"] + name = sender + if ( + not "subject" in message + or not message["subject"] + or not "from" in message + or not message["from"] + ): + continue + + irt = message.get("in-reply-to", None) + if not irt and message.get("references"): + irt = message.get("references").split("\n")[0].strip() + replyto = None + if irt and irt in senders: + replyto = senders[irt] + print("This is a reply to %s" % replyto) + raw_subject = re.sub( + r"^[a-zA-Z]+\s*:\s*", "", message["subject"], count=10 + ) + raw_subject = re.sub( + r"[\r\n\t]+", "", raw_subject, count=10 + ) + if raw_subject not in rawtopics: + rawtopics[raw_subject] = 0 + rawtopics[raw_subject] += 1 + m = re.match( + r"(.+?) at (.+?) \((.*)\)$", + message["from"], + flags=re.UNICODE, + ) + if m: + name = m.group(3).strip() + sender = m.group(1) + "@" + m.group(2) + else: + m = re.match( + r"(.+)\s*<(.+)>", message["from"], flags=re.UNICODE + ) + if m: + name = m.group(1).replace('"', "").strip() + sender = m.group(2) + if sender not in posters: + posters[sender] = {"name": name, "email": sender} + senders[message.get("message-id", "??")] = sender + mdate = email.utils.parsedate_tz(message["date"]) + mdatestring = time.strftime( + "%Y/%m/%d %H:%M:%S", + time.gmtime(email.utils.mktime_tz(mdate)), + ) + if sender not in knowns: + sid = hashlib.sha1( + ("%s%s" % (source["organisation"], sender)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + knowns[sender] = kibble_bit.exists("person", sid) + if sender not in knowns: + kibble_bit.append( + "person", + { + "name": name, + "email": sender, + "organisation": source["organisation"], + "id": hashlib.sha1( + ( + "%s%s" + % (source["organisation"], sender) + ).encode("ascii", errors="replace") + ).hexdigest(), + }, + ) + knowns[sender] = True + jse = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": mdatestring, + "sender": sender, + "replyto": 
replyto, + "subject": message["subject"], + "address": sender, + "ts": email.utils.mktime_tz(mdate), + "id": message["message-id"], + } + kibble_bit.append("email", jse) + + no_posters = len(posters) + topics = len(rawtopics) + i = 0 + for key in reversed(sorted(rawtopics, key=lambda x: x)): + val = rawtopics[key] + i += 1 + if i > 10: + break + kibble_bit.pprint( + "Found top 10: %s (%s emails)" % (key, val) + ) + shash = hashlib.sha224( + key.encode("ascii", errors="replace") + ).hexdigest() + md = time.strftime("%Y/%m/%d %H:%M:%S", pd) + mlhash = hashlib.sha224( + ( + "%s%s%s%s" + % ( + key, + source["sourceURL"], + source["organisation"], + md, + ) + ).encode("ascii", errors="replace") + ).hexdigest() # one unique id per month per mail thread + jst = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": md, + "emails": val, + "shash": shash, + "subject": key, + "ts": time.mktime(pd), + "id": mlhash, + } + kibble_bit.index("mailtop", mlhash, jst) + + jso = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": time.strftime("%Y/%m/%d %H:%M:%S", pd), + "authors": no_posters, + "emails": emails, + "topics": topics, + } + kibble_bit.index("mailstats", dhash, jso) + + os.unlink(mailFile) + except Exception as err: + kibble_bit.pprint( + "Couldn't parse %s, skipping: %s" % (gzurl, err) + ) + skipped += 1 + if skipped > 12: + kibble_bit.pprint( + "12 skips in a row, breaking off (no more data?)" + ) + break + else: + kibble_bit.pprint("Couldn't find %s, skipping." 
% gzurl) + skipped += 1 + if skipped > 12: + kibble_bit.pprint( + "12 skips in a row, breaking off (no more data?)" + ) + break + month -= 1 + if month <= 0: + month += 12 + year -= 1 + + source["steps"]["mail"] = { + "time": time.time(), + "status": "Mail archives successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) + else: + kibble_bit.pprint("Invalid Pipermail URL detected: %s" % url, True) + source["steps"]["mail"] = { + "time": time.time(), + "status": "Invalid or malformed URL detected!", + "running": False, + "good": False, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/ponymail-kpe.py b/kibble/scanners/scanners/ponymail-kpe.py new file mode 100644 index 00000000..218c9daf --- /dev/null +++ b/kibble/scanners/scanners/ponymail-kpe.py @@ -0,0 +1,136 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is a Kibble scanner plugin for Apache Pony Mail sources. 
+""" + +import re +import time + +from kibble.scanners.utils import jsonapi, kpe +from kibble.settings import AZURE_ENABLED, PICOAPI_ENABLED, WATSON_ENABLED + +title = "Key Phrase Extraction plugin for Apache Pony Mail" +version = "0.1.0" +ROBITS = r"(git|gerrit|jenkins|hudson|builds|bugzilla)@" +MAX_COUNT = ( + 100 # Max number of unparsed emails to handle (so we don't max out API credits!) +) + + +def accepts(source): + """ Test if source matches a Pony Mail archive """ + # If the source equals the plugin name, assume a yes + if source["type"] == "ponymail": + return True + + # If it's of type 'mail', check the URL + if source["type"] == "mail": + if re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]): + return True + + # Default to not recognizing the source + return False + + +def scan(kibble_bit, source): + # Validate URL first + url = re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]) + if not url: + kibble_bit.pprint( + "Malformed or invalid Pony Mail URL passed to scanner: %s" + % source["sourceURL"] + ) + source["steps"]["mail"] = { + "time": time.time(), + "status": "Could not parse Pony Mail URL!", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + if not AZURE_ENABLED and not PICOAPI_ENABLED: + kibble_bit.pprint( + "No Azure/picoAPI creds configured, skipping key phrase extraction" + ) + return + + cookie = None + if "creds" in source and source["creds"]: + cookie = source["creds"].get("cookie", None) + + root_url = re.sub(r"list.html.+", "", source["sourceURL"]) + query = { + "query": {"bool": {"must": [{"term": {"sourceID": source["sourceID"]}}]}}, + "sort": [{"ts": "desc"}], + } + + # Get an initial count of commits + res = kibble_bit.broker.DB.search( + index=kibble_bit.dbname, doc_type="email", body=query, size=MAX_COUNT * 4 + ) + ec = 0 + hits = [] + for hit in res["hits"]["hits"]: + eml = hit["_source"] + if not re.search(ROBITS, eml["sender"]): + ec += 1 + if ec > 
MAX_COUNT: + break + if "kpe" not in eml: + emlurl = "%s/api/email.lua?id=%s" % (root_url, eml["id"]) + kibble_bit.pprint("Fetching %s" % emlurl) + rv = None + try: + rv = jsonapi.get(emlurl, cookie=cookie) + if rv and "body" in rv: + hits.append([hit["_id"], rv["body"], eml]) + except Exception as err: + kibble_bit.pprint(f"Server error: {err}, skipping this email") + + bodies = [] + for hit in hits: + body = hit[1] + # bid = hit[0] + bodies.append(body) + if bodies: + KPEs = None + if WATSON_ENABLED: + pass # Haven't written this yet + elif AZURE_ENABLED: + KPEs = kpe.azure_kpe(kibble_bit, bodies) + elif PICOAPI_ENABLED: + KPEs = kpe.pico_kpe(kibble_bit, bodies) + if not KPEs: + kibble_bit.pprint("Hit rate limit, not trying further emails for now.") + + a = 0 + for hit in hits: + kpe_ = KPEs[a] + bid = hit[0] + eml = hit[2] + a += 1 + if not kpe_: + kpe_ = ["_NULL_"] + eml["kpe"] = kpe_ + print("Key phrases for %s: %s" % (bid, ", ".join(kpe_))) + kibble_bit.index("email", bid, eml) + else: + kibble_bit.pprint("No emails to analyze") + kibble_bit.pprint("Done with key phrase extraction") diff --git a/kibble/scanners/scanners/ponymail-tone.py b/kibble/scanners/scanners/ponymail-tone.py new file mode 100644 index 00000000..bdffdca4 --- /dev/null +++ b/kibble/scanners/scanners/ponymail-tone.py @@ -0,0 +1,134 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is a Kibble scanner plugin for Apache Pony Mail sources. +""" +import re +import time + +from kibble.scanners.utils import jsonapi, tone +from kibble.settings import AZURE_ENABLED, PICOAPI_ENABLED, WATSON_ENABLED + +title = "Tone/Mood Scanner plugin for Apache Pony Mail" +version = "0.1.0" +ROBITS = r"(git|gerrit|jenkins|hudson|builds|bugzilla)@" +MAX_COUNT = 250 + + +def accepts(source): + """ Test if source matches a Pony Mail archive """ + # If the source equals the plugin name, assume a yes + if source["type"] == "ponymail": + return True + + # If it's of type 'mail', check the URL + if source["type"] == "mail": + if re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]): + return True + + # Default to not recognizing the source + return False + + +def scan(kibble_bit, source): + # Validate URL first + url = re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]) + if not url: + kibble_bit.pprint( + "Malformed or invalid Pony Mail URL passed to scanner: %s" + % source["sourceURL"] + ) + source["steps"]["mail"] = { + "time": time.time(), + "status": "Could not parse Pony Mail URL!", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + if not WATSON_ENABLED and not AZURE_ENABLED and not PICOAPI_ENABLED: + kibble_bit.pprint( + "No Watson/Azure/picoAPI creds configured, skipping tone analyzer" + ) + return + + cookie = None + if "creds" in source and source["creds"]: + cookie = source["creds"].get("cookie", None) + + root_url = re.sub(r"list.html.+", "", source["sourceURL"]) + 
query = { + "query": {"bool": {"must": [{"term": {"sourceID": source["sourceID"]}}]}}, + "sort": [{"ts": "desc"}], + } + + # Get an initial count of commits + res = kibble_bit.broker.DB.search( + index=kibble_bit.dbname, doc_type="email", body=query, size=MAX_COUNT * 4 + ) + ec = 0 + hits = [] + for hit in res["hits"]["hits"]: + eml = hit["_source"] + if not re.search(ROBITS, eml["sender"]): + ec += 1 + if ec > MAX_COUNT: + break + if "mood" not in eml: + emlurl = "%s/api/email.lua?id=%s" % (root_url, eml["id"]) + kibble_bit.pprint("Fetching %s" % emlurl) + try: + rv = jsonapi.get(emlurl, cookie=cookie) + if rv and "body" in rv: + hits.append([hit["_id"], rv["body"], eml]) + except Exception as err: + kibble_bit.pprint(f"Server error: {err}, skipping this email") + + bodies = [] + for hit in hits: + body = hit[1] + # bid = hit[0] + bodies.append(body) + if bodies: + moods = None + if WATSON_ENABLED: + moods = tone.watson_tone(kibble_bit, bodies) + elif AZURE_ENABLED: + moods = tone.azure_tone(kibble_bit, bodies) + elif PICOAPI_ENABLED: + moods = tone.pico_tone(kibble_bit, bodies) + if not moods: + kibble_bit.pprint("Hit rate limit, not trying further emails for now.") + + a = 0 + for hit in hits: + mood = moods[a] + bid = hit[0] + eml = hit[2] + a += 1 + eml["mood"] = mood + hm = [0, "unknown"] + for m, s in mood.items(): + if s > hm[0]: + hm = [s, m] + print("Likeliest overall mood for %s: %s" % (bid, hm[1])) + kibble_bit.index("email", bid, eml) + else: + kibble_bit.pprint("No emails to analyze") + kibble_bit.pprint("Done with tone analysis") diff --git a/kibble/scanners/scanners/ponymail.py b/kibble/scanners/scanners/ponymail.py new file mode 100644 index 00000000..b6586537 --- /dev/null +++ b/kibble/scanners/scanners/ponymail.py @@ -0,0 +1,307 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is a Kibble scanner plugin for Apache Pony Mail sources. +""" + +import datetime +import hashlib +import re +import time + +from kibble.scanners.utils import jsonapi + +title = "Scanner plugin for Apache Pony Mail" +version = "0.1.0" + + +def accepts(source): + """ Test if source matches a Pony Mail archive """ + # If the source equals the plugin name, assume a yes + if source["type"] == "ponymail": + return True + + # If it's of type 'mail', check the URL + if source["type"] == "mail": + if re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]): + return True + + # Default to not recognizing the source + return False + + +def count_subs(struct, kids=0): + """ Counts replies in a thread """ + if "children" in struct and len(struct["children"]) > 0: + for child in struct["children"]: + kids += 1 + kids += count_subs(child) + return kids + + +def replied_to(emails, struct): + my_list = {} + for eml in struct: + my_id = eml["tid"] + if "children" in eml: + for child in eml["children"]: + my_list[child["tid"]] = my_id + if len(child["children"]) > 0: + c_list = replied_to(emails, child["children"]) + my_list.update(c_list) + return my_list + + +def get_sender(email): + sender = email["from"] + m = re.match(r"(.+)\s*<(.+)>", email["from"], flags=re.UNICODE) + if m: + # name = m.group(1).replace('"', "").strip() + sender = m.group(2) + return sender + + +def scan(kibble_bit, 
source): + # Validate URL first + url = re.match(r"(https?://.+)/list\.html\?(.+)@(.+)", source["sourceURL"]) + if not url: + kibble_bit.pprint( + "Malformed or invalid Pony Mail URL passed to scanner: %s" + % source["sourceURL"] + ) + source["steps"]["mail"] = { + "time": time.time(), + "status": "Could not parse Pony Mail URL!", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + # Pony Mail requires a UI cookie in order to work. Maked sure we have one! + cookie = None + if "creds" in source and source["creds"]: + cookie = source["creds"].get("cookie", None) + if not cookie: + kibble_bit.pprint( + "Pony Mail instance at %s requires an authorized cookie, none found! Bailing." + % source["sourceURL"] + ) + source["steps"]["mail"] = { + "time": time.time(), + "status": "No authorized cookie found in source object.", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + # Notify scanner and DB that this is valid and we've begun parsing + kibble_bit.pprint("%s is a valid Pony Mail address, parsing" % source["sourceURL"]) + source["steps"]["mail"] = { + "time": time.time(), + "status": "Downloading Pony Mail statistics", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Get base URL, list and domain to parse + u = url.group(1) + l = url.group(2) + d = url.group(3) + + # Get this month + dt = time.gmtime(time.time()) + first_year = 1970 + year = dt[0] + month = dt[1] + if month <= 0: + month += 12 + year -= 1 + months = 0 + + # Hash for keeping records of who we know + knowns = {} + + # While we have older archives, continue to parse + while first_year <= year: + statsurl = "%s/api/stats.lua?list=%s&domain=%s&d=%s" % ( + u, + l, + d, + "%04u-%02u" % (year, month), + ) + dhash = hashlib.sha224( + ("%s %s" % (source["organisation"], statsurl)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + found = False + if kibble_bit.exists("mailstats", dhash): + found = True 
+ if months <= 1 or not found: # Always parse this month's stats :) + months += 1 + kibble_bit.pprint("Parsing %04u-%02u" % (year, month)) + kibble_bit.pprint(statsurl) + pd = datetime.date(year, month, 1).timetuple() + try: + js = jsonapi.get(statsurl, cookie=cookie) + except Exception as err: + kibble_bit.pprint(f"Server error: {err}, skipping this month") + month -= 1 + if month <= 0: + month += 12 + year -= 1 + continue + if "firstYear" in js: + first_year = js["firstYear"] + # print("First Year is %u" % firstYear) + else: + kibble_bit.pprint("JSON was missing fields, aborting!") + break + reply_list = replied_to(js["emails"], js["thread_struct"]) + topics = js["no_threads"] + posters = {} + no_posters = 0 + emails = len(js["emails"]) + top10 = [] + for eml in js["thread_struct"]: + count = count_subs(eml, 0) + subject = "" + for reml in js["emails"]: + if reml["id"] == eml["tid"]: + subject = reml["subject"] + break + if len(subject) > 0 and count > 0: + subject = re.sub( + r"^((re|fwd|aw|fw):\s*)+", "", subject, flags=re.IGNORECASE + ) + subject = re.sub(r"[\r\n\t]+", "", subject, count=20) + emlid = hashlib.sha1( + subject.encode("ascii", errors="replace") + ).hexdigest() + top10.append([emlid, subject, count]) + i = 0 + for top in reversed(sorted(top10, key=lambda x: x[2])): + i += 1 + if i > 10: + break + kibble_bit.pprint("Found top 10: %s (%s emails)" % (top[1], top[2])) + md = time.strftime("%Y/%m/%d %H:%M:%S", pd) + mlhash = hashlib.sha224( + ( + "%s%s%s%s" + % (top[0], source["sourceURL"], source["organisation"], md) + ).encode("ascii", errors="replace") + ).hexdigest() # one unique id per month per mail thread + jst = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": md, + "emails": top[2], + "shash": top[0], + "subject": top[1], + "ts": time.mktime(pd), + "id": mlhash, + } + kibble_bit.index("mailtop", mlhash, jst) + + for email in js["emails"]: + sender = email["from"] + name 
= sender + m = re.match(r"(.+)\s*<(.+)>", email["from"], flags=re.UNICODE) + if m: + name = m.group(1).replace('"', "").strip() + sender = m.group(2) + if sender not in posters: + posters[sender] = {"name": name, "email": sender} + if sender not in knowns: + sid = hashlib.sha1( + ("%s%s" % (source["organisation"], sender)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + if kibble_bit.exists("person", sid): + knowns[sender] = True + if sender not in knowns or name != sender: + kibble_bit.append( + "person", + { + "upsert": True, + "name": name, + "email": sender, + "organisation": source["organisation"], + "id": hashlib.sha1( + ("%s%s" % (source["organisation"], sender)).encode( + "ascii", errors="replace" + ) + ).hexdigest(), + }, + ) + knowns[sender] = True + reply_to = None + if email["id"] in reply_list: + rt = reply_list[email["id"]] + for eml in js["emails"]: + if eml["id"] == rt: + reply_to = get_sender(eml) + print("Email was reply to %s" % sender) + jse = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime(email["epoch"]) + ), + "sender": sender, + "address": sender, + "subject": email["subject"], + "replyto": reply_to, + "ts": email["epoch"], + "id": email["id"], + "upsert": True, + } + kibble_bit.append("email", jse) + no_posters = len(posters) + + jso = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "date": time.strftime("%Y/%m/%d %H:%M:%S", pd), + "authors": no_posters, + "emails": emails, + "topics": topics, + } + # print("Indexing as %s" % dhash) + kibble_bit.index("mailstats", dhash, jso) + month -= 1 + if month <= 0: + month += 12 + year -= 1 + + source["steps"]["mail"] = { + "time": time.time(), + "status": "Mail archives successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } 
+ kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/travis.py b/kibble/scanners/scanners/travis.py new file mode 100644 index 00000000..461d41e9 --- /dev/null +++ b/kibble/scanners/scanners/travis.py @@ -0,0 +1,376 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is the Kibble Travis CI scanner plugin. +""" + +import datetime +import hashlib +import re +import threading +import time + +import requests +import requests.exceptions + +title = "Scanner for Travis CI" +version = "0.1.0" + + +def accepts(source): + """ Determines whether we want to handle this source """ + if source["type"] == "travis": + return True + return False + + +def scan_job(kibble_bit, source, bid, token, TLD): + """ Scans a single job for activity """ + # Get the job data + pages = 0 + offset = 0 + last_page = False + o_url = "https://api.travis-ci.%s/repo/%s/builds" % (TLD, bid) + + # For as long as pagination makes sense... 
+ while not last_page: + b_url = "https://api.travis-ci.%s/repo/%s/builds?limit=100&offset=%u" % ( + TLD, + bid, + offset, + ) + kibble_bit.pprint("Scanning %s" % b_url) + rv = requests.get( + b_url, + headers={"Travis-API-Version": "3", "Authorization": "token %s" % token}, + ) + if rv.status_code == 200: + repojs = rv.json() + # If travis tells us it's the last page, trust it. + if repojs["@pagination"]["is_last"]: + kibble_bit.pprint( + "Assuming this is the last page we need (travis says so)" + ) + last_page = True + + kibble_bit.pprint( + "%s has %u builds done" % (b_url, repojs["@pagination"]["count"]) + ) + + # BREAKER: If we go past count somehow, and travis doesn't say so, bork anyway + if repojs["@pagination"]["count"] < offset: + return True + + offset += 100 + for build in repojs.get("builds", []): + build_id = build["id"] + build_project = build["repository"]["slug"] + started_at = build["started_at"] + finished_at = build["finished_at"] + duration = build["duration"] + completed = bool(duration) + duration = duration or 0 + + buildhash = hashlib.sha224( + ( + "%s-%s-%s-%s" + % (source["organisation"], source["sourceURL"], bid, build_id) + ).encode("ascii", errors="replace") + ).hexdigest() + builddoc = None + try: + builddoc = kibble_bit.get("ci_build", buildhash) + except: # pylint: disable=bare-except # pylint: disable=bare-except + pass + + # If this build already completed, no need to parse it again + if builddoc and builddoc.get("completed", False): + # If we're on page > 1 and we've seen a completed build, assume + # that we don't need the older ones + if pages > 1: + kibble_bit.pprint( + "Assuming this is the last page we need (found completed build on page > 1)" + ) + last_page = True + break + continue + + # Get build status (success, failed, canceled etc) + status = "building" + if build["state"] in ["finished", "passed"]: + status = "success" + if build["state"] in ["failed", "errored"]: + status = "failed" + if build["state"] in 
["aborted", "canceled"]: + status = "aborted" + + fin = 0 + sta = 0 + if finished_at: + fin = datetime.datetime.strptime( + finished_at, "%Y-%m-%dT%H:%M:%SZ" + ).timestamp() + if started_at: + sta = int( + datetime.datetime.strptime( + started_at, "%Y-%m-%dT%H:%M:%SZ" + ).timestamp() + ) + + # We don't know how to calc queues yet, set to 0 + queuetime = 0 + + doc = { + # Build specific data + "id": buildhash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(fin)), + "buildID": build_id, + "completed": completed, + "duration": duration * 1000, + "job": build_project, + "jobURL": o_url, + "status": status, + "started": sta, + "ci": "travis", + "queuetime": queuetime, + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + kibble_bit.append("ci_build", doc) + pages += 1 + else: + # We hit a snag, abort! + kibble_bit.pprint("Travis returned a non-200 response, aborting.") + return False + + return True + + +class TravisThread(threading.Thread): + """ Generic thread class for scheduling multiple scans at once """ + + def __init__(self, block, kibble_bit, source, token, jobs, TLD): + super().__init__() + self.block = block + self.kibble_bit = kibble_bit + self.token = token + self.source = source + self.jobs = jobs + self.tld = TLD + + def run(self): + bad_ones = 0 + while len(self.jobs) > 0 and bad_ones <= 50: + self.block.acquire() + try: + job = self.jobs.pop(0) + except Exception: + self.block.release() + return + if not job: + self.block.release() + return + self.block.release() + if not scan_job(self.kibble_bit, self.source, job, self.token, self.tld): + self.kibble_bit.pprint("[%s] This borked, trying another one" % job) + bad_ones += 1 + if bad_ones > 100: + self.kibble_bit.pprint("Too many errors, bailing!") + self.source["steps"]["travis"] = { + "time": time.time(), + "status": "Too many errors while parsing at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + 
"running": False, + "good": False, + } + self.kibble_bit.update_source(self.source) + return + else: + bad_ones = 0 + + +def scan(kibble_bit, source): + # Simple URL check + travis = re.match(r"https?://travis-ci\.(org|com)", source["sourceURL"]) + if travis: + # Is this travs-ci.org or travis-ci.com - we need to know! + tld = travis.group(1) + source["steps"]["travis"] = { + "time": time.time(), + "status": "Parsing Travis job changes...", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + pending_jobs = [] + kibble_bit.pprint("Parsing Travis activity at %s" % source["sourceURL"]) + source["steps"]["travis"] = { + "time": time.time(), + "status": "Downloading changeset", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + + # Travis needs a token + if ( + source["creds"] + and "token" in source["creds"] + and source["creds"]["token"] + and len(source["creds"]["token"]) > 0 + ): + token = source["creds"]["token"] + else: + kibble_bit.pprint("Travis CI requires a token to work!") + return False + + # Used for pagination + jobs = 100 + offset = 0 + + # Counters; builds queued, running and total jobs + queued = 0 # We don't know how to count this yet + building = 0 + total = 0 + blocked = 0 # Dunno how to count yet + stuck = 0 # Ditto + avgqueuetime = 0 # Ditto, fake it + + maybe_queued = [] + while jobs == 100: + url = ( + "https://api.travis-ci.%s/repos?repository.active=true&sort_by=current_build:desc&offset=%u&limit=100&include=repository.last_started_build" + % (tld, offset) + ) + offset += 100 + r = requests.get( + url, + headers={ + "Travis-API-Version": "3", + "Authorization": "token %s" % token, + }, + ) + + if r.status_code != 200: + kibble_bit.pprint("Travis did not return a 200 Okay, bad token?!") + + source["steps"]["travis"] = { + "time": time.time(), + "status": "Travis CI scan failed at " + + time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime(time.time()) + ". Bad token??!" 
+ ), + "running": False, + "good": False, + } + kibble_bit.update_source(source) + return + + # For each build job + js = r.json() + for repo in js["repositories"]: + total += 1 + cb = repo.get("last_started_build") + if cb: + # Is the build currently running? + if cb["state"] in ["started", "created", "queued", "pending"]: + for job in cb.get("jobs", []): + maybe_queued.append(job["id"]) + + # Queue up build jobs for the threaded scanner + bid = repo["id"] + pending_jobs.append(bid) + + jobs = len(js["repositories"]) + kibble_bit.pprint("Scanned %u jobs..." % total) + + # Find out how many building and pending jobs + for job_id in maybe_queued: + url = "https://api.travis-ci.%s/job/%u" % (tld, job_id) + r = requests.get( + url, + headers={ + "Travis-API-Version": "3", + "Authorization": "token %s" % token, + }, + ) + if r.status_code == 200: + jobjs = r.json() + if jobjs["state"] == "started": + building += 1 + kibble_bit.pprint("Job %u is building" % job_id) + elif jobjs["state"] in ["created", "queued", "pending"]: + queued += 1 + blocked += 1 # Queued in Travis generally means a job can't find an executor, and thus is blocked. + kibble_bit.pprint("Job %u is pending" % job_id) + kibble_bit.pprint("%u building, %u queued..." 
% (building, queued)) + + # Save queue snapshot + now = int(datetime.datetime.utcnow().timestamp()) + queuehash = hashlib.sha224( + ( + "%s-%s-queue-%s" + % (source["organisation"], source["sourceURL"], int(time.time())) + ).encode("ascii", errors="replace") + ).hexdigest() + + # Write up a queue doc + queuedoc = { + "id": queuehash, + "date": time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(now)), + "time": now, + "building": building, + "size": queued, + "blocked": blocked, + "stuck": stuck, + "avgwait": avgqueuetime, + "ci": "travis", + # Standard docs values + "sourceID": source["sourceID"], + "organisation": source["organisation"], + "upsert": True, + } + kibble_bit.append("ci_queue", queuedoc) + + kibble_bit.pprint("Found %u jobs in Travis" % len(pending_jobs)) + + threads = [] + block = threading.Lock() + kibble_bit.pprint("Scanning jobs using 4 sub-threads") + for i in range(0, 4): + t = TravisThread(block, kibble_bit, source, token, pending_jobs, tld) + threads.append(t) + t.start() + + for t in threads: + t.join() + + # We're all done, yaay + kibble_bit.pprint("Done scanning %s" % source["sourceURL"]) + + source["steps"]["travis"] = { + "time": time.time(), + "status": "Travis successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/scanners/twitter.py b/kibble/scanners/scanners/twitter.py new file mode 100644 index 00000000..a11f271c --- /dev/null +++ b/kibble/scanners/scanners/twitter.py @@ -0,0 +1,150 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is a Kibble scanner plugin for Twitter sources. +""" +import hashlib +import time + +import twitter + +title = "Scanner plugin for Twitter" +version = "0.1.0" + + +def accepts(source): + """ Test if source matches a Twitter handle """ + # If the source equals the plugin name, assume a yes + if source["type"] == "twitter": + return True + + # Default to not recognizing the source + return False + + +def get_followers(kibble_bit, source, t): + """ Get followers of a handle, store them for mapping and trend purposes""" + # Get our twitter handle + handle = source["sourceURL"] + + # Get number of followers + tuser = t.GetUser(screen_name=handle) + no_followers = tuser.followers_count + d = time.strftime("%Y/%m/%d 0:00:00", time.gmtime()) # Today at midnight + dhash = hashlib.sha224( + ("twitter:%s:%s:%s" % (source["organisation"], source["sourceURL"], d)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + jst = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "id": dhash, + "followers": no_followers, + "date": d, + } + kibble_bit.pprint("%s has %u followers currently." % (handle, no_followers)) + kibble_bit.index("twitter_followers", dhash, jst) + + # Collect list of current followers + followers = t.GetFollowers(screen_name=handle) + + # For each follower, if they're not mapped yet, add them + # This has a limitation of 100 new added per run, but meh... 
+ kibble_bit.pprint("Looking up followers of %s" % handle) + for follower in followers: + # id, name, screen_name are useful here + kibble_bit.pprint("Found %s as follower" % follower.screen_name) + + # Store twitter follower profile if not already logged + dhash = hashlib.sha224( + ("twitter:%s:%s:%s" % (source["organisation"], handle, follower.id)).encode( + "ascii", errors="replace" + ) + ).hexdigest() + if not kibble_bit.exists("twitter_follow", dhash): + jst = { + "organisation": source["organisation"], + "sourceURL": source["sourceURL"], + "sourceID": source["sourceID"], + "twitterid": follower.id, + "name": follower.name, + "screenname": follower.screen_name, + "id": dhash, + "date": time.strftime( + "%Y/%m/%d %H:%M:%S", time.gmtime() + ), # First time we spotted them following. + } + kibble_bit.pprint( + "%s is new, recording date and details." % follower.screen_name + ) + kibble_bit.index("twitter_follow", dhash, jst) + + +def scan(kibble_bit, source): + source["steps"]["twitter"] = { + "time": time.time(), + "status": "Scanning Twitter activity and status", + "running": True, + "good": True, + } + kibble_bit.update_source(source) + t = None + if "creds" in source and source["creds"]: + t = twitter.Api( + access_token_key=source["creds"].get("token", None), + access_token_secret=source["creds"].get("token_secret", None), + consumer_key=source["creds"].get("consumer_key", None), + consumer_secret=source["creds"].get("consumer_secret", None), + ) + kibble_bit.pprint("Verifying twitter credentials...") + try: + t.VerifyCredentials() + except: # pylint: disable=bare-except # pylint: disable=bare-except + source["steps"]["twitter"] = { + "time": time.time(), + "status": "Could not verify twitter credentials", + "running": False, + "good": False, + } + kibble_bit.update_source(source) + kibble_bit.pprint("Could not verify twitter creds, aborting!") + return + # Start by getting and saving followers + try: + get_followers(kibble_bit, source, t) + except Exception 
as err: + source["steps"]["twitter"] = { + "time": time.time(), + "status": "Could not scan Twitter: %s" % err, + "running": False, + "good": False, + } + kibble_bit.update_source(source) + kibble_bit.pprint("Twitter scan failed: %s" % err) + + # All done, report that! + source["steps"]["twitter"] = { + "time": time.time(), + "status": "Twitter successfully scanned at " + + time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time())), + "running": False, + "good": True, + } + kibble_bit.update_source(source) diff --git a/kibble/scanners/utils/__init__.py b/kibble/scanners/utils/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/kibble/scanners/utils/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/kibble/scanners/utils/git.py b/kibble/scanners/utils/git.py new file mode 100644 index 00000000..bd0e5fa7 --- /dev/null +++ b/kibble/scanners/utils/git.py @@ -0,0 +1,91 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" This is the Kibble git utility plugin """ + +import re +import subprocess + +from kibble.configuration import conf + + +def default_branch(source, datapath): + """ Tries to figure out what the main branch of a repo is """ + # If we have an override of branches we like, use 'em + wanted_branches = conf.get("git", "wanted_branches", fallback=None) + if wanted_branches: + wanted_branches = wanted_branches.split(",") + else: + wanted_branches = ["master", "main", "trunk"] + + # For each wanted branch, in order, look for it in our clone, + # and return the name if found. + for B in wanted_branches: + try: + branch = ( + subprocess.check_output( + "cd %s && git rev-parse --abbrev-ref %s" % (datapath, B), + shell=True, + stderr=subprocess.DEVNULL, + ) + .decode("ascii", "replace") + .strip() + .strip("* ") + ) + return branch + except: # pylint: disable=bare-except + pass + # If we couldn't find it locally, looking at all (local+remote) + try: + inp = ( + subprocess.check_output( + r"cd %s && git branch -a | awk -F ' +' '! 
/\(no branch\)/ {print $2}'" + % datapath, + shell=True, + stderr=subprocess.DEVNULL, + ) + .decode("ascii", "replace") + .split() + ) + if len(inp) > 0: + for b in sorted(inp): + if b.find("detached") == -1: + branch = str(b.replace("remotes/origin/", "", 1)) + for B in wanted_branches: + if branch == B: + return branch + except: # pylint: disable=bare-except + pass + + # If still not found, resort to whatever branch comes first in the remote listing... + inp = ( + subprocess.check_output( + "cd %s && git ls-remote --heads %s" % (datapath, source["sourceURL"]), + shell=True, + stderr=subprocess.DEVNULL, + ) + .decode("ascii", "replace") + .split() + ) + if len(inp) > 0: + for remote in inp: + m = re.match(r"[a-f0-9]+\s+refs/heads/(?:remotes/)?(.+)", remote) + if m: + branch = m.group(1) + return branch + # Give up + return "" diff --git a/kibble/scanners/utils/github.py b/kibble/scanners/utils/github.py new file mode 100644 index 00000000..251e9803 --- /dev/null +++ b/kibble/scanners/utils/github.py @@ -0,0 +1,101 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +""" GitHub utility library """ +import re +import time + +import requests + +repo_pattern = re.compile(".*[:/]([^/]+)/([^/]+).git") +issues_api = "https://api.github.com/repos/%s/%s/issues" +traffic_api = "https://api.github.com/repos/%s/%s/traffic" +popular_api = "https://api.github.com/repos/%s/%s/popular" +rate_limit_api = "https://api.github.com/rate_limit" + + +def get_limited(url, params=None, auth=None): + """Get a GitHub API response, keeping in mind that we may be rate-limited by the abuse system""" + number_of_retries = 0 + resp = requests.get(url, params=params, auth=auth) + while resp.status_code == 403 and number_of_retries < 20: + js = resp.json() + # If abuse-detection kicks in, sleep it off + if "You have triggered an abuse" in js["message"]: + time.sleep(5) + number_of_retries += 1 + resp = requests.get(url, params=params, auth=auth) + else: + break + resp.raise_for_status() + return resp.json() + + +def get_tokens_left(auth=None): + """ Gets number of GitHub tokens left this hour... 
""" + js = get_limited(rate_limit_api, auth=auth) + tokens_left = js["rate"]["remaining"] + return tokens_left + + +def issues(source, params=None, auth=None): + if params is None: + params = {} + local_params = {"per_page": 100, "page": 1} + local_params.update(params) + + repo_user = repo_pattern.findall(source["sourceURL"])[0] + return get_limited(issues_api % repo_user, params=local_params, auth=auth) + + +def views(source, auth=None): + repo_user = repo_pattern.findall(source["sourceURL"])[0] + return get_limited("%s/views" % (traffic_api % repo_user), auth=auth) + + +def clones(source, auth=None): + repo_user = repo_pattern.findall(source["sourceURL"])[0] + return get_limited("%s/clones" % (traffic_api % repo_user), auth=auth) + + +def referrers(source, auth=None): + repo_user = repo_pattern.findall(source["sourceURL"])[0] + return get_limited("%s/referrers" % (popular_api % repo_user), auth=auth) + + +def user(user_url, auth=None): + return get_limited(user_url, auth=auth) + + +def get_all(source, f, params=None, auth=None): + if params is None: + params = {} + acc = [] + page = params.get("page", 1) + + while True: + time.sleep(1.5) + items = f(source, params=params, auth=auth) + if not items: + break + + acc.extend(items) + + page += 1 + params.update({"page": page}) + + return acc diff --git a/kibble/scanners/utils/jsonapi.py b/kibble/scanners/utils/jsonapi.py new file mode 100644 index 00000000..64e7a7c5 --- /dev/null +++ b/kibble/scanners/utils/jsonapi.py @@ -0,0 +1,109 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is a Kibble JSON API plugin. +""" +import base64 +import time + +import requests + +CONNECT_TIMEOUT = 2 # Max timeout for the connect part of a request. + + +# Should be set low as it may otherwise freeze the scanner. +def get(url, cookie=None, auth=None, token=None, retries=5, timeout=30): + headers = { + "Content-type": "application/json", + "Accept": "application/json", + "User-Agent": "Apache Kibble", + } + if auth: + xcreds = auth.encode(encoding="ascii", errors="replace") + bauth = ( + base64.encodebytes(xcreds) + .decode("ascii", errors="replace") + .replace("\n", "") + ) + headers["Authorization"] = "Basic %s" % bauth + if token: + headers["Authorization"] = "token %s" % token + if cookie: + headers["Cookie"] = cookie + rv = requests.get(url, headers=headers, timeout=(CONNECT_TIMEOUT, timeout)) + # Some services may be rate limited. We'll try sleeping it off in 60 second + # intervals for a max of five minutes, then give up. 
+ if rv.status_code == 429: + if retries > 0: + time.sleep(60) + retries -= 1 + return get( + url, + cookie=cookie, + auth=auth, + token=token, + retries=retries, + timeout=timeout, + ) + if rv.status_code < 400: + return rv.json() + raise requests.exceptions.ConnectionError( + "Could not fetch JSON, server responded with status code %u" % rv.status_code, + response=rv, + ) + + +def gettxt(url, cookie=None, auth=None): + """ Same as above, but returns as text blob """ + headers = {"Content-type": "application/json", "Accept": "*/*"} + if auth: + xcreds = auth.encode(encoding="ascii", errors="replace") + bauth = ( + base64.encodebytes(xcreds) + .decode("ascii", errors="replace") + .replace("\n", "") + ) + headers["Authorization"] = "Basic %s" % bauth + if cookie: + headers["Cookie"] = cookie + rv = requests.get(url, headers=headers) + js = rv.text + if rv.status_code != 404: + return js + return None + + +def post(url, data, cookie=None, auth=None): + headers = { + "Content-type": "application/json", + "Accept": "*/*", + "User-Agent": "Apache Kibble", + } + if auth: + xcreds = auth.encode(encoding="ascii", errors="replace") + bauth = ( + base64.encodebytes(xcreds) + .decode("ascii", errors="replace") + .replace("\n", "") + ) + headers["Authorization"] = "Basic %s" % bauth + if cookie: + headers["Cookie"] = cookie + rv = requests.post(url, headers=headers, json=data) + js = rv.json() + return js diff --git a/kibble/scanners/utils/kpe.py b/kibble/scanners/utils/kpe.py new file mode 100644 index 00000000..5ce3886e --- /dev/null +++ b/kibble/scanners/utils/kpe.py @@ -0,0 +1,167 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is an experimental key phrase extraction plugin for using +Azure/picoAPI for analyzing the key elements of an email on a list. This +requires an account with a text analysis service provider, and a +corresponding API section in kibble.ini, as such: + +# picoAPI example: +[picoapi] +key = abcdef1234567890 + +# Azure example: +[azure] +apikey = abcdef1234567890 +location = westeurope + +Currently only pony mail is supported. more to come. +""" + +import json +import re + +import requests + +from kibble.configuration import conf + + +def trim_body(body): + """ Quick function for trimming away the fat from emails """ + # Cut away "On $date, jane doe wrote: " kind of texts + body = re.sub( + r"(((?:\r?\n|^)((on .+ wrote:[\r\n]+)|(sent from my .+)|(>+[ \t]*[^\r\n]*\r?\n[^\n]*\n*)+)+)+)", + "", + body, + flags=re.I | re.M, + ) + + # Crop out quotes + lines = body.split("\n") + body = "\n".join([x for x in lines if not x.startswith(">")]) + + # Remove hyperlinks + body = re.sub(r"[a-z]+://\S+", "", body) + + # Remove email addresses + body = re.sub(r"(<[^>]+>\s*\S+@\S+)", "", body) + body = re.sub(r"(\S+@\S+)", "", body) + return body + + +def azure_kpe(kibble_bit, bodies): + """ KPE using Azure Text Analysis API """ + headers = { + "Content-Type": "application/json", + "Ocp-Apim-Subscription-Key": conf.get("azure", "apikey"), + } + + js = {"documents": []} + + # For each body... 
+ a = 0 + KPEs = [] + for body in bodies: + # Crop out quotes + body = trim_body(body) + doc = {"language": "en", "id": str(a), "text": body} + js["documents"].append(doc) + KPEs.append({}) # placeholder for each doc, to be replaced + a += 1 + try: + rv = requests.post( + "https://%s.api.cognitive.microsoft.com/text/analytics/v2.0/keyPhrases" + % conf.get("azure", "location"), + headers=headers, + data=json.dumps(js), + ) + jsout = rv.json() + except: # pylint: disable=bare-except + jsout = {} # borked sentiment analysis? + + if "documents" in jsout and len(jsout["documents"]) > 0: + for doc in jsout["documents"]: + KPEs[int(doc["id"])] = doc["keyPhrases"][ + :5 + ] # Replace KPEs[X] with the actual phrases, 5 first ones. + + else: + kibble_bit.pprint("Failed to analyze email body.") + print(jsout) + # Depending on price tier, Azure will return a 429 if you go too fast. + # If we see a statusCode return, let's just stop for now. + # Later scans can pick up the slack. + if "statusCode" in jsout: + kibble_bit.pprint("Possible rate limiting in place, stopping for now.") + return False + return KPEs + + +def pico_kpe(kibble_bit, bodies): + """ KPE using picoAPI Text Analysis """ + headers = { + "Content-Type": "application/json", + "PicoAPI-Key": conf.get("picoapi", "key"), + } + + js = {"texts": []} + + # For each body... + a = 0 + KPEs = [] + for body in bodies: + body = trim_body(body) + + doc = {"id": str(a), "body": body} + js["texts"].append(doc) + KPEs.append({}) # placeholder for each doc, to be replaced + a += 1 + try: + rv = requests.post( + "https://v1.picoapi.com/api/text/keyphrase", + headers=headers, + data=json.dumps(js), + ) + jsout = rv.json() + except: # pylint: disable=bare-except + jsout = {} # borked sentiment analysis? 
+ + if "results" in jsout and len(jsout["results"]) > 0: + for doc in jsout["results"]: + phrases = [] + # This is a bit different than Azure, in that it has a weighting score + # So we need to just extract key phrases above a certain level. + # Grab up o 5 key phrases per text + MINIMUM_WEIGHT = 0.02 + for element in doc["keyphrases"]: + if element["score"] > MINIMUM_WEIGHT: + phrases.append(element["phrase"]) + if len(phrases) == 5: + break + KPEs[int(doc["id"])] = phrases # Replace KPEs[X] with the actual phrases + + else: + kibble_bit.pprint("Failed to analyze email body.") + print(jsout) + # 403 returned on invalid key, 429 on rate exceeded. + # If we see a code return, let's just stop for now. + # Later scans can pick up the slack. + if "code" in jsout: + kibble_bit.pprint("Possible rate limiting in place, stopping for now.") + return False + return KPEs diff --git a/kibble/scanners/utils/sloc.py b/kibble/scanners/utils/sloc.py new file mode 100644 index 00000000..850ab13e --- /dev/null +++ b/kibble/scanners/utils/sloc.py @@ -0,0 +1,78 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +""" This is the SLoC counter utility for Kibble """ + +import multiprocessing +import re +import subprocess + + +def count(path): + """ Count lines of Code """ + # We determine how many cores there are, and adjust the + # process count based on that. Max 4 procs. + my_core_count = min((4, int(multiprocessing.cpu_count()))) + inp = subprocess.check_output( + "cloc --quiet --progress-rate=0 --processes=%u %s" % (my_core_count, path), + shell=True, + ).decode("ascii", "replace") + m = re.search( + r".*Language\s+files\s+blank\s+comment\s+code[\s\S]+?-+([\s\S]+?)-+[\s\S]+?SUM:\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)", + inp, + flags=re.MULTILINE | re.UNICODE, + ) + languages = {} + ccount = 0 + years = 0 + cost = 0 + codecount = "" + comment = "" + blank = "" + if m: + lingos = m.group(1) + fcount = m.group(2) + blank = m.group(3) + comment = m.group(4) + codecount = m.group(5) + for lm in re.finditer( + r"([A-Za-z +-/0-9]+)\s+\d+\s+(\d+)\s+(\d+)\s+(\d+)", lingos + ): + lang = lm.group(1).replace(" Header", "").lower() + lang = re.sub(r"\s\s+", "", lang) + lang = re.sub(r"^[Cc]\\?/", "", lang) + lang = lang.replace(".", "_") + if len(lang) > 0: + C = 0 + D = 0 + E = 0 + if lang in languages: + C = languages[lang]["code"] + D = languages[lang]["comment"] + E = languages[lang]["blank"] + languages[lang] = { + "code": int(lm.group(4)) + C, + "comment": int(lm.group(3)) + D, + "blank": int(lm.group(2)) + E, + } + ccount = int(codecount.replace(",", "")) + int(comment.replace(",", "")) + codecount = int(codecount.replace(",,", "")) + blank = int(blank.replace(",,", "")) + comment = int(comment.replace(",,", "")) + years = ccount / 3300.0 + cost = years * 72000 + return [languages, codecount, comment, blank, years, cost] diff --git a/kibble/scanners/utils/tone.py b/kibble/scanners/utils/tone.py new file mode 100644 index 00000000..6df8fb17 --- /dev/null +++ b/kibble/scanners/utils/tone.py @@ -0,0 +1,190 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more 
contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +This is an experimental tone analyzer plugin for using Watson/BlueMix for +analyzing the mood of email on a list. This requires a Watson account +and a watson section in kibble.ini, as such: + +[watson] +username = $user +password = $pass +api = https://$something.watsonplatform.net/tone-analyzer/api + +Currently only pony mail is supported. more to come. +""" + +import json + +import requests + +from kibble.configuration import conf + + +def watson_tone(kibble_bit, bodies): + """ Sentiment analysis using IBM Watson """ + headers = {"Content-Type": "application/json"} + + # Crop out quotes + for body in bodies: + lines = body.split("\n") + body = "\n".join([x for x in lines if not x.startswith(">")]) + + js = {"text": body} + try: + rv = requests.post( + "%s/v3/tone?version=2017-09-21&sentences=false" + % conf.get("watson", "api"), + headers=headers, + data=json.dumps(js), + auth=( + conf.get("watson", "username"), + conf.get("watson", "password"), + ), + ) + jsout = rv.json() + except: # pylint: disable=bare-except + jsout = {} # borked Watson? 
+ mood = {} + if "document_tone" in jsout: + for tone in jsout["document_tone"]["tones"]: + mood[tone["tone_id"]] = tone["score"] + else: + kibble_bit.pprint("Failed to analyze email body.") + yield mood + + +def azure_tone(kibble_bit, bodies): + """ Sentiment analysis using Azure Text Analysis API """ + headers = { + "Content-Type": "application/json", + "Ocp-Apim-Subscription-Key": conf.get("azure", "apikey"), + } + + js = {"documents": []} + + # For each body... + a = 0 + moods = [] + for body in bodies: + # Crop out quotes + lines = body.split("\n") + body = "\n".join([x for x in lines if not x.startswith(">")]) + doc = {"language": "en", "id": str(a), "text": body} + js["documents"].append(doc) + moods.append({}) # placeholder for each doc, to be replaced + a += 1 + try: + rv = requests.post( + "https://%s.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment" + % conf.get("azure", "location"), + headers=headers, + data=json.dumps(js), + ) + jsout = rv.json() + except: # pylint: disable=bare-except + jsout = {} # borked sentiment analysis? + + if "documents" in jsout and len(jsout["documents"]) > 0: + for doc in jsout["documents"]: + mood = {} + # This is more parred than Watson, so we'll split it into three groups: positive, neutral and negative. + # Divide into four segments, 0->40%, 25->75% and 60->100%. + # 0-40 promotes negative, 60-100 promotes positive, and 25-75% promotes neutral. + # As we don't want to over-represent negative/positive where the results are + # muddy, the neutral zone is larger than the positive/negative zones by 10%. + val = doc["score"] + mood["negative"] = max( + 0, ((0.4 - val) * 2.5) + ) # For 40% and below, use 2½ distance + mood["positive"] = max( + 0, ((val - 0.6) * 2.5) + ) # For 60% and above, use 2½ distance + mood["neutral"] = max( + 0, 1 - (abs(val - 0.5) * 2) + ) # Between 25% and 75% use double the distance to middle. 
+ moods[int(doc["id"])] = mood # Replace moods[X] with the actual mood + + else: + kibble_bit.pprint("Failed to analyze email body.") + print(jsout) + # Depending on price tier, Azure will return a 429 if you go too fast. + # If we see a statusCode return, let's just stop for now. + # Later scans can pick up the slack. + if "statusCode" in jsout: + kibble_bit.pprint("Possible rate limiting in place, stopping for now.") + return False + return moods + + +def pico_tone(kibble_bit, bodies): + """ Sentiment analysis using picoAPI Text Analysis """ + headers = { + "Content-Type": "application/json", + "PicoAPI-Key": conf.get("picoapi", "key"), + } + + js = {"texts": []} + + # For each body... + a = 0 + moods = [] + for body in bodies: + # Crop out quotes + lines = body.split("\n") + body = "\n".join([x for x in lines if not x.startswith(">")]) + doc = {"id": str(a), "body": body} + js["texts"].append(doc) + moods.append({}) # placeholder for each doc, to be replaced + a += 1 + try: + rv = requests.post( + "https://v1.picoapi.com/api/text/sentiment", + headers=headers, + data=json.dumps(js), + ) + jsout = rv.json() + except: # pylint: disable=bare-except + jsout = {} # borked sentiment analysis? + + if "results" in jsout and len(jsout["results"]) > 0: + for doc in jsout["results"]: + mood = { + "negative": doc["negativity"], + "positive": doc["positivity"], + "neutral": doc["neutrality"], + } + + # Sentiment is the overall score, and we use that for the neutrality of a text + + # Additional (optional) emotion weighting + if "emotions" in doc: + for k, v in doc["emotions"].items(): + mood[k] = v / 100 # Value is between 0 and 100. + + moods[int(doc["id"])] = mood # Replace moods[X] with the actual mood + + else: + kibble_bit.pprint("Failed to analyze email body.") + print(jsout) + # 403 returned on invalid key, 429 on rate exceeded. + # If we see a code return, let's just stop for now. + # Later scans can pick up the slack. 
+    if "code" in jsout:
+        kibble_bit.pprint("Possible rate limiting in place, stopping for now.")
+        return False
+    return moods
diff --git a/kibble/scanners/utils/urlmisc.py b/kibble/scanners/utils/urlmisc.py
new file mode 100644
index 00000000..82575448
--- /dev/null
+++ b/kibble/scanners/utils/urlmisc.py
@@ -0,0 +1,77 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+This is a Kibble miscellaneous URL functions plugin.
+"""
+import base64
+import gzip
+import io
+import subprocess
+import tempfile
+import urllib.request
+
+
+def unzip(url, creds=None, cookie=None):
+    """Attempts to download and unzip an archive. Returns the temporary file path of the unzipped contents"""
+    headers = {}
+    if creds:
+        auth = str(base64.encodebytes(bytes(creds)).replace("\n", ""))
+        headers = {
+            "Content-type": "application/json",
+            "Accept": "*/*",
+            "Authorization": "Basic %s" % auth,
+        }
+    if cookie:
+        headers = {
+            "Content-type": "application/json",
+            "Accept": "*/*",
+            "Cookie": cookie,
+        }
+    request = urllib.request.Request(url, headers=headers)
+    # Try fetching via python, fall back to wget (redhat == broken!)
+ decompressedFile = None + try: + result = urllib.request.urlopen(request) + compressedFile = io.BytesIO() + compressedFile.write(result.read()) + compressedFile.seek(0) + decompressedFile = gzip.GzipFile(fileobj=compressedFile, mode="rb") + except urllib.error.HTTPError as err: + # We're not interested in 404s, only transport errors + if err.code != 404 and err.code != 401: + tmpfile = tempfile.NamedTemporaryFile(mode="w+b", buffering=1, delete=False) + subprocess.check_call(("/usr/bin/wget", "-O", tmpfile.name, url)) + + try: + compressedFile = open("/tmp/kibbletmp.gz", "rb") + if compressedFile.read(2) == "\x1f\x8b": + compressedFile.seek(0) + decompressedFile = gzip.GzipFile(fileobj=compressedFile, mode="rb") + else: + compressedFile.close() + return tmpfile.name + except: # pylint: disable=bare-except + # Probably not a gzipped file! + decompressedFile = open(tmpfile.name, "rb") + if decompressedFile: + tmpfile = tempfile.NamedTemporaryFile(mode="w+b", buffering=1, delete=False) + tmpfile.write(decompressedFile.read()) + tmpfile.flush() + tmpfile.close() + return tmpfile.name + return None diff --git a/kibble/settings.py b/kibble/settings.py new file mode 100644 index 00000000..b9c40039 --- /dev/null +++ b/kibble/settings.py @@ -0,0 +1,32 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +import os + +from kibble.configuration import conf + +YAML_DIRECTORY = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "api", "yaml" +) +KIBBLE_YAML = os.path.join(YAML_DIRECTORY, "kibble.yaml") +MAPPING_DIRECTORY = os.path.join( + os.path.dirname(os.path.realpath(__file__)), "mappings" +) + +WATSON_ENABLED = bool(conf.get("watson", "username", fallback=None)) +AZURE_ENABLED = bool(conf.get("azure", "apikey", fallback=None)) +PICOAPI_ENABLED = bool(conf.get("picoapi", "key", fallback=None)) diff --git a/kibble/version.py b/kibble/version.py new file mode 100644 index 00000000..a913b5ac --- /dev/null +++ b/kibble/version.py @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +version = "1.0.0dev" diff --git a/license-templates/LICENSE.rst b/license-templates/LICENSE.rst new file mode 100644 index 00000000..adf897d1 --- /dev/null +++ b/license-templates/LICENSE.rst @@ -0,0 +1,16 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. diff --git a/license-templates/LICENSE.txt b/license-templates/LICENSE.txt new file mode 100644 index 00000000..60b675e3 --- /dev/null +++ b/license-templates/LICENSE.txt @@ -0,0 +1,16 @@ +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
diff --git a/nginx-dev.conf b/nginx-dev.conf new file mode 100644 index 00000000..22270d31 --- /dev/null +++ b/nginx-dev.conf @@ -0,0 +1,34 @@ +events {} +http { + server { + listen 8000; + + server_name kibble; + + access_log /var/log/nginx/kibble_access.log; + error_log /var/log/nginx/kibble_error.log; + + proxy_set_header Host $http_host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header X-Forwarded-Port $server_port; + proxy_set_header X-Forwarded-Proto $scheme; + + root /kibble/ui; + index index.html; + + location / { + try_files $uri $uri/ =404; + } + + location ~ /css { + add_header Content-Type text/css; + } + + # Reverse proxy to Apache Kibble API + location /api { + proxy_pass http://kibble:8001; + rewrite ^/api(.*)/$ $1 break; + } + } +} diff --git a/pylintrc b/pylintrc new file mode 100644 index 00000000..a87d1eb1 --- /dev/null +++ b/pylintrc @@ -0,0 +1,612 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Specify a score threshold to be exceeded before program exits with error. +fail-under=10 + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. 
+limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
+disable=invalid-name, + print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + deprecated-string-function, + deprecated-str-translate-call, + deprecated-itertools-function, + deprecated-types-field, + next-method-defined, + dict-items-not-iterating, + dict-keys-not-iterating, + dict-values-not-iterating, + deprecated-operator-function, + deprecated-urllib-function, + xreadlines-attribute, + deprecated-sys-function, + exception-escape, + comprehension-escape, + bad-continuation, + # TODO remove in future maybe + missing-class-docstring, + duplicate-code, + missing-function-docstring, + too-many-locals, + too-many-statements, + too-many-branches, + broad-except, + line-too-long, + redefined-builtin, + too-many-nested-blocks, + 
missing-module-docstring, + too-many-arguments, + unused-variable, + unused-argument, + too-few-public-methods, + inconsistent-return-statements, + fixme, + dangerous-default-value, + useless-object-inheritance, + redefined-outer-name + + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=colorized + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. 
+never-returning-functions=sys.exit + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +#notes-rgx= + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. 
+ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. 
A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+          _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. 
+method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. 
+analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). 
+max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=BaseException, + Exception diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..b3135bb3 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,36 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[metadata] +name = Kibble +summary = Apache Kibble is a tool to collect, aggregate and visualize data about any software project that uses commonly known tools. 
+description-file = README.md +author = Apache Kibble +author-email = dev@kibble.apache.org +license = Apache License, Version 2.0 +license_files = + LICENSE + NOTICE + +[bdist_wheel] +python-tag=py3 + + +[files] +packages = kibble + +[easy_install] diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..53b7975b --- /dev/null +++ b/setup.py @@ -0,0 +1,109 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import os +from importlib import util + +from setuptools import find_packages, setup + +# Kept manually in sync with kibble.version +spec = util.spec_from_file_location( + "kibble.version", os.path.join("kibble", "version.py") +) # noqa +mod = util.module_from_spec(spec) +spec.loader.exec_module(mod) # type: ignore +version = mod.version # type: ignore + +DEVEL_REQUIREMENTS = [ + "black==20.8b1", + "pre-commit==2.7.1", + "pylint==2.6.0", + "pytest==6.1.1", +] + +INSTALL_REQUIREMENTS = [ + "bcrypt==3.2.0", + "certifi==2020.6.20", + "click==7.1.2", + "elasticsearch==7.9.1", + "gunicorn==20.0.4", + "psutil==5.7.3", + "python-dateutil==2.8.1", + "python-twitter==3.5", + "PyYAML==5.3.1", + "requests==2.25.0", + "tenacity==6.2.0", +] + +EXTRAS_REQUIREMENTS = {"devel": DEVEL_REQUIREMENTS} + + +def get_long_description(): + description = "" + try: + with open( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "README.md"), + encoding="utf-8", + ) as f: + description = f.read() + except FileNotFoundError: + pass + return description + + +def do_setup(): + """Perform the Kibble package setup.""" + setup( + name="apache-kibble", + description="Apache Kibble is a tool to collect, aggregate and visualize data about any software project.", + long_description=get_long_description(), + long_description_content_type="text/markdown", + license="Apache License 2.0", + version=version, + packages=find_packages(include=["kibble*"]), + package_data={"kibble": ["py.typed"], "kibble.api.yaml": ["*.yaml"]}, + include_package_data=True, + zip_safe=False, + entry_points={"console_scripts": ["kibble = kibble.__main__:main"]}, + install_requires=INSTALL_REQUIREMENTS, + setup_requires=["docutils", "gitpython", "setuptools", "wheel"], + extras_require=EXTRAS_REQUIREMENTS, + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", 
+ "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3.8", + ], + author="Apache Software Foundation", + author_email="dev@kibble.apache.org", + url="https://kibble.apache.org/", + download_url=f"https://archive.apache.org/dist/kibble/{version}", + test_suite="setup.kibble_test_suite", + python_requires="~=3.8", + project_urls={ + "Documentation": "https://kibble.apache.org/docs/", + "Bug Tracker": "https://github.com/apache/kibble/issues", + "Source Code": "https://github.com/apache/kibble", + }, + ) + + +if __name__ == "__main__": + do_setup() diff --git a/setup/kibble.yaml.sample b/setup/kibble.yaml.sample deleted file mode 100644 index c523c9e3..00000000 --- a/setup/kibble.yaml.sample +++ /dev/null @@ -1,20 +0,0 @@ -elasticsearch: - host: localhost - port: 9200 - ssl: false - dbname: kibble - -mail: - mailhost: localhost - mailport: 25 - sender: Kibble - -accounts: - allowSignup: true - verifyEmail: false - # Example auto-invite setup: - autoInvite: - - - domain: apache.org - organisation: apache - diff --git a/setup/makeaccount.py b/setup/makeaccount.py deleted file mode 100644 index 67b41516..00000000 --- a/setup/makeaccount.py +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env python3 -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import sys, os, os.path -import elasticsearch -import argparse -import yaml -import bcrypt - -class KibbleDatabase(object): - def __init__(self, config): - self.config = config - self.dbname = config['elasticsearch']['dbname'] - self.ES = elasticsearch.Elasticsearch([{ - 'host': config['elasticsearch']['host'], - 'port': int(config['elasticsearch']['port']), - 'use_ssl': config['elasticsearch']['ssl'], - 'verify_certs': False, - 'url_prefix': config['elasticsearch']['uri'] if 'uri' in config['elasticsearch'] else '', - 'http_auth': config['elasticsearch']['auth'] if 'auth' in config['elasticsearch'] else None - }], - max_retries=5, - retry_on_timeout=True - ) - - -arg_parser = argparse.ArgumentParser() -arg_parser.add_argument("-u", "--username", required=True, help="Username (email) of accoun to create") -arg_parser.add_argument("-p", "--password", required=True, help="Password to set for account") -arg_parser.add_argument("-n", "--name", help="Real name (displayname) of account (optional)") -arg_parser.add_argument("-A", "--admin", action="store_true", help="Make account global admin") -arg_parser.add_argument("-a", "--orgadmin", action="store_true", help="Make account owner of orgs invited to") -arg_parser.add_argument("-o", "--org", help="Invite to this organisation") - -args = arg_parser.parse_args() - -# Load Kibble master configuration -config = yaml.load(open("../api/yaml/kibble.yaml")) - -DB = KibbleDatabase(config) - -username = args.username -password = args.password -name = args.name if args.name else args.username -admin = True if args.admin else False -adminorg = True if args.orgadmin else False -orgs = [args.org] if args.org else [] -aorgs = orgs if adminorg else [] - -salt = bcrypt.gensalt() -pwd = bcrypt.hashpw(password.encode('utf-8'), salt).decode('ascii') -doc = { - 'email': username, # Username (email) - 'password': pwd, # Hashed password 
- 'displayName': username, # Display Name - 'organisations': orgs, # Orgs user belongs to (default is none) - 'ownerships': aorgs, # Orgs user owns (default is none) - 'defaultOrganisation': None, # Default org for user - 'verified': True, # Account verified via email? - 'userlevel': "admin" if admin else "user" # User level (user/admin) - } -DB.ES.index(index=DB.dbname, doc_type='useraccount', id = username, body = doc) -print("Account created!") - diff --git a/setup/requirements.txt b/setup/requirements.txt deleted file mode 100644 index 24b3bca6..00000000 --- a/setup/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -certifi -pyyaml -bcrypt -elasticsearch -pre-commit -python-dateutil diff --git a/setup/setup.py b/setup/setup.py deleted file mode 100644 index f06bfd2f..00000000 --- a/setup/setup.py +++ /dev/null @@ -1,318 +0,0 @@ -#!/usr/bin/env python3 -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -KIBBLE_VERSION = '0.1.0' # ABI/API compat demarcation. 
-KIBBLE_DB_VERSION = 2 # Second database revision - -import sys - -if sys.version_info <= (3, 3): - print("This script requires Python 3.4 or higher") - sys.exit(-1) - -import os -import getpass -import subprocess -import argparse -import shutil -import yaml -import bcrypt -import json - -mappings = json.load(open("mappings.json")) -myyaml = yaml.load(open("kibble.yaml.sample")) - -dopip = False -try: - from elasticsearch import Elasticsearch - from elasticsearch import VERSION as ES_VERSION - ES_MAJOR = ES_VERSION[0] -except: - dopip = True - -if dopip and (getpass.getuser() != "root"): - print("It looks like you need to install some python modules first") - print("Either run this as root to do so, or run: ") - print("pip3 install elasticsearch certifi bcrypt") - sys.exit(-1) - -elif dopip: - print("Before we get started, we need to install some modules") - print("Hang on!") - try: - subprocess.check_call(('pip3','install','elasticsearch', 'certifi', 'bcrypt')) - from elasticsearch import Elasticsearch - except: - print("Oh dear, looks like this failed :(") - print("Please install elasticsearch and certifi before you try again:") - print("pip install elasticsearch certifi") - sys.exit(-1) - - -# Arguments for non-interactive setups like docker -arg_parser = argparse.ArgumentParser() -arg_parser.add_argument("-e", "--hostname", help="Pre-defined hostname for ElasticSearch (docker setups)") -arg_parser.add_argument("-p", "--port", help="Pre-defined port for ES (docker setups)") -arg_parser.add_argument("-d", "--dbname", help="Pre-defined Database prefix (docker setups)") -arg_parser.add_argument("-s", "--shards", help="Predefined number of ES shards (docker setups)") -arg_parser.add_argument("-r", "--replicas", help="Predefined number of replicas for ES (docker setups)") -arg_parser.add_argument("-m", "--mailhost", help="Pre-defined mail server host (docker setups)") -arg_parser.add_argument("-a", "--autoadmin", action='store_true', help="Generate generic admin 
account (docker setups)") -arg_parser.add_argument("-k", "--skiponexist", action='store_true', help="Skip DB creation if DBs exist (docker setups)") -args = arg_parser.parse_args() - -print("Welcome to the Apache Kibble setup script!") -print("Let's start by determining some settings...") -print("") - - -hostname = args.hostname or "" -port = int(args.port) if args.port else 0 -dbname = args.dbname or "" -mlserver = args.mailhost or "" -mldom = "" -wc = "" -genname = "" -wce = False -shards = int(args.shards) if args.shards else 0 -replicas = int(args.replicas) if args.replicas else -1 - -while hostname == "": - hostname = input("What is the hostname of the ElasticSearch server? [localhost]: ") - if hostname == "": - print("Using default; localhost") - hostname = "localhost" -while port < 1: - try: - port = input("What port is ElasticSearch listening on? [9200]: ") - if port == "": - print("Using default; 9200") - port = 9200 - port = int(port) - except ValueError: - pass - -while dbname == "": - dbname = input("What would you like to call the DB index [kibble]: ") - if dbname == "": - print("Using default; kibble") - dbname = "kibble" - -while mlserver == "": - mlserver = input("What is the hostname of the outgoing mailserver? [localhost:25]: ") - if mlserver == "": - print("Using default; localhost:25") - mlserver = "localhost:25" - -while shards < 1: - try: - shards = input("How many shards for the ElasticSearch index? [5]:") - if shards == "": - print("Using default; 5") - shards = 5 - shards = int(shards) - except ValueError: - pass - -while replicas < 0: - try: - replicas = input("How many replicas for each shard? 
[1]: ") - if replicas == "": - print("Using default; 1") - replicas = 1 - replicas = int(replicas) - except ValueError: - pass - -adminName = "" -adminPass = "" -if args.autoadmin: - adminName = "admin@kibble" - adminPass = "kibbleAdmin" -while adminName == "": - adminName = input("Enter an email address for the adminstrator account: ") -while adminPass == "": - adminPass = input("Enter a password for the adminstrator account: ") - -print("Okay, I got all I need, setting up Kibble...") - -def createIndex(): - global mappings - es = Elasticsearch([ - { - 'host': hostname, - 'port': port, - 'use_ssl': False, - 'url_prefix': '' - }], - max_retries=5, - retry_on_timeout=True - ) - - es6 = True if int(es.info()['version']['number'].split('.')[0]) >= 6 else False - es7 = True if int(es.info()['version']['number'].split('.')[0]) >= 7 else False - if not es6: - print("New Kibble installations require ElasticSearch 6.x or newer! You appear to be running %s!" % es.info()['version']['number']) - sys.exit(-1) - # If ES >= 7, _doc is invalid and mapping should be rooted - if es7: - mappings['mappings'] = mappings['mappings']['_doc'] - # Check if index already exists - if es.indices.exists(dbname+"_api"): - if args.skiponexist: # Skip this is DB exists and -k added - print("DB prefix exists, but --skiponexist used, skipping this step.") - return - print("Error: ElasticSearch DB prefix '%s' already exists!" 
% dbname) - sys.exit(-1) - - types = [ - 'api', - # ci_*: CI service stats - 'ci_build', - 'ci_queue', - # code_* + evolution + file_history: git repo stats - 'code_commit', - 'code_commit_unique', - 'code_modification', - 'evolution', - 'file_history', - # forum_*: forum stats (SO, Discourse, Askbot etc) - 'forum_post', - 'forum_topic', - # GitHub stats - 'ghstats', - # im_*: Instant messaging stats - 'im_stats', - 'im_ops', - 'im_msg', - 'issue', - 'logstats', - # email, mail*: Email statitics - 'email', - 'mailstats', - 'mailtop', - # organisation, view, source, publish: UI Org DB - 'organisation', - 'view', - 'publish', - 'source', - # stats: Miscellaneous stats - 'stats', - # social_*: Twitter, Mastodon, Facebook etc - 'social_follow', - 'social_followers', - 'social_follower', - 'social_person', - # uisession, useraccount, message: UI user DB - 'uisession', - 'useraccount', - 'message', - # person: contributor DB - 'person', - ] - - for t in types: - iname = "%s_%s" % (dbname, t) - print("Creating index " + iname) - - settings = { - "number_of_shards" : shards, - "number_of_replicas" : replicas - } - - - res = es.indices.create(index = iname, body = { - "mappings" : mappings['mappings'], - "settings": settings - } - ) - - print("Indices created! %s " % res) - - salt = bcrypt.gensalt() - pwd = bcrypt.hashpw(adminPass.encode('utf-8'), salt).decode('ascii') - print("Creating administrator account") - doc = { - 'email': adminName, # Username (email) - 'password': pwd, # Hashed password - 'displayName': "Administrator", # Display Name - 'organisations': [], # Orgs user belongs to (default is none) - 'ownerships': [], # Orgs user owns (default is none) - 'defaultOrganisation': None, # Default org for user - 'verified': True, # Account verified via email? - 'userlevel': "admin" # User level (user/admin) - } - dbdoc = { - 'apiversion': KIBBLE_VERSION, # Log current API version - 'dbversion': KIBBLE_DB_VERSION # Log the database revision we accept (might change!) 
- } - es.index(index=dbname+'_useraccount', doc_type = '_doc', id = adminName, body = doc) - es.index(index=dbname+'_api', doc_type = '_doc', id = 'current', body = dbdoc) - print("Account created!") - -try: - import logging - # elasticsearch logs lots of warnings on retries/connection failure - logging.getLogger("elasticsearch").setLevel(logging.ERROR) - createIndex() - - -except Exception as e: - print("Index creation failed: %s" % e) - sys.exit(1) - -kibble_yaml = '../api/yaml/kibble.yaml' - -if os.path.exists(kibble_yaml): - print("%s already exists! Writing to %s.tmp instead" % (kibble_yaml, kibble_yaml)) - kibble_yaml = kibble_yaml + ".tmp" - - -print("Writing Kibble config (%s)" % kibble_yaml) - -m = mlserver.split(':') -if len(m) == 1: - m.append(25) - -myconfig = { - 'api': { - 'version': KIBBLE_VERSION, - 'database': KIBBLE_DB_VERSION - }, - 'elasticsearch': { - 'host': hostname, - 'port': port, - 'ssl': False, - 'dbname': dbname - }, - 'mail': { - 'mailhost': m[0], - 'mailport': m[1], - 'sender': 'Kibble ' - }, - 'accounts': { - 'allowSignup': True, - 'verify': True - } -} - -with open(kibble_yaml, "w") as f: - f.write(yaml.dump(myconfig, default_flow_style = False)) - f.close() - - -print("All done, Kibble should...work now :)") - diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..13a83393 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/test_configuration.py b/tests/test_configuration.py new file mode 100644 index 00000000..f68837cb --- /dev/null +++ b/tests/test_configuration.py @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import pytest + +from kibble.configuration import conf + + +# pylint: disable=no-self-use +class TestDefaultConfig: + @pytest.mark.parametrize( + "section, key, value", + [ + ("accounts", "allowSignup", True), + ("accounts", "verify", True), + ("api", "database", 2), + ("api", "version", "0.1.0"), + ("elasticsearch", "conn_uri", "http://elasticsearch:9200"), + ("mail", "mailhost", "localhost:25"), + ], + ) + def test_default_values(self, section, key, value): + if isinstance(value, bool): + config_value = conf.getboolean(section, key) + elif isinstance(value, int): + config_value = conf.getint(section, key) + else: + config_value = conf.get(section, key) + + assert config_value == value diff --git a/ui/apidoc.html b/ui/apidoc.html index 218050f4..833b7cde 100644 --- a/ui/apidoc.html +++ b/ui/apidoc.html @@ -107,16 +107,16 @@

Overview:

    Input examples:

    application/json:
     {
    @@ -141,14 +141,14 @@ 

    Response examples:

    - +
    - +
    PATCH
    - + /api/account
    @@ -190,14 +190,14 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/account
    @@ -235,23 +235,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/bio/bio
    Shows some facts about a contributor

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -268,14 +268,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/bio/bio
    @@ -334,23 +334,23 @@

    Response examples:

    - + - +
    - +
    POST
    - + /api/bio/trends
    @@ -433,23 +433,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/changes
    Show insertions/deletions as a timeseries

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -474,14 +474,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/changes
    @@ -548,23 +548,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/commits
    Show commits as a timeseries

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -589,14 +589,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/commits
    @@ -663,23 +663,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/committers
    Shows the top N of committers

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -696,14 +696,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/committers
    @@ -762,23 +762,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/evolution
    Show code evolution as a timeseries

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -803,14 +803,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/evolution
    @@ -877,23 +877,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/pony
    Shows pony factor data for a set of repos over a given period of time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -914,14 +914,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/pony
    @@ -984,23 +984,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/pony-timeseries
    Shows timeseries of Pony Factor over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1025,14 +1025,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/pony-timeseries
    @@ -1099,23 +1099,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/relationships
    Shows a breakdown of contributor relationships between repositories

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1132,14 +1132,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/relationships
    @@ -1198,23 +1198,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/retention
    Shows retention metrics for a set of repos over a given period of time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1235,14 +1235,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/retention
    @@ -1305,23 +1305,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/sloc
    Shows a breakdown of lines of code for one or more sources

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1338,14 +1338,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/sloc
    @@ -1404,23 +1404,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/top-commits
    Shows top 25 repos by commit volume

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1445,14 +1445,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/top-commits
    @@ -1519,23 +1519,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/code/top-sloc
    Shows top 25 repos by lines of code

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1560,14 +1560,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/code/top-sloc
    @@ -1634,23 +1634,23 @@

    Response examples:

    - + - +
    - +
    POST
    - + /api/code/trends
    @@ -1733,23 +1733,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/actors
    Shows timeseries of no. of people opening/closing issues over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1774,14 +1774,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/actors
    @@ -1848,23 +1848,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/age
    Shows timeseries of no. of open tickets by age

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1889,14 +1889,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/age
    @@ -1963,23 +1963,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/closers
    Shows the top N of issue closers

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -1996,14 +1996,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/closers
    @@ -2062,23 +2062,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/issues
    Shows timeseries of issues opened/closed over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2103,14 +2103,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/issues
    @@ -2177,23 +2177,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/openers
    Shows the top N of issue openers

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2210,14 +2210,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/openers
    @@ -2276,23 +2276,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/pony-timeseries
    Shows timeseries of Pony Factor over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2317,14 +2317,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/pony-timeseries
    @@ -2391,23 +2391,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/relationships
    Shows a breakdown of contributor relationships between issue trackers

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2424,14 +2424,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/relationships
    @@ -2490,23 +2490,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/retention
    Shows retention metrics for a set of issue trackers over a given period of time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2527,14 +2527,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/retention
    @@ -2597,23 +2597,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/top
    Shows the top N issues by interactions

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2634,14 +2634,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/top
    @@ -2704,23 +2704,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/issue/top-count
    Shows top 25 issue trackers by issues

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2745,14 +2745,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/issue/top-count
    @@ -2819,23 +2819,23 @@

    Response examples:

    - + - +
    - +
    POST
    - + /api/issue/trends
    @@ -2918,23 +2918,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/map
    Shows a breakdown of email author reply mappings

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -2951,14 +2951,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/map
    @@ -3017,23 +3017,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/pony-timeseries
    Shows timeseries of Pony Factor over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3058,14 +3058,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/pony-timeseries
    @@ -3132,23 +3132,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/relationships
    Shows a breakdown of contributor relationships between mailing lists

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3165,14 +3165,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/relationships
    @@ -3231,23 +3231,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/retention
    Shows retention metrics for a set of mailing lists over a given period of time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3268,14 +3268,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/retention
    @@ -3338,23 +3338,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/timeseries
    Shows email sent over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3379,14 +3379,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/timeseries
    @@ -3453,23 +3453,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/timeseries-single
    Shows email sent over time

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3494,14 +3494,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/timeseries-single
    @@ -3568,23 +3568,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/top-authors
    Shows the top N of email authors

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3601,14 +3601,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/top-authors
    @@ -3667,23 +3667,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/mail/top-topics
    Shows the top N of email authors

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3700,14 +3700,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/mail/top-topics
    @@ -3766,23 +3766,23 @@

    Response examples:

    - + - +
    - +
    POST
    - + /api/mail/trends
    @@ -3865,23 +3865,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/org/list
    Lists the organisations you belong to (or all, if admin)

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -3898,14 +3898,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/org/list
    @@ -3972,14 +3972,14 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/org/list
    @@ -4017,14 +4017,14 @@

    Response examples:

    - +
    - +
    DELETE
    - + /api/org/members
    @@ -4066,23 +4066,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/org/members
    Lists the members of an organisation

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4099,14 +4099,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/org/members
    @@ -4167,14 +4167,14 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/org/members
    @@ -4216,23 +4216,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/org/sourcetypes
    Lists the available source types supported by Kibble

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4249,14 +4249,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/org/sourcetypes
    @@ -4315,23 +4315,23 @@

    Response examples:

    - + - +
    - +
    POST
    - + /api/org/trends
    @@ -4414,14 +4414,14 @@

    Response examples:

    - +
    - +
    DELETE
    - + /api/session
    @@ -4454,23 +4454,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/session
    Display your login details

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4496,14 +4496,14 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/session
    @@ -4539,14 +4539,14 @@

    Response examples:

    - +
    - +
    DELETE
    - + /api/sources
    @@ -4567,23 +4567,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/sources
    Fetches a list of all sources for this organisation

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4604,14 +4604,14 @@

    Response examples:

    - +
    - +
    PATCH
    - + /api/sources
    @@ -4649,14 +4649,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/sources
    @@ -4719,14 +4719,14 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/sources
    @@ -4763,23 +4763,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/verify/{email}/{vcode}
    Verify an account

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4798,21 +4798,21 @@

    Response examples:

    - +
    - +
    DELETE
    - + /api/views
    Delete a new view

    JSON parameters:

    - +

    Input examples:

    application/json:
     {}
    @@ -4835,23 +4835,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/views
    Fetches a list of all views (filters) for this user

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -4883,21 +4883,21 @@

    Response examples:

    - +
    - +
    PATCH
    - + /api/views
    Edit an existing source

    JSON parameters:

    - +

    Input examples:

    application/json:
     {}
    @@ -4920,14 +4920,14 @@

    Response examples:

    - +
    - +
    POST
    - + /api/views
    @@ -5001,21 +5001,21 @@

    Response examples:

    - +
    - +
    PUT
    - + /api/views
    Add a new view

    JSON parameters:

    - +

    Input examples:

    application/json:
     {}
    @@ -5038,23 +5038,23 @@

    Response examples:

    - +
    - +
    GET
    - + /api/widgets/{pageid}
    Shows the widget layout for a specific page

    JSON parameters:

    - +
    - +

    Response examples:

    @@ -5086,5 +5086,5 @@

    Response examples:

    - + diff --git a/ui/contributors.html b/ui/contributors.html index 086f1d75..b65acce9 100644 --- a/ui/contributors.html +++ b/ui/contributors.html @@ -78,8 +78,8 @@ - - + + @@ -88,4 +88,4 @@ - \ No newline at end of file + diff --git a/ui/css/c3.css b/ui/css/c3.css index be5a9475..f3b43ab0 100644 --- a/ui/css/c3.css +++ b/ui/css/c3.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + /*-- Chart --*/ .c3 svg { font: 10px sans-serif; @@ -15,7 +34,7 @@ -webkit-user-select: none; -moz-user-select: none; user-select: none; } - + .c3-legend-item text { font: 12px sans-serif !important; } @@ -113,7 +132,7 @@ z-index: 10; } .c3-tooltip { - + background-color: #fff; empty-cells: show; border-spacing: 0px; @@ -123,7 +142,7 @@ border-radius: 3px; border: 2px solid #333; opacity: 0.9; } - + .c3-tooltip tr { border: 1px solid #CCC; font-family: sans-serif; @@ -276,4 +295,4 @@ .linkedChart { min-height: 600px; -} \ No newline at end of file +} diff --git a/ui/css/chosen.css b/ui/css/chosen.css index d4219b49..9feaa026 100644 --- a/ui/css/chosen.css +++ b/ui/css/chosen.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + /*! Chosen, a Select Box Enhancer for jQuery and Prototype by Patrick Filler for Harvest, http://getharvest.com diff --git a/ui/css/daterangepicker.css b/ui/css/daterangepicker.css index dfce19cb..c06ed521 100644 --- a/ui/css/daterangepicker.css +++ b/ui/css/daterangepicker.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + .daterangepicker { position: absolute; color: inherit; @@ -229,4 +248,3 @@ float: left; } .daterangepicker .calendar.left { clear: none; } } - diff --git a/ui/css/kibble.min.css b/ui/css/kibble.min.css index 1f046beb..00b73b95 100644 --- a/ui/css/kibble.min.css +++ b/ui/css/kibble.min.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + /* * Kibble CSS */ @@ -3100,10 +3119,10 @@ body.error .logo h1 { border: 1.25px solid #3338; line-height: 28px; z-index: 1001; - font: 12px sans-serif; - background: lightgoldenrodyellow; - border-radius: 6px; - pointer-events: none; + font: 12px sans-serif; + background: lightgoldenrodyellow; + border-radius: 6px; + pointer-events: none; } .show-calendar { diff --git a/ui/css/main.css b/ui/css/main.css index eee1265e..05e32579 100644 --- a/ui/css/main.css +++ b/ui/css/main.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + /** * bootstrap-admin-template - Free Admin Template Based On Twitter Bootstrap 3.x * @version 2.4.2 diff --git a/ui/css/theme.css b/ui/css/theme.css index 7c5e55d7..f0e318cd 100644 --- a/ui/css/theme.css +++ b/ui/css/theme.css @@ -1,3 +1,22 @@ +/*! + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + /** * bootstrap-admin-template - Free Admin Template Based On Twitter Bootstrap 3.x * @version 2.4.2 diff --git a/ui/dashboard.html b/ui/dashboard.html index f93cedbe..0a16ee87 100644 --- a/ui/dashboard.html +++ b/ui/dashboard.html @@ -6,7 +6,7 @@ - + @@ -81,8 +81,8 @@ - - + + @@ -91,4 +91,4 @@ - \ No newline at end of file + diff --git a/ui/engagement.html b/ui/engagement.html index 2fec1b8f..47252a46 100644 --- a/ui/engagement.html +++ b/ui/engagement.html @@ -79,8 +79,8 @@ - - + + @@ -89,4 +89,4 @@ - \ No newline at end of file + diff --git a/ui/index.html b/ui/index.html index 44b17f02..01026f35 100644 --- a/ui/index.html +++ b/ui/index.html @@ -27,8 +27,8 @@ - - + + @@ -42,4 +42,4 @@ Apache Kibble, Kibble, Apache, the Apache feather logo, and the Apache Kibble project logo are either registered trademarks or trademarks of the Apache Software Foundation in the United States and other countries. - \ No newline at end of file + diff --git a/ui/js/app.js b/ui/js/app.js index 17c4c918..2cb1c5e1 100644 --- a/ui/js/app.js +++ b/ui/js/app.js @@ -1025,14 +1025,14 @@ // $('#editModal #fName').val(val1.html()); // $('#editModal #lName').val(val2.html()); // $('#editModal #uName').val(val3.html()); - // - // + // + // // $('#editModal #sbmtBtn').on('click', function() { // val1.html($('#editModal #fName').val()); // val2.html($('#editModal #lName').val()); // val3.html($('#editModal #uName').val()); // }); - // + // // }); /*----------- END action table CODE -------------------------*/ }; @@ -1082,4 +1082,4 @@ }); }; return Metis; -})(jQuery, Metis); \ No newline at end of file +})(jQuery, Metis); diff --git a/ui/js/c3.min.js b/ui/js/c3.min.js index 15506791..b98c158d 100644 --- a/ui/js/c3.min.js +++ b/ui/js/c3.min.js @@ -1 +1 @@ -!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.c3=e()}(this,function(){"use strict";function t(t,e){function 
i(t,e){t.attr("transform",function(t){return"translate("+Math.ceil(e(t)+w)+", 0)"})}function n(t,e){t.attr("transform",function(t){return"translate(0,"+Math.ceil(e(t))+")"})}function a(t){var e=t[0],i=t[t.length-1];return e0&&n[0]>0&&n.unshift(n[0]-(n[1]-n[0])),n}function s(){var t,i=_.copy();return e.isCategory&&(t=_.domain(),i.domain([t[0],t[1]-1])),i}function c(t){var e=g?g(t):t;return void 0!==e?e:""}function d(t){if(r)return r;var e={h:11.5,w:5.5};return t.select("text").text(c).each(function(t){var i=this.getBoundingClientRect(),n=c(t),a=i.height,r=n?i.width/n.length:void 0;a&&r&&(e.h=a,e.w=r)}).text(""),r=e,e}function l(i){return e.withoutTransition?i:t.transition(i)}function u(r){r.each(function(){function r(t,i){function n(t,e){r=void 0;for(var s=1;s0?1:-1):N}(j)).style("text-anchor",function(t){return t?t>0?"start":"end":"middle"}(j)).attr("transform",function(t){return t?"rotate("+t+")":""}(j)),H.attr("x",0).attr("dy",g).attr("dx",function(t){return t?8*Math.sin(Math.PI*(t/180)):0}(j)),R.attr("d","M"+I[0]+","+h+"V0H"+I[1]+"V"+h);break;case"top":p=i,D.attr("y2",-y),X.attr("y",-N),F.attr("x2",0).attr("y2",-y),k.attr("x",0).attr("y",-N),M.style("text-anchor","middle"),H.attr("x",0).attr("dy","0em"),R.attr("d","M"+I[0]+","+-h+"V0H"+I[1]+"V"+-h);break;case"left":p=n,D.attr("x2",-y),X.attr("x",-N),F.attr("x2",-y).attr("y1",b).attr("y2",b),k.attr("x",-N).attr("y",w),M.style("text-anchor","end"),H.attr("x",-N).attr("dy",g),R.attr("d","M"+-h+","+I[0]+"H0V"+I[1]+"H"+-h);break;case"right":p=n,D.attr("x2",y),X.attr("x",N),F.attr("x2",y).attr("y2",0),k.attr("x",N).attr("y",0),M.style("text-anchor","start"),H.attr("x",N).attr("dy",g),R.attr("d","M"+h+","+I[0]+"H0V"+I[1]+"H"+h)}if(P.rangeBand){var U=P,W=U.rangeBand()/2;T=P=function(t){return U(t)+W}}else T.rangeBand?T=P:G.call(p,P);V.call(p,T),E.call(p,P)})}var h,g,p,f,_=t.scale.linear(),x="bottom",y=6,m=3,S=null,w=0,v=!0;return e=e||{},h=e.withOuterTick?6:0,u.scale=function(t){return 
arguments.length?(_=t,u):_},u.orient=function(t){return arguments.length?(x=t in{top:1,right:1,bottom:1,left:1}?t+"":"bottom",u):x},u.tickFormat=function(t){return arguments.length?(g=t,u):g},u.tickCentered=function(t){return arguments.length?(f=t,u):f},u.tickOffset=function(){return w},u.tickInterval=function(){var t;return t=e.isCategory?2*w:(u.g.select("path.domain").node().getTotalLength()-2*h)/u.g.selectAll("line").size(),t===1/0?0:t},u.ticks=function(){return arguments.length?(p=arguments,u):p},u.tickCulling=function(t){return arguments.length?(v=t,u):v},u.tickValues=function(t){if("function"==typeof t)S=function(){return t(_.domain())};else{if(!arguments.length)return S;S=t}return u},u}function e(t){i.call(this,t)}function i(t){this.owner=t}function n(t){var e=this.internal=new a(this);e.loadConfig(t),e.beforeInit(t),e.init(),e.afterInit(t),function t(e,i,n){Object.keys(e).forEach(function(a){i[a]=e[a].bind(n),Object.keys(e[a]).length>0&&t(e[a],i[a],n)})}(b,this,this)}function a(t){var e=this;e.d3=window.d3?window.d3:"undefined"!=typeof require?require("d3"):void 0,e.api=t,e.config=e.getDefaultConfig(),e.data={},e.cache={},e.axes={}}var 
r,o={target:"c3-target",chart:"c3-chart",chartLine:"c3-chart-line",chartLines:"c3-chart-lines",chartBar:"c3-chart-bar",chartBars:"c3-chart-bars",chartText:"c3-chart-text",chartTexts:"c3-chart-texts",chartArc:"c3-chart-arc",chartArcs:"c3-chart-arcs",chartArcsTitle:"c3-chart-arcs-title",chartArcsBackground:"c3-chart-arcs-background",chartArcsGaugeUnit:"c3-chart-arcs-gauge-unit",chartArcsGaugeMax:"c3-chart-arcs-gauge-max",chartArcsGaugeMin:"c3-chart-arcs-gauge-min",selectedCircle:"c3-selected-circle",selectedCircles:"c3-selected-circles",eventRect:"c3-event-rect",eventRects:"c3-event-rects",eventRectsSingle:"c3-event-rects-single",eventRectsMultiple:"c3-event-rects-multiple",zoomRect:"c3-zoom-rect",brush:"c3-brush",focused:"c3-focused",defocused:"c3-defocused",region:"c3-region",regions:"c3-regions",title:"c3-title",tooltipContainer:"c3-tooltip-container",tooltip:"c3-tooltip",tooltipName:"c3-tooltip-name",shape:"c3-shape",shapes:"c3-shapes",line:"c3-line",lines:"c3-lines",bar:"c3-bar",bars:"c3-bars",circle:"c3-circle",circles:"c3-circles",arc:"c3-arc",arcs:"c3-arcs",area:"c3-area",areas:"c3-areas",empty:"c3-empty",text:"c3-text",texts:"c3-texts",gaugeValue:"c3-gauge-value",grid:"c3-grid",gridLines:"c3-grid-lines",xgrid:"c3-xgrid",xgrids:"c3-xgrids",xgridLine:"c3-xgrid-line",xgridLines:"c3-xgrid-lines",xgridFocus:"c3-xgrid-focus",ygrid:"c3-ygrid",ygrids:"c3-ygrids",ygridLine:"c3-ygrid-line",ygridLines:"c3-ygrid-lines",axis:"c3-axis",axisX:"c3-axis-x",axisXLabel:"c3-axis-x-label",axisY:"c3-axis-y",axisYLabel:"c3-axis-y-label",axisY2:"c3-axis-y2",axisY2Label:"c3-axis-y2-label",legendBackground:"c3-legend-background",legendItem:"c3-legend-item",legendItemEvent:"c3-legend-item-event",legendItemTile:"c3-legend-item-tile",legendItemHidden:"c3-legend-item-hidden",legendItemFocused:"c3-legend-item-focused",dragarea:"c3-dragarea",EXPANDED:"_expanded_",SELECTED:"_selected_",INCLUDED:"_included_"},s="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return 
typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},c=function(t){return t||0===t},d=function(t){return"function"==typeof t},l=function(t){return Array.isArray(t)},u=function(t){return"string"==typeof t},h=function(t){return void 0===t},g=function(t){return void 0!==t},p=function(t){return 10*Math.ceil(t/10)},f=function(t){return Math.ceil(t)+.5},_=function(t){return t[1]-t[0]},x=function(t){return void 0===t||null===t||u(t)&&0===t.length||"object"===(void 0===t?"undefined":s(t))&&0===Object.keys(t).length},y=function(t){return!A.isEmpty(t)},m=function(t,e,i){return void 0!==t[e]?t[e]:i},S=function(t,e){var i=!1;return Object.keys(t).forEach(function(n){t[n]===e&&(i=!0)}),i},w=function(t){return"string"==typeof t?t.replace(//g,">"):t},v=function(t){var e=t.getBoundingClientRect(),i=[t.pathSegList.getItem(0),t.pathSegList.getItem(1)];return{x:i[0].x,y:Math.min(i[0].y,i[1].y),width:e.width,height:e.height}};!function(t,e){if(Object.create)e.prototype=Object.create(t.prototype);else{var i=function(){};i.prototype=t.prototype,e.prototype=new i}e.prototype.constructor=e}(i,e),e.prototype.init=function(){var t=this.owner,e=t.config,i=t.main;t.axes.x=i.append("g").attr("class",o.axis+" "+o.axisX).attr("clip-path",t.clipPathForXAxis).attr("transform",t.getTranslate("x")).style("visibility",e.axis_x_show?"visible":"hidden"),t.axes.x.append("text").attr("class",o.axisXLabel).attr("transform",e.axis_rotated?"rotate(-90)":"").style("text-anchor",this.textAnchorForXAxisLabel.bind(this)),t.axes.y=i.append("g").attr("class",o.axis+" "+o.axisY).attr("clip-path",e.axis_y_inner?"":t.clipPathForYAxis).attr("transform",t.getTranslate("y")).style("visibility",e.axis_y_show?"visible":"hidden"),t.axes.y.append("text").attr("class",o.axisYLabel).attr("transform",e.axis_rotated?"":"rotate(-90)").style("text-anchor",this.textAnchorForYAxisLabel.bind(this)),t.axes.y2=i.append("g").attr("class",o.axis+" 
"+o.axisY2).attr("transform",t.getTranslate("y2")).style("visibility",e.axis_y2_show?"visible":"hidden"),t.axes.y2.append("text").attr("class",o.axisY2Label).attr("transform",e.axis_rotated?"":"rotate(-90)").style("text-anchor",this.textAnchorForY2AxisLabel.bind(this))},e.prototype.getXAxis=function(e,i,n,a,r,o,s){var c=this.owner,d=c.config,l={isCategory:c.isCategorized(),withOuterTick:r,tickMultiline:d.axis_x_tick_multiline,tickWidth:d.axis_x_tick_width,tickTextRotate:s?0:d.axis_x_tick_rotate,withoutTransition:o},u=t(c.d3,l).scale(e).orient(i);return c.isTimeSeries()&&a&&"function"!=typeof a&&(a=a.map(function(t){return c.parseDate(t)})),u.tickFormat(n).tickValues(a),c.isCategorized()&&(u.tickCentered(d.axis_x_tick_centered),x(d.axis_x_tick_culling)&&(d.axis_x_tick_culling=!1)),u},e.prototype.updateXAxisTickValues=function(t,e){var i,n=this.owner,a=n.config;return(a.axis_x_tick_fit||a.axis_x_tick_count)&&(i=this.generateTickValues(n.mapTargetsToUniqueXs(t),a.axis_x_tick_count,n.isTimeSeries())),e?e.tickValues(i):(n.xAxis.tickValues(i),n.subXAxis.tickValues(i)),i},e.prototype.getYAxis=function(e,i,n,a,r,o,s){var c=this.owner,d=c.config,l={withOuterTick:r,withoutTransition:o,tickTextRotate:s?0:d.axis_y_tick_rotate},u=t(c.d3,l).scale(e).orient(i).tickFormat(n);return c.isTimeSeriesY()?u.ticks(c.d3.time[d.axis_y_tick_time_value],d.axis_y_tick_time_interval):u.tickValues(a),u},e.prototype.getId=function(t){var e=this.owner.config;return t in e.data_axes?e.data_axes[t]:"y"},e.prototype.getXAxisTickFormat=function(){var t=this.owner,e=t.config,i=t.isTimeSeries()?t.defaultAxisTimeFormat:t.isCategorized()?t.categoryName:function(t){return t<0?t.toFixed(0):t};return e.axis_x_tick_format&&(d(e.axis_x_tick_format)?i=e.axis_x_tick_format:t.isTimeSeries()&&(i=function(i){return i?t.axisTimeFormat(e.axis_x_tick_format)(i):""})),d(i)?function(e){return i.call(t,e)}:i},e.prototype.getTickValues=function(t,e){return t||(e?e.tickValues():void 
0)},e.prototype.getXAxisTickValues=function(){return this.getTickValues(this.owner.config.axis_x_tick_values,this.owner.xAxis)},e.prototype.getYAxisTickValues=function(){return this.getTickValues(this.owner.config.axis_y_tick_values,this.owner.yAxis)},e.prototype.getY2AxisTickValues=function(){return this.getTickValues(this.owner.config.axis_y2_tick_values,this.owner.y2Axis)},e.prototype.getLabelOptionByAxisId=function(t){var e,i=this.owner.config;return"y"===t?e=i.axis_y_label:"y2"===t?e=i.axis_y2_label:"x"===t&&(e=i.axis_x_label),e},e.prototype.getLabelText=function(t){var e=this.getLabelOptionByAxisId(t);return u(e)?e:e?e.text:null},e.prototype.setLabelText=function(t,e){var i=this.owner.config,n=this.getLabelOptionByAxisId(t);u(n)?"y"===t?i.axis_y_label=e:"y2"===t?i.axis_y2_label=e:"x"===t&&(i.axis_x_label=e):n&&(n.text=e)},e.prototype.getLabelPosition=function(t,e){var i=this.getLabelOptionByAxisId(t),n=i&&"object"===(void 0===i?"undefined":s(i))&&i.position?i.position:e;return{isInner:n.indexOf("inner")>=0,isOuter:n.indexOf("outer")>=0,isLeft:n.indexOf("left")>=0,isCenter:n.indexOf("center")>=0,isRight:n.indexOf("right")>=0,isTop:n.indexOf("top")>=0,isMiddle:n.indexOf("middle")>=0,isBottom:n.indexOf("bottom")>=0}},e.prototype.getXAxisLabelPosition=function(){return this.getLabelPosition("x",this.owner.config.axis_rotated?"inner-top":"inner-right")},e.prototype.getYAxisLabelPosition=function(){return this.getLabelPosition("y",this.owner.config.axis_rotated?"inner-right":"inner-top")},e.prototype.getY2AxisLabelPosition=function(){return this.getLabelPosition("y2",this.owner.config.axis_rotated?"inner-right":"inner-top")},e.prototype.getLabelPositionById=function(t){return"y2"===t?this.getY2AxisLabelPosition():"y"===t?this.getYAxisLabelPosition():this.getXAxisLabelPosition()},e.prototype.textForXAxisLabel=function(){return this.getLabelText("x")},e.prototype.textForYAxisLabel=function(){return 
this.getLabelText("y")},e.prototype.textForY2AxisLabel=function(){return this.getLabelText("y2")},e.prototype.xForAxisLabel=function(t,e){var i=this.owner;return t?e.isLeft?0:e.isCenter?i.width/2:i.width:e.isBottom?-i.height:e.isMiddle?-i.height/2:0},e.prototype.dxForAxisLabel=function(t,e){return t?e.isLeft?"0.5em":e.isRight?"-0.5em":"0":e.isTop?"-0.5em":e.isBottom?"0.5em":"0"},e.prototype.textAnchorForAxisLabel=function(t,e){return t?e.isLeft?"start":e.isCenter?"middle":"end":e.isBottom?"start":e.isMiddle?"middle":"end"},e.prototype.xForXAxisLabel=function(){return this.xForAxisLabel(!this.owner.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.xForYAxisLabel=function(){return this.xForAxisLabel(this.owner.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.xForY2AxisLabel=function(){return this.xForAxisLabel(this.owner.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.dxForXAxisLabel=function(){return this.dxForAxisLabel(!this.owner.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.dxForYAxisLabel=function(){return this.dxForAxisLabel(this.owner.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.dxForY2AxisLabel=function(){return this.dxForAxisLabel(this.owner.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.dyForXAxisLabel=function(){var t=this.owner.config,e=this.getXAxisLabelPosition();return t.axis_rotated?e.isInner?"1.2em":-25-this.getMaxTickWidth("x"):e.isInner?"-0.5em":t.axis_x_height?t.axis_x_height-10:"3em"},e.prototype.dyForYAxisLabel=function(){var t=this.owner,e=this.getYAxisLabelPosition();return t.config.axis_rotated?e.isInner?"-0.5em":"3em":e.isInner?"1.2em":-10-(t.config.axis_y_inner?0:this.getMaxTickWidth("y")+10)},e.prototype.dyForY2AxisLabel=function(){var t=this.owner,e=this.getY2AxisLabelPosition();return 
t.config.axis_rotated?e.isInner?"1.2em":"-2.2em":e.isInner?"-0.5em":15+(t.config.axis_y2_inner?0:this.getMaxTickWidth("y2")+15)},e.prototype.textAnchorForXAxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(!t.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.textAnchorForYAxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(t.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.textAnchorForY2AxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(t.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.getMaxTickWidth=function(t,e){var i,n,a,r,o=this.owner,s=o.config,c=0;return e&&o.currentMaxTickWidths[t]?o.currentMaxTickWidths[t]:(o.svg&&(i=o.filterTargetsToShow(o.data.targets),"y"===t?(n=o.y.copy().domain(o.getYDomain(i,"y")),a=this.getYAxis(n,o.yOrient,s.axis_y_tick_format,o.yAxisTickValues,!1,!0,!0)):"y2"===t?(n=o.y2.copy().domain(o.getYDomain(i,"y2")),a=this.getYAxis(n,o.y2Orient,s.axis_y2_tick_format,o.y2AxisTickValues,!1,!0,!0)):(n=o.x.copy().domain(o.getXDomain(i)),a=this.getXAxis(n,o.xOrient,o.xAxisTickFormat,o.xAxisTickValues,!1,!0,!0),this.updateXAxisTickValues(i,a)),(r=o.d3.select("body").append("div").classed("c3",!0)).append("svg").style("visibility","hidden").style("position","fixed").style("top",0).style("left",0).append("g").call(a).each(function(){o.d3.select(this).selectAll("text").each(function(){var t=this.getBoundingClientRect();c2){for(o=n-2,a=t[0],s=((r=t[t.length-1])-a)/(o+1),u=[a],c=0;c=0&&D.select(this).style("display",e%V?"none":"block")})}else O.svg.selectAll("."+o.axisX+" .tick text").style("display","block");_=O.generateDrawArea?O.generateDrawArea(X,!1):void 0,x=O.generateDrawBar?O.generateDrawBar(k):void 0,y=O.generateDrawLine?O.generateDrawLine(M,!1):void 
0,S=O.generateXYForText(X,k,M,!0),w=O.generateXYForText(X,k,M,!1),i&&(O.subY.domain(O.getYDomain(z,"y")),O.subY2.domain(O.getYDomain(z,"y2"))),O.updateXgridFocus(),R.select("text."+o.text+"."+o.empty).attr("x",O.width/2).attr("y",O.height/2).text(F.data_empty_label_text).transition().style("opacity",z.length?0:1),O.updateGrid(v),O.updateRegion(v),O.updateBar(b),O.updateLine(b),O.updateArea(b),O.updateCircle(),O.hasDataLabel()&&O.updateText(b),O.redrawTitle&&O.redrawTitle(),O.redrawArc&&O.redrawArc(v,b,c),O.redrawSubchart&&O.redrawSubchart(n,e,v,b,X,k,M),R.selectAll("."+o.selectedCircles).filter(O.isBarType.bind(O)).selectAll("circle").remove(),F.interaction_enabled&&!t.flow&&g&&(O.redrawEventRect(),O.updateZoom&&O.updateZoom()),O.updateCircleY(),E=(O.config.axis_rotated?O.circleY:O.circleX).bind(O),I=(O.config.axis_rotated?O.circleX:O.circleY).bind(O),t.flow&&(P=O.generateFlow({targets:z,flow:t.flow,duration:t.flow.duration,drawBar:x,drawLine:y,drawArea:_,cx:E,cy:I,xv:B,xForText:S,yForText:w})),(v||P)&&O.isTabVisible()?D.transition().duration(v).each(function(){var e=[];[O.redrawBar(x,!0),O.redrawLine(y,!0),O.redrawArea(_,!0),O.redrawCircle(E,I,!0),O.redrawText(S,w,t.flow,!0),O.redrawRegion(!0),O.redrawGrid(!0)].forEach(function(t){t.forEach(function(t){e.push(t)})}),T=O.generateWait(),e.forEach(function(t){T.add(t)})}).call(T,function(){P&&P(),F.onrendered&&F.onrendered.call(O)}):(O.redrawBar(x),O.redrawLine(y),O.redrawArea(_),O.redrawCircle(E,I),O.redrawText(S,w,t.flow),O.redrawRegion(),O.redrawGrid(),F.onrendered&&F.onrendered.call(O)),O.mapToIds(O.data.targets).forEach(function(t){O.withoutFadeIn[t]=!0})},A.updateAndRedraw=function(t){var 
e,i=this,n=i.config;(t=t||{}).withTransition=m(t,"withTransition",!0),t.withTransform=m(t,"withTransform",!1),t.withLegend=m(t,"withLegend",!1),t.withUpdateXDomain=!0,t.withUpdateOrgXDomain=!0,t.withTransitionForExit=!1,t.withTransitionForTransform=m(t,"withTransitionForTransform",t.withTransition),i.updateSizes(),t.withLegend&&n.legend_show||(e=i.axis.generateTransitions(t.withTransitionForAxis?n.transition_duration:0),i.updateScales(),i.updateSvgSize(),i.transformAll(t.withTransitionForTransform,e)),i.redraw(t,e)},A.redrawWithoutRescale=function(){this.redraw({withY:!1,withSubchart:!1,withEventRect:!1,withTransitionForAxis:!1})},A.isTimeSeries=function(){return"timeseries"===this.config.axis_x_type},A.isCategorized=function(){return this.config.axis_x_type.indexOf("categor")>=0},A.isCustomX=function(){var t=this,e=t.config;return!t.isTimeSeries()&&(e.data_x||y(e.data_xs))},A.isTimeSeriesY=function(){return"timeseries"===this.config.axis_y_type},A.getTranslate=function(t){var e,i,n=this,a=n.config;return"main"===t?(e=f(n.margin.left),i=f(n.margin.top)):"context"===t?(e=f(n.margin2.left),i=f(n.margin2.top)):"legend"===t?(e=n.margin3.left,i=n.margin3.top):"x"===t?(e=0,i=a.axis_rotated?0:n.height):"y"===t?(e=0,i=a.axis_rotated?n.height:0):"y2"===t?(e=a.axis_rotated?0:n.width,i=a.axis_rotated?1:0):"subx"===t?(e=0,i=a.axis_rotated?0:n.height2):"arc"===t&&(e=n.arcWidth/2,i=n.arcHeight/2),"translate("+e+","+i+")"},A.initialOpacity=function(t){return null!==t.value&&this.withoutFadeIn[t.id]?1:0},A.initialOpacityForCircle=function(t){return null!==t.value&&this.withoutFadeIn[t.id]?this.opacityForCircle(t):0},A.opacityForCircle=function(t){var e=(d(this.config.point_show)?this.config.point_show(t):this.config.point_show)?1:0;return c(t.value)?this.isScatterType(t)?.5:e:0},A.opacityForText=function(){return this.hasDataLabel()?1:0},A.xx=function(t){return t?this.x(t.x):null},A.xv=function(t){var e=this,i=t.value;return 
e.isTimeSeries()?i=e.parseDate(t.value):e.isCategorized()&&"string"==typeof t.value&&(i=e.config.axis_x_categories.indexOf(t.value)),Math.ceil(e.x(i))},A.yv=function(t){var e=this,i=t.axis&&"y2"===t.axis?e.y2:e.y;return Math.ceil(i(t.value))},A.subxx=function(t){return t?this.subX(t.x):null},A.transformMain=function(t,e){var i,n,a,r=this;e&&e.axisX?i=e.axisX:(i=r.main.select("."+o.axisX),t&&(i=i.transition())),e&&e.axisY?n=e.axisY:(n=r.main.select("."+o.axisY),t&&(n=n.transition())),e&&e.axisY2?a=e.axisY2:(a=r.main.select("."+o.axisY2),t&&(a=a.transition())),(t?r.main.transition():r.main).attr("transform",r.getTranslate("main")),i.attr("transform",r.getTranslate("x")),n.attr("transform",r.getTranslate("y")),a.attr("transform",r.getTranslate("y2")),r.main.select("."+o.chartArcs).attr("transform",r.getTranslate("arc"))},A.transformAll=function(t,e){var i=this;i.transformMain(t,e),i.config.subchart_show&&i.transformContext(t,e),i.legend&&i.transformLegend(t)},A.updateSvgSize=function(){var t=this,e=t.svg.select(".c3-brush .background");t.svg.attr("width",t.currentWidth).attr("height",t.currentHeight),t.svg.selectAll(["#"+t.clipId,"#"+t.clipIdForGrid]).select("rect").attr("width",t.width).attr("height",t.height),t.svg.select("#"+t.clipIdForXAxis).select("rect").attr("x",t.getXAxisClipX.bind(t)).attr("y",t.getXAxisClipY.bind(t)).attr("width",t.getXAxisClipWidth.bind(t)).attr("height",t.getXAxisClipHeight.bind(t)),t.svg.select("#"+t.clipIdForYAxis).select("rect").attr("x",t.getYAxisClipX.bind(t)).attr("y",t.getYAxisClipY.bind(t)).attr("width",t.getYAxisClipWidth.bind(t)).attr("height",t.getYAxisClipHeight.bind(t)),t.svg.select("#"+t.clipIdForSubchart).select("rect").attr("width",t.width).attr("height",e.size()?e.attr("height"):0),t.svg.select("."+o.zoomRect).attr("width",t.width).attr("height",t.height),t.selectChart.style("max-height",t.currentHeight+"px")},A.updateDimension=function(t){var 
e=this;t||(e.config.axis_rotated?(e.axes.x.call(e.xAxis),e.axes.subx.call(e.subXAxis)):(e.axes.y.call(e.yAxis),e.axes.y2.call(e.y2Axis))),e.updateSizes(),e.updateScales(),e.updateSvgSize(),e.transformAll(!1)},A.observeInserted=function(t){var e,i=this;"undefined"!=typeof MutationObserver?(e=new MutationObserver(function(n){n.forEach(function(n){"childList"===n.type&&n.previousSibling&&(e.disconnect(),i.intervalForObserveInserted=window.setInterval(function(){t.node().parentNode&&(window.clearInterval(i.intervalForObserveInserted),i.updateDimension(),i.brush&&i.brush.update(),i.config.oninit.call(i),i.redraw({withTransform:!0,withUpdateXDomain:!0,withUpdateOrgXDomain:!0,withTransition:!1,withTransitionForTransform:!1,withLegend:!0}),t.transition().style("opacity",1))},10))})})).observe(t.node(),{attributes:!0,childList:!0,characterData:!0}):window.console.error("MutationObserver not defined.")},A.bindResize=function(){var t=this,e=t.config;if(t.resizeFunction=t.generateResize(),t.resizeFunction.add(function(){e.onresize.call(t)}),e.resize_auto&&t.resizeFunction.add(function(){void 0!==t.resizeTimeout&&window.clearTimeout(t.resizeTimeout),t.resizeTimeout=window.setTimeout(function(){delete t.resizeTimeout,t.api.flush()},100)}),t.resizeFunction.add(function(){e.onresized.call(t)}),window.attachEvent)window.attachEvent("onresize",t.resizeFunction);else if(window.addEventListener)window.addEventListener("resize",t.resizeFunction,!1);else{var i=window.onresize;i?i.add&&i.remove||(i=t.generateResize()).add(window.onresize):i=t.generateResize(),i.add(t.resizeFunction),window.onresize=i}},A.generateResize=function(){function t(){e.forEach(function(t){t()})}var e=[];return t.add=function(t){e.push(t)},t.remove=function(t){for(var i=0;ie.getTotalLength())break;i--}while(i>0);return i})),"SVGPathSegList"in 
window||(window.SVGPathSegList=function(t){this._pathElement=t,this._list=this._parsePath(this._pathElement.getAttribute("d")),this._mutationObserverConfig={attributes:!0,attributeFilter:["d"]},this._pathElementMutationObserver=new MutationObserver(this._updateListFromPathMutations.bind(this)),this._pathElementMutationObserver.observe(this._pathElement,this._mutationObserverConfig)},window.SVGPathSegList.prototype.classname="SVGPathSegList",Object.defineProperty(window.SVGPathSegList.prototype,"numberOfItems",{get:function(){return this._checkPathSynchronizedToList(),this._list.length},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"pathSegList",{get:function(){return this._pathSegList||(this._pathSegList=new window.SVGPathSegList(this)),this._pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"normalizedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"animatedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"animatedNormalizedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),window.SVGPathSegList.prototype._checkPathSynchronizedToList=function(){this._updateListFromPathMutations(this._pathElementMutationObserver.takeRecords())},window.SVGPathSegList.prototype._updateListFromPathMutations=function(t){if(this._pathElement){var 
e=!1;t.forEach(function(t){"d"==t.attributeName&&(e=!0)}),e&&(this._list=this._parsePath(this._pathElement.getAttribute("d")))}},window.SVGPathSegList.prototype._writeListToPath=function(){this._pathElementMutationObserver.disconnect(),this._pathElement.setAttribute("d",window.SVGPathSegList._pathSegArrayAsString(this._list)),this._pathElementMutationObserver.observe(this._pathElement,this._mutationObserverConfig)},window.SVGPathSegList.prototype.segmentChanged=function(t){this._writeListToPath()},window.SVGPathSegList.prototype.clear=function(){this._checkPathSynchronizedToList(),this._list.forEach(function(t){t._owningPathSegList=null}),this._list=[],this._writeListToPath()},window.SVGPathSegList.prototype.initialize=function(t){return this._checkPathSynchronizedToList(),this._list=[t],t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype._checkValidIndex=function(t){if(isNaN(t)||t<0||t>=this.numberOfItems)throw"INDEX_SIZE_ERR"},window.SVGPathSegList.prototype.getItem=function(t){return this._checkPathSynchronizedToList(),this._checkValidIndex(t),this._list[t]},window.SVGPathSegList.prototype.insertItemBefore=function(t,e){return this._checkPathSynchronizedToList(),e>this.numberOfItems&&(e=this.numberOfItems),t._owningPathSegList&&(t=t.clone()),this._list.splice(e,0,t),t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype.replaceItem=function(t,e){return this._checkPathSynchronizedToList(),t._owningPathSegList&&(t=t.clone()),this._checkValidIndex(e),this._list[e]=t,t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype.removeItem=function(t){this._checkPathSynchronizedToList(),this._checkValidIndex(t);var e=this._list[t];return this._list.splice(t,1),this._writeListToPath(),e},window.SVGPathSegList.prototype.appendItem=function(t){return 
this._checkPathSynchronizedToList(),t._owningPathSegList&&(t=t.clone()),this._list.push(t),t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList._pathSegArrayAsString=function(t){var e="",i=!0;return t.forEach(function(t){i?(i=!1,e+=t._asPathString()):e+=" "+t._asPathString()}),e},window.SVGPathSegList.prototype._parsePath=function(t){if(!t||0==t.length)return[];var e=this,i=function(){this.pathSegList=[]};i.prototype.appendSegment=function(t){this.pathSegList.push(t)};var n=function(t){this._string=t,this._currentIndex=0,this._endIndex=this._string.length,this._previousCommand=window.SVGPathSeg.PATHSEG_UNKNOWN,this._skipOptionalSpaces()};n.prototype._isCurrentSpace=function(){var t=this._string[this._currentIndex];return t<=" "&&(" "==t||"\n"==t||"\t"==t||"\r"==t||"\f"==t)},n.prototype._skipOptionalSpaces=function(){for(;this._currentIndex="0"&&t<="9")&&e!=window.SVGPathSeg.PATHSEG_CLOSEPATH?e==window.SVGPathSeg.PATHSEG_MOVETO_ABS?window.SVGPathSeg.PATHSEG_LINETO_ABS:e==window.SVGPathSeg.PATHSEG_MOVETO_REL?window.SVGPathSeg.PATHSEG_LINETO_REL:e:window.SVGPathSeg.PATHSEG_UNKNOWN},n.prototype.initialCommandIsMoveTo=function(){if(!this.hasMoreData())return!0;var t=this.peekSegmentType();return t==window.SVGPathSeg.PATHSEG_MOVETO_ABS||t==window.SVGPathSeg.PATHSEG_MOVETO_REL},n.prototype._parseNumber=function(){var t=0,e=0,i=1,n=0,a=1,r=1,o=this._currentIndex;if(this._skipOptionalSpaces(),this._currentIndex"9")&&"."!=this._string.charAt(this._currentIndex))){for(var s=this._currentIndex;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)this._currentIndex++;if(this._currentIndex!=s)for(var 
c=this._currentIndex-1,d=1;c>=s;)e+=d*(this._string.charAt(c--)-"0"),d*=10;if(this._currentIndex=this._endIndex||this._string.charAt(this._currentIndex)<"0"||this._string.charAt(this._currentIndex)>"9")return;for(;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)i*=10,n+=(this._string.charAt(this._currentIndex)-"0")/i,this._currentIndex+=1}if(this._currentIndex!=o&&this._currentIndex+1=this._endIndex||this._string.charAt(this._currentIndex)<"0"||this._string.charAt(this._currentIndex)>"9")return;for(;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)t*=10,t+=this._string.charAt(this._currentIndex)-"0",this._currentIndex++}var l=e+n;if(l*=a,t&&(l*=Math.pow(10,r*t)),o!=this._currentIndex)return this._skipOptionalSpacesOrDelimiter(),l}},n.prototype._parseArcFlag=function(){if(!(this._currentIndex>=this._endIndex)){var t=!1,e=this._string.charAt(this._currentIndex++);if("0"==e)t=!1;else{if("1"!=e)return;t=!0}return this._skipOptionalSpacesOrDelimiter(),t}},n.prototype.parseSegment=function(){var t=this._string[this._currentIndex],i=this._pathSegTypeFromChar(t);if(i==window.SVGPathSeg.PATHSEG_UNKNOWN){if(this._previousCommand==window.SVGPathSeg.PATHSEG_UNKNOWN)return null;if((i=this._nextCommandHelper(t,this._previousCommand))==window.SVGPathSeg.PATHSEG_UNKNOWN)return null}else this._currentIndex++;switch(this._previousCommand=i,i){case window.SVGPathSeg.PATHSEG_MOVETO_REL:return new window.SVGPathSegMovetoRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_MOVETO_ABS:return new window.SVGPathSegMovetoAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_REL:return new window.SVGPathSegLinetoRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_ABS:return new window.SVGPathSegLinetoAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_REL:return new 
window.SVGPathSegLinetoHorizontalRel(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_ABS:return new window.SVGPathSegLinetoHorizontalAbs(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_VERTICAL_REL:return new window.SVGPathSegLinetoVerticalRel(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_VERTICAL_ABS:return new window.SVGPathSegLinetoVerticalAbs(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_CLOSEPATH:return this._skipOptionalSpaces(),new window.SVGPathSegClosePath(e);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicRel(e,n.x,n.y,n.x1,n.y1,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_ABS:return n={x1:this._parseNumber(),y1:this._parseNumber(),x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicAbs(e,n.x,n.y,n.x1,n.y1,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_REL:return n={x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicSmoothRel(e,n.x,n.y,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_ABS:return n={x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicSmoothAbs(e,n.x,n.y,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoQuadraticRel(e,n.x,n.y,n.x1,n.y1);case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_ABS:return n={x1:this._parseNumber(),y1:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoQuadraticAbs(e,n.x,n.y,n.x1,n.y1);case 
window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL:return new window.SVGPathSegCurvetoQuadraticSmoothRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS:return new window.SVGPathSegCurvetoQuadraticSmoothAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_ARC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),arcAngle:this._parseNumber(),arcLarge:this._parseArcFlag(),arcSweep:this._parseArcFlag(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegArcRel(e,n.x,n.y,n.x1,n.y1,n.arcAngle,n.arcLarge,n.arcSweep);case window.SVGPathSeg.PATHSEG_ARC_ABS:var n={x1:this._parseNumber(),y1:this._parseNumber(),arcAngle:this._parseNumber(),arcLarge:this._parseArcFlag(),arcSweep:this._parseArcFlag(),x:this._parseNumber(),y:this._parseNumber()};return new window.SVGPathSegArcAbs(e,n.x,n.y,n.x1,n.y1,n.arcAngle,n.arcLarge,n.arcSweep);default:throw"Unknown path seg type."}};var a=new i,r=new n(t);if(!r.initialCommandIsMoveTo())return[];for(;r.hasMoreData();){var o=r.parseSegment();if(!o)return[];a.appendSegment(o)}return a.pathSegList}),b.axis=function(){},b.axis.labels=function(t){var e=this.internal;arguments.length&&(Object.keys(t).forEach(function(i){e.axis.setLabelText(i,t[i])}),e.axis.updateLabels())},b.axis.max=function(t){var e=this.internal,i=e.config;if(!arguments.length)return{x:i.axis_x_max,y:i.axis_y_max,y2:i.axis_y2_max};"object"===(void 0===t?"undefined":s(t))?(c(t.x)&&(i.axis_x_max=t.x),c(t.y)&&(i.axis_y_max=t.y),c(t.y2)&&(i.axis_y2_max=t.y2)):i.axis_y_max=i.axis_y2_max=t,e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})},b.axis.min=function(t){var e=this.internal,i=e.config;if(!arguments.length)return{x:i.axis_x_min,y:i.axis_y_min,y2:i.axis_y2_min};"object"===(void 
0===t?"undefined":s(t))?(c(t.x)&&(i.axis_x_min=t.x),c(t.y)&&(i.axis_y_min=t.y),c(t.y2)&&(i.axis_y2_min=t.y2)):i.axis_y_min=i.axis_y2_min=t,e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})},b.axis.range=function(t){if(!arguments.length)return{max:this.axis.max(),min:this.axis.min()};void 0!==t.max&&this.axis.max(t.max),void 0!==t.min&&this.axis.min(t.min)},b.category=function(t,e){var i=this.internal,n=i.config;return arguments.length>1&&(n.axis_x_categories[t]=e,i.redraw()),n.axis_x_categories[t]},b.categories=function(t){var e=this.internal,i=e.config;return arguments.length?(i.axis_x_categories=t,e.redraw(),i.axis_x_categories):i.axis_x_categories},b.resize=function(t){var e=this.internal.config;e.size_width=t?t.width:null,e.size_height=t?t.height:null,this.flush()},b.flush=function(){this.internal.updateAndRedraw({withLegend:!0,withTransition:!1,withTransitionForTransform:!1})},b.destroy=function(){var t=this.internal;if(window.clearInterval(t.intervalForObserveInserted),void 0!==t.resizeTimeout&&window.clearTimeout(t.resizeTimeout),window.detachEvent)window.detachEvent("onresize",t.resizeFunction);else if(window.removeEventListener)window.removeEventListener("resize",t.resizeFunction);else{var e=window.onresize;e&&e.add&&e.remove&&e.remove(t.resizeFunction)}return t.selectChart.classed("c3",!1).html(""),Object.keys(t).forEach(function(e){t[e]=null}),null},b.color=function(t){return this.internal.color(t)},b.data=function(t){var e=this.internal.data.targets;return void 0===t?e:e.filter(function(e){return[].concat(t).indexOf(e.id)>=0})},b.data.shown=function(t){return this.internal.filterTargetsToShow(this.data(t))},b.data.values=function(t){var e,i=null;return t&&(i=(e=this.data(t))[0]?e[0].values.map(function(t){return t.value}):null),i},b.data.names=function(t){return this.internal.clearLegendItemTextBoxCache(),this.internal.updateDataAttributes("names",t)},b.data.colors=function(t){return 
this.internal.updateDataAttributes("colors",t)},b.data.axes=function(t){return this.internal.updateDataAttributes("axes",t)},b.flow=function(t){var e,i,n,a,r,o,s,d=this.internal,l=[],u=d.getMaxDataCount(),h=0,g=0;if(t.json)i=d.convertJsonToData(t.json,t.keys);else if(t.rows)i=d.convertRowsToData(t.rows);else{if(!t.columns)return;i=d.convertColumnsToData(t.columns)}e=d.convertDataToTargets(i,!0),d.data.targets.forEach(function(t){var i,n,a=!1;for(i=0;i1?a.values[a.values.length-1].x-r.x:r.x-d.getXDomain(d.data.targets)[0]:1,n=[r.x-o,r.x],d.updateXDomain(null,!0,!0,!1,n)),d.updateTargets(d.data.targets),d.redraw({flow:{index:r.index,length:h,duration:c(t.duration)?t.duration:d.config.transition_duration,done:t.done,orgDataCount:u},withLegend:!0,withTransition:u>1,withTrimXDomain:!1,withUpdateXAxis:!0})},A.generateFlow=function(t){var e=this,i=e.config,n=e.d3;return function(){var a,r,s,c=t.targets,d=t.flow,l=t.drawBar,u=t.drawLine,h=t.drawArea,g=t.cx,p=t.cy,f=t.xv,x=t.xForText,y=t.yForText,m=t.duration,S=1,w=d.index,v=d.length,b=e.getValueOnIndex(e.data.targets[0].values,w),A=e.getValueOnIndex(e.data.targets[0].values,w+v),T=e.x.domain(),P=d.duration||m,L=d.done||function(){},C=e.generateWait(),V=e.xgrid||n.selectAll([]),G=e.xgridLines||n.selectAll([]),E=e.mainRegion||n.selectAll([]),I=e.mainText||n.selectAll([]),O=e.mainBar||n.selectAll([]),R=e.mainLine||n.selectAll([]),D=e.mainArea||n.selectAll([]),F=e.mainCircle||n.selectAll([]);e.flowing=!0,e.data.targets.forEach(function(t){t.values.splice(0,v)}),s=e.updateXDomain(c,!0,!0),e.updateXGrid&&e.updateXGrid(!0),d.orgDataCount?a=1===d.orgDataCount||(b&&b.x)===(A&&A.x)?e.x(T[0])-e.x(s[0]):e.isTimeSeries()?e.x(T[0])-e.x(s[0]):e.x(b.x)-e.x(A.x):1!==e.data.targets[0].values.length?a=e.x(T[0])-e.x(s[0]):e.isTimeSeries()?(b=e.getValueOnIndex(e.data.targets[0].values,0),A=e.getValueOnIndex(e.data.targets[0].values,e.data.targets[0].values.length-1),a=e.x(b.x)-e.x(A.x)):a=_(s)/2,S=_(T)/_(s),r="translate("+a+",0) 
scale("+S+",1)",e.hideXGridFocus(),n.transition().ease("linear").duration(P).each(function(){C.add(e.axes.x.transition().call(e.xAxis)),C.add(O.transition().attr("transform",r)),C.add(R.transition().attr("transform",r)),C.add(D.transition().attr("transform",r)),C.add(F.transition().attr("transform",r)),C.add(I.transition().attr("transform",r)),C.add(E.filter(e.isRegionOnX).transition().attr("transform",r)),C.add(V.transition().attr("transform",r)),C.add(G.transition().attr("transform",r))}).call(C,function(){var t,n=[],a=[],r=[];if(v){for(t=0;t=0&&(e=!0)}),!e)}),r.regions},b.selected=function(t){var e=this.internal,i=e.d3;return i.merge(e.main.selectAll("."+o.shapes+e.getTargetSelectorSuffix(t)).selectAll("."+o.shape).filter(function(){return i.select(this).classed(o.SELECTED)}).map(function(t){return t.map(function(t){var e=t.__data__;return e.data?e.data:e})}))},b.select=function(t,e,i){var n=this.internal,a=n.d3,r=n.config;r.data_selection_enabled&&n.main.selectAll("."+o.shapes).selectAll("."+o.shape).each(function(s,c){var d=a.select(this),l=s.data?s.data.id:s.id,u=n.getToggle(this,s).bind(n),h=r.data_selection_grouped||!t||t.indexOf(l)>=0,g=!e||e.indexOf(c)>=0,p=d.classed(o.SELECTED);d.classed(o.line)||d.classed(o.area)||(h&&g?r.data_selection_isselectable(s)&&!p&&u(!0,d.classed(o.SELECTED,!0),s,c):void 0!==i&&i&&p&&u(!1,d.classed(o.SELECTED,!1),s,c))})},b.unselect=function(t,e){var i=this.internal,n=i.d3,a=i.config;a.data_selection_enabled&&i.main.selectAll("."+o.shapes).selectAll("."+o.shape).each(function(r,s){var c=n.select(this),d=r.data?r.data.id:r.id,l=i.getToggle(this,r).bind(i),u=a.data_selection_grouped||!t||t.indexOf(d)>=0,h=!e||e.indexOf(s)>=0,g=c.classed(o.SELECTED);c.classed(o.line)||c.classed(o.area)||u&&h&&a.data_selection_isselectable(r)&&g&&l(!1,c.classed(o.SELECTED,!1),r,s)})},b.show=function(t,e){var 
i,n=this.internal;t=n.mapToTargetIds(t),e=e||{},n.removeHiddenTargetIds(t),(i=n.svg.selectAll(n.selectorTargets(t))).transition().style("opacity",1,"important").call(n.endall,function(){i.style("opacity",null).style("opacity",1)}),e.withLegend&&n.showLegend(t),n.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0,withLegend:!0})},b.hide=function(t,e){var i,n=this.internal;t=n.mapToTargetIds(t),e=e||{},n.addHiddenTargetIds(t),(i=n.svg.selectAll(n.selectorTargets(t))).transition().style("opacity",0,"important").call(n.endall,function(){i.style("opacity",null).style("opacity",0)}),e.withLegend&&n.hideLegend(t),n.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0,withLegend:!0})},b.toggle=function(t,e){var i=this,n=this.internal;n.mapToTargetIds(t).forEach(function(t){n.isTargetToShow(t)?i.hide(t,e):i.show(t,e)})},b.tooltip=function(){},b.tooltip.show=function(t){var e,i,n=this.internal;t.mouse&&(i=t.mouse),t.data?n.isMultipleX()?(i=[n.x(t.data.x),n.getYScale(t.data.id)(t.data.value)],e=null):e=c(t.data.index)?t.data.index:n.getIndexByX(t.data.x):void 0!==t.x?e=n.getIndexByX(t.x):void 0!==t.index&&(e=t.index),n.dispatchEvent("mouseover",e,i),n.dispatchEvent("mousemove",e,i),n.config.tooltip_onshow.call(n,t.data)},b.tooltip.hide=function(){this.internal.dispatchEvent("mouseout",0),this.internal.config.tooltip_onhide.call(this)},b.transform=function(t,e){var i=this.internal,n=["pie","donut"].indexOf(t)>=0?{withTransform:!0}:null;i.transformTo(e,t,n)},A.transformTo=function(t,e,i){var n=this,a=!n.hasArcType(),r=i||{withTransitionForAxis:a};r.withTransitionForTransform=!1,n.transiting=!1,n.setTargetType(t,e),n.updateTargets(n.data.targets),n.updateAndRedraw(r)},b.x=function(t){var e=this.internal;return arguments.length&&(e.updateTargetX(e.data.targets,t),e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})),e.data.xs},b.xs=function(t){var e=this.internal;return 
arguments.length&&(e.updateTargetXs(e.data.targets,t),e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})),e.data.xs},b.zoom=function(t){var e=this.internal;return t&&(e.isTimeSeries()&&(t=t.map(function(t){return e.parseDate(t)})),e.brush.extent(t),e.redraw({withUpdateXDomain:!0,withY:e.config.zoom_rescale}),e.config.zoom_onzoom.call(this,e.x.orgDomain())),e.brush.extent()},b.zoom.enable=function(t){var e=this.internal;e.config.zoom_enabled=t,e.updateAndRedraw()},b.unzoom=function(){var t=this.internal;t.brush.clear().update(),t.redraw({withUpdateXDomain:!0})},b.zoom.max=function(t){var e=this.internal,i=e.config,n=e.d3;if(0!==t&&!t)return i.zoom_x_max;i.zoom_x_max=n.max([e.orgXDomain[1],t])},b.zoom.min=function(t){var e=this.internal,i=e.config,n=e.d3;if(0!==t&&!t)return i.zoom_x_min;i.zoom_x_min=n.min([e.orgXDomain[0],t])},b.zoom.range=function(t){if(!arguments.length)return{max:this.domain.max(),min:this.domain.min()};void 0!==t.max&&this.domain.max(t.max),void 0!==t.min&&this.domain.min(t.min)},A.initPie=function(){var t=this,e=t.d3,i=t.config;t.pie=e.layout.pie().value(function(t){return t.values.reduce(function(t,e){return t+e.value},0)}),i.data_order||t.pie.sort(null)},A.updateRadius=function(){var t=this,e=t.config,i=e.gauge_width||e.donut_width;t.radiusExpanded=Math.min(t.arcWidth,t.arcHeight)/2,t.radius=.95*t.radiusExpanded,t.innerRadiusRatio=i?(t.radius-i)/t.radius:.6,t.innerRadius=t.hasType("donut")||t.hasType("gauge")?t.radius*t.innerRadiusRatio:0},A.updateArc=function(){var t=this;t.svgArc=t.getSvgArc(),t.svgArcExpanded=t.getSvgArcExpanded(),t.svgArcExpandedSub=t.getSvgArcExpanded(.98)},A.updateAngle=function(t){var e,i,n,a,r=this,o=r.config,s=!1,c=0;return 
o?(r.pie(r.filterTargetsToShow(r.data.targets)).forEach(function(e){s||e.data.id!==t.data.id||(s=!0,(t=e).index=c),c++}),isNaN(t.startAngle)&&(t.startAngle=0),isNaN(t.endAngle)&&(t.endAngle=t.startAngle),r.isGaugeType(t.data)&&(e=o.gauge_min,i=o.gauge_max,n=Math.PI*(o.gauge_fullCircle?2:1)/(i-e),a=t.value.375?1.175-36/o.radius:.8)*o.radius/a:0)+","+n*r+")"),l},A.getArcRatio=function(t){var e=this,i=e.config,n=Math.PI*(e.hasType("gauge")&&!i.gauge_fullCircle?1:2);return t?(t.endAngle-t.startAngle)/n:null},A.convertToArcData=function(t){return this.addName({id:t.data.id,value:t.value,ratio:this.getArcRatio(t),index:t.index})},A.textForArcLabel=function(t){var e,i,n,a,r,o=this;return o.shouldShowArcLabel()?(e=o.updateAngle(t),i=e?e.value:null,n=o.getArcRatio(e),a=t.data.id,o.hasType("gauge")||o.meetsArcLabelThreshold(n)?(r=o.getArcLabelFormat(),r?r(i,n,a):o.defaultArcValueFormat(i,n)):""):""},A.textForGaugeMinMax=function(t,e){var i=this.getGaugeLabelExtents();return i?i(t,e):t},A.expandArc=function(t){var e,i=this;i.transiting?e=window.setInterval(function(){i.transiting||(window.clearInterval(e),i.legend.selectAll(".c3-legend-item-focused").size()>0&&i.expandArc(t))},10):(t=i.mapToTargetIds(t),i.svg.selectAll(i.selectorTargets(t,"."+o.chartArc)).each(function(t){i.shouldExpand(t.data.id)&&i.d3.select(this).selectAll("path").transition().duration(i.expandDuration(t.data.id)).attr("d",i.svgArcExpanded).transition().duration(2*i.expandDuration(t.data.id)).attr("d",i.svgArcExpandedSub).each(function(t){i.isDonutType(t.data)})}))},A.unexpandArc=function(t){var e=this;e.transiting||(t=e.mapToTargetIds(t),e.svg.selectAll(e.selectorTargets(t,"."+o.chartArc)).selectAll("path").transition().duration(function(t){return e.expandDuration(t.data.id)}).attr("d",e.svgArc),e.svg.selectAll("."+o.arc))},A.expandDuration=function(t){var e=this,i=e.config;return 
e.isDonutType(t)?i.donut_expand_duration:e.isGaugeType(t)?i.gauge_expand_duration:e.isPieType(t)?i.pie_expand_duration:50},A.shouldExpand=function(t){var e=this,i=e.config;return e.isDonutType(t)&&i.donut_expand||e.isGaugeType(t)&&i.gauge_expand||e.isPieType(t)&&i.pie_expand},A.shouldShowArcLabel=function(){var t=this,e=t.config,i=!0;return t.hasType("donut")?i=e.donut_label_show:t.hasType("pie")&&(i=e.pie_label_show),i},A.meetsArcLabelThreshold=function(t){var e=this,i=e.config;return t>=(e.hasType("donut")?i.donut_label_threshold:i.pie_label_threshold)},A.getArcLabelFormat=function(){var t=this,e=t.config,i=e.pie_label_format;return t.hasType("gauge")?i=e.gauge_label_format:t.hasType("donut")&&(i=e.donut_label_format),i},A.getGaugeLabelExtents=function(){return this.config.gauge_label_extents},A.getArcTitle=function(){var t=this;return t.hasType("donut")?t.config.donut_title:""},A.updateTargetsForArc=function(t){var e,i=this,n=i.main,a=i.classChartArc.bind(i),r=i.classArcs.bind(i),s=i.classFocus.bind(i);(e=n.select("."+o.chartArcs).selectAll("."+o.chartArc).data(i.pie(t)).attr("class",function(t){return a(t)+s(t.data)}).enter().append("g").attr("class",a)).append("g").attr("class",r),e.append("text").attr("dy",i.hasType("gauge")?"-.1em":".35em").style("opacity",0).style("text-anchor","middle").style("pointer-events","none")},A.initArc=function(){var t=this;t.arcs=t.main.select("."+o.chart).append("g").attr("class",o.chartArcs).attr("transform",t.getTranslate("arc")),t.arcs.append("text").attr("class",o.chartArcsTitle).style("text-anchor","middle").text(t.getArcTitle())},A.redrawArc=function(t,e,i){var n,a=this,r=a.d3,s=a.config,c=a.main;(n=c.selectAll("."+o.arcs).selectAll("."+o.arc).data(a.arcData.bind(a))).enter().append("path").attr("class",a.classArc.bind(a)).style("fill",function(t){return a.color(t.data)}).style("cursor",function(t){return 
s.interaction_enabled&&s.data_selection_isselectable(t)?"pointer":null}).each(function(t){a.isGaugeType(t.data)&&(t.startAngle=t.endAngle=s.gauge_startingAngle),this._current=t}),n.attr("transform",function(t){return!a.isGaugeType(t.data)&&i?"scale(0)":""}).on("mouseover",s.interaction_enabled?function(t){var e,i;a.transiting||(e=a.updateAngle(t))&&(i=a.convertToArcData(e),a.expandArc(e.data.id),a.api.focus(e.data.id),a.toggleFocusLegend(e.data.id,!0),a.config.data_onmouseover(i,this))}:null).on("mousemove",s.interaction_enabled?function(t){var e,i=a.updateAngle(t);i&&(e=[a.convertToArcData(i)],a.showTooltip(e,this))}:null).on("mouseout",s.interaction_enabled?function(t){var e,i;a.transiting||(e=a.updateAngle(t))&&(i=a.convertToArcData(e),a.unexpandArc(e.data.id),a.api.revert(),a.revertLegend(),a.hideTooltip(),a.config.data_onmouseout(i,this))}:null).on("click",s.interaction_enabled?function(t,e){var i,n=a.updateAngle(t);n&&(i=a.convertToArcData(n),a.toggleShape&&a.toggleShape(this,i,e),a.config.data_onclick.call(a.api,i,this))}:null).each(function(){a.transiting=!0}).transition().duration(t).attrTween("d",function(t){var e,i=a.updateAngle(t);return i?(isNaN(this._current.startAngle)&&(this._current.startAngle=0),isNaN(this._current.endAngle)&&(this._current.endAngle=this._current.startAngle),e=r.interpolate(this._current,i),this._current=e(0),function(i){var n=e(i);return n.data=t.data,a.getArc(n,!0)}):function(){return"M 0 0"}}).attr("transform",i?"scale(1)":"").style("fill",function(t){return a.levelColor?a.levelColor(t.data.values[0].value):a.color(t.data.id)}).call(a.endall,function(){a.transiting=!1}),n.exit().transition().duration(e).style("opacity",0).remove(),c.selectAll("."+o.chartArc).select("text").style("opacity",0).attr("class",function(t){return a.isGaugeType(t.data)?o.gaugeValue:""}).text(a.textForArcLabel.bind(a)).attr("transform",a.transformForArcLabel.bind(a)).style("font-size",function(t){return 
a.isGaugeType(t.data)?Math.round(a.radius/5)+"px":""}).transition().duration(t).style("opacity",function(t){return a.isTargetToShow(t.data.id)&&a.isArcType(t.data)?1:0}),c.select("."+o.chartArcsTitle).style("opacity",a.hasType("donut")||a.hasType("gauge")?1:0),a.hasType("gauge")&&(a.arcs.select("."+o.chartArcsBackground).attr("d",function(){var t={data:[{value:s.gauge_max}],startAngle:s.gauge_startingAngle,endAngle:-1*s.gauge_startingAngle};return a.getArc(t,!0,!0)}),a.arcs.select("."+o.chartArcsGaugeUnit).attr("dy",".75em").text(s.gauge_label_show?s.gauge_units:""),a.arcs.select("."+o.chartArcsGaugeMin).attr("dx",-1*(a.innerRadius+(a.radius-a.innerRadius)/(s.gauge_fullCircle?1:2))+"px").attr("dy","1.2em").text(s.gauge_label_show?a.textForGaugeMinMax(s.gauge_min,!1):""),a.arcs.select("."+o.chartArcsGaugeMax).attr("dx",a.innerRadius+(a.radius-a.innerRadius)/(s.gauge_fullCircle?1:2)+"px").attr("dy","1.2em").text(s.gauge_label_show?a.textForGaugeMinMax(s.gauge_max,!0):""))},A.initGauge=function(){var t=this.arcs;this.hasType("gauge")&&(t.append("path").attr("class",o.chartArcsBackground),t.append("text").attr("class",o.chartArcsGaugeUnit).style("text-anchor","middle").style("pointer-events","none"),t.append("text").attr("class",o.chartArcsGaugeMin).style("text-anchor","middle").style("pointer-events","none"),t.append("text").attr("class",o.chartArcsGaugeMax).style("text-anchor","middle").style("pointer-events","none"))},A.getGaugeLabelHeight=function(){return this.config.gauge_label_show?20:0},A.hasCaches=function(t){for(var e=0;e=0?o.focused:"")},A.classDefocused=function(t){return" "+(this.defocusedTargetIds.indexOf(t.id)>=0?o.defocused:"")},A.classChartText=function(t){return o.chartText+this.classTarget(t.id)},A.classChartLine=function(t){return o.chartLine+this.classTarget(t.id)},A.classChartBar=function(t){return o.chartBar+this.classTarget(t.id)},A.classChartArc=function(t){return 
o.chartArc+this.classTarget(t.data.id)},A.getTargetSelectorSuffix=function(t){return t||0===t?("-"+t).replace(/[\s?!@#$%^&*()_=+,.<>'":;\[\]\/|~`{}\\]/g,"-"):""},A.selectorTarget=function(t,e){return(e||"")+"."+o.target+this.getTargetSelectorSuffix(t)},A.selectorTargets=function(t,e){var i=this;return t=t||[],t.length?t.map(function(t){return i.selectorTarget(t,e)}):null},A.selectorLegend=function(t){return"."+o.legendItem+this.getTargetSelectorSuffix(t)},A.selectorLegends=function(t){var e=this;return t&&t.length?t.map(function(t){return e.selectorLegend(t)}):null},A.getClipPath=function(t){return"url("+(window.navigator.appVersion.toLowerCase().indexOf("msie 9.")>=0?"":document.URL.split("#")[0])+"#"+t+")"},A.appendClip=function(t,e){return t.append("clipPath").attr("id",e).append("rect")},A.getAxisClipX=function(t){var e=Math.max(30,this.margin.left);return t?-(1+e):-(e-1)},A.getAxisClipY=function(t){return t?-20:-this.margin.top},A.getXAxisClipX=function(){var t=this;return t.getAxisClipX(!t.config.axis_rotated)},A.getXAxisClipY=function(){var t=this;return t.getAxisClipY(!t.config.axis_rotated)},A.getYAxisClipX=function(){var t=this;return t.config.axis_y_inner?-1:t.getAxisClipX(t.config.axis_rotated)},A.getYAxisClipY=function(){var t=this;return t.getAxisClipY(t.config.axis_rotated)},A.getAxisClipWidth=function(t){var e=this,i=Math.max(30,e.margin.left),n=Math.max(30,e.margin.right);return t?e.width+2+i+n:e.margin.left+20},A.getAxisClipHeight=function(t){return(t?this.margin.bottom:this.margin.top+this.height)+20},A.getXAxisClipWidth=function(){var t=this;return t.getAxisClipWidth(!t.config.axis_rotated)},A.getXAxisClipHeight=function(){var t=this;return t.getAxisClipHeight(!t.config.axis_rotated)},A.getYAxisClipWidth=function(){var t=this;return t.getAxisClipWidth(t.config.axis_rotated)+(t.config.axis_y_inner?20:0)},A.getYAxisClipHeight=function(){var t=this;return t.getAxisClipHeight(t.config.axis_rotated)},A.generateColor=function(){var 
t=this,e=t.config,i=t.d3,n=e.data_colors,a=y(e.color_pattern)?e.color_pattern:i.scale.category10().range(),r=e.data_color,o=[];return function(t){var e,i=t.id||t.data&&t.data.id||t;return n[i]instanceof Function?e=n[i](t):n[i]?e=n[i]:(o.indexOf(i)<0&&o.push(i),e=a[o.indexOf(i)%a.length],n[i]=e),r instanceof Function?r(e,t):e}},A.generateLevelColor=function(){var t=this.config,e=t.color_pattern,i=t.color_threshold,n="value"===i.unit,a=i.values&&i.values.length?i.values:[],r=i.max||100;return y(t.color_threshold)?function(t){var i,o=e[e.length-1];for(i=0;i=0?n.data.xs[i]=(e&&n.data.xs[i]?n.data.xs[i]:[]).concat(t.map(function(t){return t[r]}).filter(c).map(function(t,e){return n.generateTargetX(t,i,e)})):a.data_x?n.data.xs[i]=n.getOtherTargetXs():y(a.data_xs)&&(n.data.xs[i]=n.getXValuesOfXKey(r,n.data.targets)):n.data.xs[i]=t.map(function(t,e){return e})}),r.forEach(function(t){if(!n.data.xs[t])throw new Error('x is not defined for id = "'+t+'".')}),(i=r.map(function(e,i){var r=a.data_idConverter(e);return{id:r,id_org:e,values:t.map(function(t,o){var s,c=t[n.getXKey(e)],d=null===t[e]||isNaN(t[e])?null:+t[e];return n.isCustomX()&&n.isCategorized()&&void 0!==c?(0===i&&0===o&&(a.axis_x_categories=[]),-1===(s=a.axis_x_categories.indexOf(c))&&(s=a.axis_x_categories.length,a.axis_x_categories.push(c))):s=n.generateTargetX(c,e,o),(void 0===t[e]||n.data.xs[e].length<=o)&&(s=void 0),{x:s,value:d,id:r}}).filter(function(t){return g(t.x)})}})).forEach(function(t){var e;a.data_xSort&&(t.values=t.values.sort(function(t,e){return(t.x||0===t.x?t.x:1/0)-(e.x||0===e.x?e.x:1/0)})),e=0,t.values.forEach(function(t){t.index=e++}),n.data.xs[t.id].sort(function(t,e){return t-e})}),n.hasNegativeValue=n.hasNegativeValueInTargets(i),n.hasPositiveValue=n.hasPositiveValueInTargets(i),a.data_type&&n.setTargetType(n.mapToIds(i).filter(function(t){return!(t in a.data_types)}),a.data_type),i.forEach(function(t){n.addCache(t.id_org,t)}),i},A.isX=function(t){var e=this.config;return 
e.data_x&&t===e.data_x||y(e.data_xs)&&S(e.data_xs,t)},A.isNotX=function(t){return!this.isX(t)},A.getXKey=function(t){var e=this.config;return e.data_x?e.data_x:y(e.data_xs)?e.data_xs[t]:null},A.getXValuesOfXKey=function(t,e){var i,n=this;return(e&&y(e)?n.mapToIds(e):[]).forEach(function(e){n.getXKey(e)===t&&(i=n.data.xs[e])}),i},A.getIndexByX=function(t){var e=this,i=e.filterByX(e.data.targets,t);return i.length?i[0].index:null},A.getXValue=function(t,e){var i=this;return t in i.data.xs&&i.data.xs[t]&&c(i.data.xs[t][e])?i.data.xs[t][e]:e},A.getOtherTargetXs=function(){var t=this,e=Object.keys(t.data.xs);return e.length?t.data.xs[e[0]]:null},A.getOtherTargetX=function(t){var e=this.getOtherTargetXs();return e&&t1},A.isMultipleX=function(){return y(this.config.data_xs)||!this.config.data_xSort||this.hasType("scatter")},A.addName=function(t){var e,i=this;return t&&(e=i.config.data_names[t.id],t.name=void 0!==e?e:t.id),t},A.getValueOnIndex=function(t,e){var i=t.filter(function(t){return t.index===e});return i.length?i[0]:null},A.updateTargetX=function(t,e){var i=this;t.forEach(function(t){t.values.forEach(function(n,a){n.x=i.generateTargetX(e[a],t.id,a)}),i.data.xs[t.id]=e})},A.updateTargetXs=function(t,e){var i=this;t.forEach(function(t){e[t.id]&&i.updateTargetX([t],e[t.id])})},A.generateTargetX=function(t,e,i){var n=this;return n.isTimeSeries()?t?n.parseDate(t):n.parseDate(n.getXValue(e,i)):n.isCustomX()&&!n.isCategorized()?c(t)?+t:n.getXValue(e,i):i},A.cloneTarget=function(t){return{id:t.id,id_org:t.id_org,values:t.values.map(function(t){return{x:t.x,value:t.value,id:t.id}})}},A.updateXs=function(){var t=this;t.data.targets.length&&(t.xs=[],t.data.targets[0].values.forEach(function(e){t.xs[e.index]=e.x}))},A.getPrevX=function(t){var e=this.xs[t-1];return void 0!==e?e:null},A.getNextX=function(t){var e=this.xs[t+1];return void 0!==e?e:null},A.getMaxDataCount=function(){var t=this;return t.d3.max(t.data.targets,function(t){return 
t.values.length})},A.getMaxDataCountTarget=function(t){var e,i=t.length,n=0;return i>1?t.forEach(function(t){t.values.length>n&&(e=t,n=t.values.length)}):e=i?t[0]:null,e},A.getEdgeX=function(t){var e=this;return t.length?[e.d3.min(t,function(t){return t.values[0].x}),e.d3.max(t,function(t){return t.values[t.values.length-1].x})]:[0,0]},A.mapToIds=function(t){return t.map(function(t){return t.id})},A.mapToTargetIds=function(t){var e=this;return t?[].concat(t):e.mapToIds(e.data.targets)},A.hasTarget=function(t,e){var i,n=this.mapToIds(t);for(i=0;ie?1:t>=e?0:NaN})},A.addHiddenTargetIds=function(t){t=t instanceof Array?t:new Array(t);for(var e=0;e0})},A.isOrderDesc=function(){var t=this.config;return"string"==typeof t.data_order&&"desc"===t.data_order.toLowerCase()},A.isOrderAsc=function(){var t=this.config;return"string"==typeof t.data_order&&"asc"===t.data_order.toLowerCase()},A.orderTargets=function(t){var e=this,i=e.config,n=e.isOrderAsc(),a=e.isOrderDesc();return n||a?t.sort(function(t,e){var i=function(t,e){return t+Math.abs(e.value)},a=t.values.reduce(i,0),r=e.values.reduce(i,0);return n?r-a:a-r}):d(i.data_order)?t.sort(i.data_order):l(i.data_order)&&t.sort(function(t,e){return i.data_order.indexOf(t.id)-i.data_order.indexOf(e.id)}),t},A.filterByX=function(t,e){return this.d3.merge(t.map(function(t){return t.values})).filter(function(t){return t.x-e==0})},A.filterRemoveNull=function(t){return t.filter(function(t){return c(t.value)})},A.filterByXDomain=function(t,e){return t.map(function(t){return{id:t.id,id_org:t.id_org,values:t.values.filter(function(t){return e[0]<=t.x&&t.x<=e[1]})}})},A.hasDataLabel=function(){var t=this.config;return!("boolean"!=typeof t.data_labels||!t.data_labels)||!("object"!==s(t.data_labels)||!y(t.data_labels))},A.getDataLabelLength=function(t,e,i){var n=this,a=[0,0];return n.selectChart.select("svg").selectAll(".dummy").data([t,e]).enter().append("text").text(function(t){return 
n.dataLabelFormat(t.id)(t)}).each(function(t,e){a[e]=1.3*this.getBoundingClientRect()[i]}).remove(),a},A.isNoneArc=function(t){return this.hasTarget(this.data.targets,t.id)},A.isArc=function(t){return"data"in t&&this.hasTarget(this.data.targets,t.data.id)},A.findSameXOfValues=function(t,e){var i,n=t[e].x,a=[];for(i=e-1;i>=0&&n===t[i].x;i--)a.push(t[i]);for(i=e;i0)for(o=s.hasNegativeValueInTargets(t),e=0;e=0})).length)for(n=a[0],o&&l[n]&&l[n].forEach(function(t,e){l[n][e]=t<0?t:0}),i=1;i0||(l[n][e]+=+t)});return s.d3.min(Object.keys(l).map(function(t){return s.d3.min(l[t])}))},A.getYDomainMax=function(t){var e,i,n,a,r,o,s=this,c=s.config,d=s.mapToIds(t),l=s.getValuesAsIdKeyed(t);if(c.data_groups.length>0)for(o=s.hasPositiveValueInTargets(t),e=0;e=0})).length)for(n=a[0],o&&l[n]&&l[n].forEach(function(t,e){l[n][e]=t>0?t:0}),i=1;i=0&&b>=0,g=v<=0&&b<=0,(c(S)&&h||c(w)&&g)&&(T=!1),T&&(h&&(v=0),g&&(b=0)),a=Math.abs(b-v),r=o=.1*a,void 0!==A&&(b=A+(s=Math.max(Math.abs(v),Math.abs(b))),v=A-s),L?(d=p.getDataLabelLength(v,b,"width"),l=_(p.y.range()),r+=a*((u=[d[0]/l,d[1]/l])[1]/(1-u[0]-u[1])),o+=a*(u[0]/(1-u[0]-u[1]))):C&&(d=p.getDataLabelLength(v,b,"height"),r+=p.axis.convertPixelsToAxisPadding(d[1],a),o+=p.axis.convertPixelsToAxisPadding(d[0],a)),"y"===e&&y(f.axis_y_padding)&&(r=p.axis.getPadding(f.axis_y_padding,"top",r,a),o=p.axis.getPadding(f.axis_y_padding,"bottom",o,a)),"y2"===e&&y(f.axis_y2_padding)&&(r=p.axis.getPadding(f.axis_y2_padding,"top",r,a),o=p.axis.getPadding(f.axis_y2_padding,"bottom",o,a)),T&&(h&&(o=v),g&&(r=-b)),n=[v-o,b+r],P?n.reverse():n)},A.getXDomainMin=function(t){var e=this,i=e.config;return void 0!==i.axis_x_min?e.isTimeSeries()?this.parseDate(i.axis_x_min):i.axis_x_min:e.d3.min(t,function(t){return e.d3.min(t.values,function(t){return t.x})})},A.getXDomainMax=function(t){var e=this,i=e.config;return void 0!==i.axis_x_max?e.isTimeSeries()?this.parseDate(i.axis_x_max):i.axis_x_max:e.d3.max(t,function(t){return e.d3.max(t.values,function(t){return 
t.x})})},A.getXDomainPadding=function(t){var e,i,n,a,r=this,o=r.config,d=t[1]-t[0];return i=r.isCategorized()?0:r.hasType("bar")?(e=r.getMaxDataCount())>1?d/(e-1)/2:.5:.01*d,"object"===s(o.axis_x_padding)&&y(o.axis_x_padding)?(n=c(o.axis_x_padding.left)?o.axis_x_padding.left:i,a=c(o.axis_x_padding.right)?o.axis_x_padding.right:i):n=a="number"==typeof o.axis_x_padding?o.axis_x_padding:i,{left:n,right:a}},A.getXDomain=function(t){var e=this,i=[e.getXDomainMin(t),e.getXDomainMax(t)],n=i[0],a=i[1],r=e.getXDomainPadding(i),o=0,s=0;return n-a!=0||e.isCategorized()||(e.isTimeSeries()?(n=new Date(.5*n.getTime()),a=new Date(1.5*a.getTime())):(n=0===n?1:.5*n,a=0===a?-1:1.5*a)),(n||0===n)&&(o=e.isTimeSeries()?new Date(n.getTime()-r.left):n-r.left),(a||0===a)&&(s=e.isTimeSeries()?new Date(a.getTime()+r.right):a+r.right),[o,s]},A.updateXDomain=function(t,e,i,n,a){var r=this,o=r.config;return i&&(r.x.domain(a||r.d3.extent(r.getXDomain(t))),r.orgXDomain=r.x.domain(),o.zoom_enabled&&r.zoom.scale(r.x).updateScaleExtent(),r.subX.domain(r.x.domain()),r.brush&&r.brush.scale(r.subX)),e&&(r.x.domain(a||(!r.brush||r.brush.empty()?r.orgXDomain:r.brush.extent())),o.zoom_enabled&&r.zoom.scale(r.x).updateScaleExtent()),n&&r.x.domain(r.trimXDomain(r.x.orgDomain())),r.x.domain()},A.trimXDomain=function(t){var e=this.getZoomDomain(),i=e[0],n=e[1];return t[0]<=i&&(t[1]=+t[1]+(i-t[0]),t[0]=i),n<=t[1]&&(t[0]=+t[0]-(t[1]-n),t[1]=n),t},A.drag=function(t){var e,i,n,a,r,s,c,d,l=this,u=l.config,h=l.main,g=l.d3;l.hasArcType()||u.data_selection_enabled&&(u.zoom_enabled&&!l.zoom.altDomain||u.data_selection_multiple&&(e=l.dragStart[0],i=l.dragStart[1],n=t[0],a=t[1],r=Math.min(e,n),s=Math.max(e,n),c=u.data_selection_grouped?l.margin.top:Math.min(i,a),d=u.data_selection_grouped?l.height:Math.max(i,a),h.select("."+o.dragarea).attr("x",r).attr("y",c).attr("width",s-r).attr("height",d-c),h.selectAll("."+o.shapes).selectAll("."+o.shape).filter(function(t){return 
u.data_selection_isselectable(t)}).each(function(t,e){var i,n,a,u,h,p,f=g.select(this),_=f.classed(o.SELECTED),x=f.classed(o.INCLUDED),y=!1;if(f.classed(o.circle))i=1*f.attr("cx"),n=1*f.attr("cy"),h=l.togglePoint,y=rd&&(c=c.filter(function(t){return(""+t).indexOf(".")<0}));return c},A.getGridFilterToRemove=function(t){return t?function(e){var i=!1;return[].concat(t).forEach(function(t){("value"in t&&e.value===t.value||"class"in t&&e.class===t.class)&&(i=!0)}),i}:function(){return!0}},A.removeGridLines=function(t,e){var i=this,n=i.config,a=i.getGridFilterToRemove(t),r=function(t){return!a(t)},s=e?o.xgridLines:o.ygridLines,c=e?o.xgridLine:o.ygridLine;i.main.select("."+s).selectAll("."+c).filter(a).transition().duration(n.transition_duration).style("opacity",0).remove(),e?n.grid_x_lines=n.grid_x_lines.filter(r):n.grid_y_lines=n.grid_y_lines.filter(r)},A.initEventRect=function(){this.main.select("."+o.chart).append("g").attr("class",o.eventRects).style("fill-opacity",0)},A.redrawEventRect=function(){var t,e,i=this,n=i.config,a=i.isMultipleX(),r=i.main.select("."+o.eventRects).style("cursor",n.zoom_enabled?n.axis_rotated?"ns-resize":"ew-resize":null).classed(o.eventRectsMultiple,a).classed(o.eventRectsSingle,!a);r.selectAll("."+o.eventRect).remove(),i.eventRect=r.selectAll("."+o.eventRect),a?(t=i.eventRect.data([0]),i.generateEventRectsForMultipleXs(t.enter()),i.updateEventRect(t)):(e=i.getMaxDataCountTarget(i.data.targets),r.datum(e?e.values:[]),i.eventRect=r.selectAll("."+o.eventRect),t=i.eventRect.data(function(t){return t}),i.generateEventRectsForSingleX(t.enter()),i.updateEventRect(t),t.exit().remove())},A.updateEventRect=function(t){var e,i,n,a,r,o,s=this,c=s.config;t=t||s.eventRect.data(function(t){return t}),s.isMultipleX()?(e=0,i=0,n=s.width,a=s.height):(!s.isCustomX()&&!s.isTimeSeries()||s.isCategorized()?(r=s.getEventRectWidth(),o=function(t){return s.x(t.x)-r/2}):(s.updateXs(),r=function(t){var e=s.getPrevX(t.index),i=s.getNextX(t.index);return 
null===e&&null===i?c.axis_rotated?s.height:s.width:(null===e&&(e=s.x.domain()[0]),null===i&&(i=s.x.domain()[1]),Math.max(0,(s.x(i)-s.x(e))/2))},o=function(t){var e=s.getPrevX(t.index),i=s.getNextX(t.index),n=s.data.xs[t.id][t.index];return null===e&&null===i?0:(null===e&&(e=s.x.domain()[0]),(s.x(n)+s.x(e))/2)}),e=c.axis_rotated?0:o,i=c.axis_rotated?o:0,n=c.axis_rotated?s.width:r,a=c.axis_rotated?r:s.height),t.attr("class",s.classEvent.bind(s)).attr("x",e).attr("y",i).attr("width",n).attr("height",a)},A.generateEventRectsForSingleX=function(t){var e=this,i=e.d3,n=e.config;t.append("rect").attr("class",e.classEvent.bind(e)).style("cursor",n.data_selection_enabled&&n.data_selection_grouped?"pointer":null).on("mouseover",function(t){var i=t.index;e.dragging||e.flowing||e.hasArcType()||(n.point_focus_expand_enabled&&e.expandCircles(i,null,!0),e.expandBars(i,null,!0),e.main.selectAll("."+o.shape+"-"+i).each(function(t){n.data_onmouseover.call(e.api,t)}))}).on("mouseout",function(t){var i=t.index;e.config&&(e.hasArcType()||(e.hideXGridFocus(),e.hideTooltip(),e.unexpandCircles(),e.unexpandBars(),e.main.selectAll("."+o.shape+"-"+i).each(function(t){n.data_onmouseout.call(e.api,t)})))}).on("mousemove",function(t){var a,r=t.index,s=e.svg.select("."+o.eventRect+"-"+r);e.dragging||e.flowing||e.hasArcType()||(e.isStepType(t)&&"step-after"===e.config.line_step_type&&i.mouse(this)[0]=0}).classed(o.legendItemFocused,e).transition().duration(100).style("opacity",function(){return(e?i.opacityForLegend:i.opacityForUnfocusedLegend).call(i,i.d3.select(this))})},A.revertLegend=function(){var t=this,e=t.d3;t.legend.selectAll("."+o.legendItem).classed(o.legendItemFocused,!1).transition().duration(100).style("opacity",function(){return t.opacityForLegend(e.select(this))})},A.showLegend=function(t){var 
e=this,i=e.config;i.legend_show||(i.legend_show=!0,e.legend.style("visibility","visible"),e.legendHasRendered||e.updateLegendWithDefaults()),e.removeHiddenLegendIds(t),e.legend.selectAll(e.selectorLegends(t)).style("visibility","visible").transition().style("opacity",function(){return e.opacityForLegend(e.d3.select(this))})},A.hideLegend=function(t){var e=this,i=e.config;i.legend_show&&x(t)&&(i.legend_show=!1,e.legend.style("visibility","hidden")),e.addHiddenLegendIds(t),e.legend.selectAll(e.selectorLegends(t)).style("opacity",0).style("visibility","hidden")},A.clearLegendItemTextBoxCache=function(){this.legendItemTextBox={}},A.updateLegend=function(t,e,i){function n(t,e){return b.legendItemTextBox[e]||(b.legendItemTextBox[e]=b.getTextRect(t.textContent,o.legendItem,t)),b.legendItemTextBox[e]}function a(e,i,a){function r(t,e){e||(o=(p-E-g)/2)=L)&&(L=u),(!C||h>=C)&&(C=h),s=b.isLegendRight||b.isLegendInset?C:L,A.legend_equally?(Object.keys(O).forEach(function(t){O[t]=L}),Object.keys(R).forEach(function(t){R[t]=C}),(o=(p-s*t.length)/2)0&&0===v.size()&&(v=b.legend.insert("g","."+o.legendItem).attr("class",o.legendBackground).append("rect")),y=b.legend.selectAll("text").data(t).text(function(t){return void 0!==A.data_names[t]?A.data_names[t]:t}).each(function(t,e){a(this,t,e)}),(_?y.transition():y).attr("x",s).attr("y",l),S=b.legend.selectAll("rect."+o.legendItemEvent).data(t),(_?S.transition():S).attr("width",function(t){return O[t]}).attr("height",function(t){return 
R[t]}).attr("x",c).attr("y",u),w=b.legend.selectAll("line."+o.legendItemTile).data(t),(_?w.transition():w).style("stroke",b.color).attr("x1",h).attr("y1",p).attr("x2",g).attr("y2",p),v&&(_?v.transition():v).attr("height",b.getLegendHeight()-12).attr("width",L*(X+1)+10),b.legend.selectAll("."+o.legendItem).classed(o.legendItemHidden,function(t){return!b.isTargetToShow(t)}),b.updateLegendItemWidth(L),b.updateLegendItemHeight(C),b.updateLegendStep(X),b.updateSizes(),b.updateScales(),b.updateSvgSize(),b.transformAll(x,i),b.legendHasRendered=!0},A.initRegion=function(){var t=this;t.region=t.main.append("g").attr("clip-path",t.clipPath).attr("class",o.regions)},A.updateRegion=function(t){var e=this,i=e.config;e.region.style("visibility",e.hasArcType()?"hidden":"visible"),e.mainRegion=e.main.select("."+o.regions).selectAll("."+o.region).data(i.regions),e.mainRegion.enter().append("g").append("rect").style("fill-opacity",0),e.mainRegion.attr("class",e.classRegion.bind(e)),e.mainRegion.exit().transition().duration(t).style("opacity",0).remove()},A.redrawRegion=function(t){var e=this,i=e.mainRegion.selectAll("rect").each(function(){var t=e.d3.select(this.parentNode).datum();e.d3.select(this).datum(t)}),n=e.regionX.bind(e),a=e.regionY.bind(e),r=e.regionWidth.bind(e),o=e.regionHeight.bind(e);return[(t?i.transition():i).attr("x",n).attr("y",a).attr("width",r).attr("height",o).style("fill-opacity",function(t){return c(t.opacity)?t.opacity:.1})]},A.regionX=function(t){var e=this,i=e.config,n="y"===t.axis?e.y:e.y2;return"y"===t.axis||"y2"===t.axis?i.axis_rotated&&"start"in t?n(t.start):0:i.axis_rotated?0:"start"in t?e.x(e.isTimeSeries()?e.parseDate(t.start):t.start):0},A.regionY=function(t){var e=this,i=e.config,n="y"===t.axis?e.y:e.y2;return"y"===t.axis||"y2"===t.axis?i.axis_rotated?0:"end"in t?n(t.end):0:i.axis_rotated&&"start"in t?e.x(e.isTimeSeries()?e.parseDate(t.start):t.start):0},A.regionWidth=function(t){var e,i=this,n=i.config,a=i.regionX(t),r="y"===t.axis?i.y:i.y2;return 
e="y"===t.axis||"y2"===t.axis?n.axis_rotated&&"end"in t?r(t.end):i.width:n.axis_rotated?i.width:"end"in t?i.x(i.isTimeSeries()?i.parseDate(t.end):t.end):i.width,ei.bar_width_max?i.bar_width_max:n},A.getBars=function(t,e){var i=this;return(e?i.main.selectAll("."+o.bars+i.getTargetSelectorSuffix(e)):i.main).selectAll("."+o.bar+(c(t)?"-"+t:""))},A.expandBars=function(t,e,i){var n=this;i&&n.unexpandBars(),n.getBars(t,e).classed(o.EXPANDED,!0)},A.unexpandBars=function(t){this.getBars(t).classed(o.EXPANDED,!1)},A.generateDrawBar=function(t,e){var i=this,n=i.config,a=i.generateGetBarPoints(t,e);return function(t,e){var i=a(t,e),r=n.axis_rotated?1:0,o=n.axis_rotated?0:1;return"M "+i[0][r]+","+i[0][o]+" L"+i[1][r]+","+i[1][o]+" L"+i[2][r]+","+i[2][o]+" L"+i[3][r]+","+i[3][o]+" z"}},A.generateGetBarPoints=function(t,e){var i=this,n=e?i.subXAxis:i.xAxis,a=t.__max__+1,r=i.getBarW(n,a),o=i.getShapeX(r,a,t,!!e),s=i.getShapeY(!!e),c=i.getShapeOffset(i.isBarType,t,!!e),d=r*(i.config.bar_space/2),l=e?i.getSubYScale:i.getYScale;return function(t,e){var n=l.call(i,t.id)(0),a=c(t,e)||n,u=o(t),h=s(t);return i.config.axis_rotated&&(0=0&&(d+=s(a[o].value)-c))}),d}},A.isWithinShape=function(t,e){var i,n=this,a=n.d3.select(t);return n.isTargetToShow(e.id)?"circle"===t.nodeName?i=n.isStepType(e)?n.isWithinStep(t,n.getYScale(e.id)(e.value)):n.isWithinCircle(t,1.5*n.pointSelectR(e)):"path"===t.nodeName&&(i=!a.classed(o.bar)||n.isWithinBar(t)):i=!1,i},A.getInterpolate=function(t){var e=this,i=e.isInterpolationType(e.config.spline_interpolation_type)?e.config.spline_interpolation_type:"cardinal";return e.isSplineType(t)?i:e.isStepType(t)?e.config.line_step_type:"linear"},A.initLine=function(){this.main.select("."+o.chart).append("g").attr("class",o.chartLines)},A.updateTargetsForLine=function(t){var 
e,i=this,n=i.config,a=i.classChartLine.bind(i),r=i.classLines.bind(i),s=i.classAreas.bind(i),c=i.classCircles.bind(i),d=i.classFocus.bind(i);(e=i.main.select("."+o.chartLines).selectAll("."+o.chartLine).data(t).attr("class",function(t){return a(t)+d(t)}).enter().append("g").attr("class",a).style("opacity",0).style("pointer-events","none")).append("g").attr("class",r),e.append("g").attr("class",s),e.append("g").attr("class",function(t){return i.generateClass(o.selectedCircles,t.id)}),e.append("g").attr("class",c).style("cursor",function(t){return n.data_selection_isselectable(t)?"pointer":null}),t.forEach(function(t){i.main.selectAll("."+o.selectedCircles+i.getTargetSelectorSuffix(t.id)).selectAll("."+o.selectedCircle).each(function(e){e.value=t.values[e.index].value})})},A.updateLine=function(t){var e=this;e.mainLine=e.main.selectAll("."+o.lines).selectAll("."+o.line).data(e.lineData.bind(e)),e.mainLine.enter().append("path").attr("class",e.classLine.bind(e)).style("stroke",e.color),e.mainLine.style("opacity",e.initialOpacity.bind(e)).style("shape-rendering",function(t){return e.isStepType(t)?"crispEdges":""}).attr("transform",null),e.mainLine.exit().transition().duration(t).style("opacity",0).remove()},A.redrawLine=function(t,e){return[(e?this.mainLine.transition(Math.random().toString()):this.mainLine).attr("d",t).style("stroke",this.color).style("opacity",1)]},A.generateDrawLine=function(t,e){var i=this,n=i.config,a=i.d3.svg.line(),r=i.generateGetLinePoints(t,e),o=e?i.getSubYScale:i.getYScale,s=function(t){return(e?i.subxx:i.xx).call(i,t)},c=function(t,e){return n.data_groups.length>0?r(t,e)[0][1]:o.call(i,t.id)(t.value)};return a=n.axis_rotated?a.x(c).y(s):a.x(s).y(c),n.line_connectNull||(a=a.defined(function(t){return null!=t.value})),function(t){var r,s=n.line_connectNull?i.filterRemoveNull(t.values):t.values,c=e?i.x:i.subX,d=o.call(i,t.id),l=0,u=0;return 
i.isLineType(t)?n.data_regions[t.id]?r=i.lineWithRegions(s,c,d,n.data_regions[t.id]):(i.isStepType(t)&&(s=i.convertValuesToStep(s)),r=a.interpolate(i.getInterpolate(t))(s)):(s[0]&&(l=c(s[0].x),u=d(s[0].value)),r=n.axis_rotated?"M "+u+" "+l:"M "+l+" "+u),r||"M 0 0"}},A.generateGetLinePoints=function(t,e){var i=this,n=i.config,a=t.__max__+1,r=i.getShapeX(0,a,t,!!e),o=i.getShapeY(!!e),s=i.getShapeOffset(i.isLineType,t,!!e),c=e?i.getSubYScale:i.getYScale;return function(t,e){var a=c.call(i,t.id)(0),d=s(t,e)||a,l=r(t),u=o(t);return n.axis_rotated&&(00?r(t,e)[0][1]:o.call(i,t.id)(i.getAreaBaseValue(t.id))},d=function(t,e){return n.data_groups.length>0?r(t,e)[1][1]:o.call(i,t.id)(t.value)};return a=n.axis_rotated?a.x0(c).x1(d).y(s):a.x(s).y0(n.area_above?0:c).y1(d),n.line_connectNull||(a=a.defined(function(t){return null!==t.value})),function(t){var e,r=n.line_connectNull?i.filterRemoveNull(t.values):t.values,o=0,s=0;return i.isAreaType(t)?(i.isStepType(t)&&(r=i.convertValuesToStep(r)),e=a.interpolate(i.getInterpolate(t))(r)):(r[0]&&(o=i.x(r[0].x),s=i.getYScale(t.id)(r[0].value)),e=n.axis_rotated?"M "+s+" "+o:"M "+o+" "+s),e||"M 0 0"}},A.getAreaBaseValue=function(){return 0},A.generateGetAreaPoints=function(t,e){var i=this,n=i.config,a=t.__max__+1,r=i.getShapeX(0,a,t,!!e),o=i.getShapeY(!!e),s=i.getShapeOffset(i.isAreaType,t,!!e),c=e?i.getSubYScale:i.getYScale;return function(t,e){var a=c.call(i,t.id)(0),d=s(t,e)||a,l=r(t),u=o(t);return n.axis_rotated&&(00?(t=i.getShapeIndices(i.isLineType),e=i.generateGetLinePoints(t),i.circleY=function(t,i){return e(t,i)[0][1]}):i.circleY=function(t){return i.getYScale(t.id)(t.value)}},A.getCircles=function(t,e){var i=this;return(e?i.main.selectAll("."+o.circles+i.getTargetSelectorSuffix(e)):i.main).selectAll("."+o.circle+(c(t)?"-"+t:""))},A.expandCircles=function(t,e,i){var n=this,a=n.pointExpandedR.bind(n);i&&n.unexpandCircles(),n.getCircles(t,e).classed(o.EXPANDED,!0).attr("r",a)},A.unexpandCircles=function(t){var 
e=this,i=e.pointR.bind(e);e.getCircles(t).filter(function(){return e.d3.select(this).classed(o.EXPANDED)}).classed(o.EXPANDED,!1).attr("r",i)},A.pointR=function(t){var e=this,i=e.config;return e.isStepType(t)?0:d(i.point_r)?i.point_r(t):i.point_r},A.pointExpandedR=function(t){var e=this,i=e.config;return i.point_focus_expand_enabled?i.point_focus_expand_r?i.point_focus_expand_r:1.75*e.pointR(t):e.pointR(t)},A.pointSelectR=function(t){var e=this,i=e.config;return d(i.point_select_r)?i.point_select_r(t):i.point_select_r?i.point_select_r:4*e.pointR(t)},A.isWithinCircle=function(t,e){var i=this.d3,n=i.mouse(t),a=i.select(t),r=+a.attr("cx"),o=+a.attr("cy");return Math.sqrt(Math.pow(r-n[0],2)+Math.pow(o-n[1],2))0?i:320/(t.hasType("gauge")&&!e.gauge_fullCircle?2:1)},A.getCurrentPaddingTop=function(){var t=this,e=t.config,i=c(e.padding_top)?e.padding_top:0;return t.title&&t.title.node()&&(i+=t.getTitlePadding()),i},A.getCurrentPaddingBottom=function(){var t=this.config;return c(t.padding_bottom)?t.padding_bottom:0},A.getCurrentPaddingLeft=function(t){var e=this,i=e.config;return c(i.padding_left)?i.padding_left:i.axis_rotated?i.axis_x_show?Math.max(p(e.getAxisWidthByAxisId("x",t)),40):1:!i.axis_y_show||i.axis_y_inner?e.axis.getYAxisLabelPosition().isOuter?30:1:p(e.getAxisWidthByAxisId("y",t))},A.getCurrentPaddingRight=function(){var t=this,e=t.config,i=t.isLegendRight?t.getLegendWidth()+20:0;return c(e.padding_right)?e.padding_right+1:e.axis_rotated?10+i:!e.axis_y2_show||e.axis_y2_inner?2+i+(t.axis.getY2AxisLabelPosition().isOuter?20:0):p(t.getAxisWidthByAxisId("y2"))+i},A.getParentRectValue=function(t){for(var e,i=this.selectChart.node();i&&"BODY"!==i.tagName;){try{e=i.getBoundingClientRect()[t]}catch(n){"width"===t&&(e=i.offsetWidth)}if(e)break;i=i.parentNode}return e},A.getParentWidth=function(){return this.getParentRectValue("width")},A.getParentHeight=function(){var t=this.selectChart.style("height");return 
t.indexOf("px")>0?+t.replace("px",""):0},A.getSvgLeft=function(t){var e=this,i=e.config,n=i.axis_rotated||!i.axis_rotated&&!i.axis_y_inner,a=i.axis_rotated?o.axisX:o.axisY,r=e.main.select("."+a).node(),s=r&&n?r.getBoundingClientRect():{right:0},c=e.selectChart.node().getBoundingClientRect(),d=e.hasArcType(),l=s.right-c.left-(d?0:e.getCurrentPaddingLeft(t));return l>0?l:0},A.getAxisWidthByAxisId=function(t,e){var i=this,n=i.axis.getLabelPositionById(t);return i.axis.getMaxTickWidth(t,e)+(n.isInner?20:40)},A.getHorizontalAxisHeight=function(t){var e=this,i=e.config,n=30;return"x"!==t||i.axis_x_show?"x"===t&&i.axis_x_height?i.axis_x_height:"y"!==t||i.axis_y_show?"y2"!==t||i.axis_y2_show?("x"===t&&!i.axis_rotated&&i.axis_x_tick_rotate&&(n=30+e.axis.getMaxTickWidth(t)*Math.cos(Math.PI*(90-i.axis_x_tick_rotate)/180)),"y"===t&&i.axis_rotated&&i.axis_y_tick_rotate&&(n=30+e.axis.getMaxTickWidth(t)*Math.cos(Math.PI*(90-i.axis_y_tick_rotate)/180)),n+(e.axis.getLabelPositionById(t).isInner?0:10)+("y2"===t?-10:0)):e.rotated_padding_top:!i.legend_show||e.isLegendRight||e.isLegendInset?1:10:8},A.getEventRectWidth=function(){return Math.max(0,this.xAxis.tickInterval())},A.initBrush=function(){var t=this,e=t.d3;t.brush=e.svg.brush().on("brush",function(){t.redrawForBrush()}),t.brush.update=function(){return t.context&&t.context.select("."+o.brush).call(this),this},t.brush.scale=function(e){return t.config.axis_rotated?this.y(e):this.x(e)}},A.initSubchart=function(){var 
t=this,e=t.config,i=t.context=t.svg.append("g").attr("transform",t.getTranslate("context")),n=e.subchart_show?"visible":"hidden";i.style("visibility",n),i.append("g").attr("clip-path",t.clipPathForSubchart).attr("class",o.chart),i.select("."+o.chart).append("g").attr("class",o.chartBars),i.select("."+o.chart).append("g").attr("class",o.chartLines),i.append("g").attr("clip-path",t.clipPath).attr("class",o.brush).call(t.brush),t.axes.subx=i.append("g").attr("class",o.axisX).attr("transform",t.getTranslate("subx")).attr("clip-path",e.axis_rotated?"":t.clipPathForXAxis).style("visibility",e.subchart_axis_x_show?n:"hidden")},A.updateTargetsForSubchart=function(t){var e,i=this,n=i.context,a=i.config,r=i.classChartBar.bind(i),s=i.classBars.bind(i),c=i.classChartLine.bind(i),d=i.classLines.bind(i),l=i.classAreas.bind(i);a.subchart_show&&(n.select("."+o.chartBars).selectAll("."+o.chartBar).data(t).attr("class",r).enter().append("g").style("opacity",0).attr("class",r).append("g").attr("class",s),(e=n.select("."+o.chartLines).selectAll("."+o.chartLine).data(t).attr("class",c).enter().append("g").style("opacity",0).attr("class",c)).append("g").attr("class",d),e.append("g").attr("class",l),n.selectAll("."+o.brush+" rect").attr(a.axis_rotated?"width":"height",a.axis_rotated?i.width2:i.height2))},A.updateBarForSubchart=function(t){var e=this;e.contextBar=e.context.selectAll("."+o.bars).selectAll("."+o.bar).data(e.barData.bind(e)),e.contextBar.enter().append("path").attr("class",e.classBar.bind(e)).style("stroke","none").style("fill",e.color),e.contextBar.style("opacity",e.initialOpacity.bind(e)),e.contextBar.exit().transition().duration(t).style("opacity",0).remove()},A.redrawBarForSubchart=function(t,e,i){(e?this.contextBar.transition(Math.random().toString()).duration(i):this.contextBar).attr("d",t).style("opacity",1)},A.updateLineForSubchart=function(t){var 
e=this;e.contextLine=e.context.selectAll("."+o.lines).selectAll("."+o.line).data(e.lineData.bind(e)),e.contextLine.enter().append("path").attr("class",e.classLine.bind(e)).style("stroke",e.color),e.contextLine.style("opacity",e.initialOpacity.bind(e)),e.contextLine.exit().transition().duration(t).style("opacity",0).remove()},A.redrawLineForSubchart=function(t,e,i){(e?this.contextLine.transition(Math.random().toString()).duration(i):this.contextLine).attr("d",t).style("opacity",1)},A.updateAreaForSubchart=function(t){var e=this,i=e.d3;e.contextArea=e.context.selectAll("."+o.areas).selectAll("."+o.area).data(e.lineData.bind(e)),e.contextArea.enter().append("path").attr("class",e.classArea.bind(e)).style("fill",e.color).style("opacity",function(){return e.orgAreaOpacity=+i.select(this).style("opacity"),0}),e.contextArea.style("opacity",0),e.contextArea.exit().transition().duration(t).style("opacity",0).remove()},A.redrawAreaForSubchart=function(t,e,i){(e?this.contextArea.transition(Math.random().toString()).duration(i):this.contextArea).attr("d",t).style("fill",this.color).style("opacity",this.orgAreaOpacity)},A.redrawSubchart=function(t,e,i,n,a,r,o){var s,c,d,l=this,u=l.d3,h=l.config;l.context.style("visibility",h.subchart_show?"visible":"hidden"),h.subchart_show&&(u.event&&"zoom"===u.event.type&&l.brush.extent(l.x.orgDomain()).update(),t&&(l.brush.empty()||l.brush.extent(l.x.orgDomain()).update(),s=l.generateDrawArea(a,!0),c=l.generateDrawBar(r,!0),d=l.generateDrawLine(o,!0),l.updateBarForSubchart(i),l.updateLineForSubchart(i),l.updateAreaForSubchart(i),l.redrawBarForSubchart(c,i,i),l.redrawLineForSubchart(d,i,i),l.redrawAreaForSubchart(s,i,i)))},A.redrawForBrush=function(){var t=this,e=t.x;t.redraw({withTransition:!1,withY:t.config.zoom_rescale,withSubchart:!1,withUpdateXDomain:!0,withDimension:!1}),t.config.subchart_onbrush.call(t.api,e.orgDomain())},A.transformContext=function(t,e){var 
i,n=this;e&&e.axisSubX?i=e.axisSubX:(i=n.context.select("."+o.axisX),t&&(i=i.transition())),n.context.attr("transform",n.getTranslate("context")),i.attr("transform",n.getTranslate("subx"))},A.getDefaultExtent=function(){var t=this,e=t.config,i=d(e.axis_x_extent)?e.axis_x_extent(t.getXDomain(t.data.targets)):e.axis_x_extent;return t.isTimeSeries()&&(i=[t.parseDate(i[0]),t.parseDate(i[1])]),i},A.initText=function(){var t=this;t.main.select("."+o.chart).append("g").attr("class",o.chartTexts),t.mainText=t.d3.selectAll([])},A.updateTargetsForText=function(t){var e=this,i=e.classChartText.bind(e),n=e.classTexts.bind(e),a=e.classFocus.bind(e);e.main.select("."+o.chartTexts).selectAll("."+o.chartText).data(t).attr("class",function(t){return i(t)+a(t)}).enter().append("g").attr("class",i).style("opacity",0).style("pointer-events","none").append("g").attr("class",n)},A.updateText=function(t){var e=this,i=e.config,n=e.barOrLineData.bind(e),a=e.classText.bind(e);e.mainText=e.main.selectAll("."+o.texts).selectAll("."+o.text).data(n),e.mainText.enter().append("text").attr("class",a).attr("text-anchor",function(t){return i.axis_rotated?t.value<0?"end":"start":"middle"}).style("stroke","none").style("fill",function(t){return e.color(t)}).style("fill-opacity",0),e.mainText.text(function(t,i,n){return e.dataLabelFormat(t.id)(t.value,t.id,i,n)}),e.mainText.exit().transition().duration(t).style("fill-opacity",0).remove()},A.redrawText=function(t,e,i,n){return[(n?this.mainText.transition():this.mainText).attr("x",t).attr("y",e).style("fill",this.color).style("fill-opacity",i?0:this.opacityForText.bind(this))]},A.getTextRect=function(t,e,i){var n,a=this.d3.select("body").append("div").classed("c3",!0),r=a.append("svg").style("visibility","hidden").style("position","fixed").style("top",0).style("left",0),o=this.d3.select(i).style("font");return 
r.selectAll(".dummy").data([t]).enter().append("text").classed(e||"",!0).style("font",o).text(t).each(function(){n=this.getBoundingClientRect()}),a.remove(),n},A.generateXYForText=function(t,e,i,n){var a=this,r=a.generateGetAreaPoints(t,!1),o=a.generateGetBarPoints(e,!1),s=a.generateGetLinePoints(i,!1),c=n?a.getXForText:a.getYForText;return function(t,e){var i=a.isAreaType(t)?r:a.isBarType(t)?o:s;return c.call(a,i(t,e),t,this)}},A.getXForText=function(t,e,i){var n,a,r=this,o=i.getBoundingClientRect();return r.config.axis_rotated?(a=r.isBarType(e)?4:6,n=t[2][1]+a*(e.value<0?-1:1)):n=r.hasType("bar")?(t[2][0]+t[0][0])/2:t[0][0],null===e.value&&(n>r.width?n=r.width-o.width:n<0&&(n=4)),n},A.getYForText=function(t,e,i){var n,a=this,r=i.getBoundingClientRect();return a.config.axis_rotated?n=(t[0][0]+t[2][0]+.6*r.height)/2:(n=t[2][1],e.value<0||0===e.value&&!a.hasPositiveValue?(n+=r.height,a.isBarType(e)&&a.isSafari()?n-=3:!a.isBarType(e)&&a.isChrome()&&(n+=3)):n+=a.isBarType(e)?-3:-6),null!==e.value||a.config.axis_rotated||(nthis.height&&(n=this.height-4)),n},A.initTitle=function(){var t=this;t.title=t.svg.append("text").text(t.config.title_text).attr("class",t.CLASS.title)},A.redrawTitle=function(){var t=this;t.title.attr("x",t.xForTitle.bind(t)).attr("y",t.yForTitle.bind(t))},A.xForTitle=function(){var t=this,e=t.config,i=e.title_position||"left";return i.indexOf("right")>=0?t.currentWidth-t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).width-e.title_padding.right:i.indexOf("center")>=0?(t.currentWidth-t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).width)/2:e.title_padding.left},A.yForTitle=function(){var t=this;return t.config.title_padding.top+t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).height},A.getTitlePadding=function(){var t=this;return t.yForTitle()+t.config.title_padding.bottom},A.initTooltip=function(){var 
t,e=this,i=e.config;if(e.tooltip=e.selectChart.style("position","relative").append("div").attr("class",o.tooltipContainer).style("position","absolute").style("pointer-events","none").style("display","none"),i.tooltip_init_show){if(e.isTimeSeries()&&u(i.tooltip_init_x)){for(i.tooltip_init_x=e.parseDate(i.tooltip_init_x),t=0;t"+(o||0===o?""+o+"":"")),void 0!==(s=w(p(t[r].value,t[r].ratio,t[r].id,t[r].index,t))))){if(null===t[r].name)continue;c=w(g(t[r].name,t[r].ratio,t[r].id,t[r].index)),d=l.levelColor?l.levelColor(t[r].value):n(t[r].id),a+="",a+=""+c+"",a+=""+s+"",a+=""}return a+""},A.tooltipPosition=function(t,e,i,n){var a,r,o,s,c,d=this,l=d.config,u=d.d3,h=d.hasArcType(),g=u.mouse(n);return h?(r=(d.width-(d.isLegendRight?d.getLegendWidth():0))/2+g[0],s=d.height/2+g[1]+20):(a=d.getSvgLeft(!0),l.axis_rotated?(o=(r=a+g[0]+100)+e,c=d.currentWidth-d.getCurrentPaddingRight(),s=d.x(t[0].x)+20):(o=(r=a+d.getCurrentPaddingLeft(!0)+d.x(t[0].x)+20)+e,c=a+d.currentWidth-d.getCurrentPaddingRight(),s=g[1]+15),o>c&&(r-=o-c+20),s+i>d.currentHeight&&(s-=i+30)),s<0&&(s=0),{top:s,left:r}},A.showTooltip=function(t,e){var i,n,a,r=this,o=r.config,s=r.hasArcType(),d=t.filter(function(t){return t&&c(t.value)}),l=o.tooltip_position||A.tooltipPosition;0!==d.length&&o.tooltip_show&&(r.tooltip.html(o.tooltip_contents.call(r,t,r.axis.getXAxisTickFormat(),r.getYFormat(s),r.color)).style("display","block"),i=r.tooltip.property("offsetWidth"),n=r.tooltip.property("offsetHeight"),a=l.call(this,d,i,n,e),r.tooltip.style("top",a.top+"px").style("left",a.left+"px"))},A.hideTooltip=function(){this.tooltip.style("display","none")},A.setTargetType=function(t,e){var i=this,n=i.config;i.mapToTargetIds(t).forEach(function(t){i.withoutFadeIn[t]=e===n.data_types[t],n.data_types[t]=e}),t||(n.data_type=e)},A.hasType=function(t,e){var i=this,n=i.config.data_types,a=!1;return e=e||i.data.targets,e&&e.length?e.forEach(function(e){var 
i=n[e.id];(i&&i.indexOf(t)>=0||!i&&"line"===t)&&(a=!0)}):Object.keys(n).length?Object.keys(n).forEach(function(e){n[e]===t&&(a=!0)}):a=i.config.data_type===t,a},A.hasArcType=function(t){return this.hasType("pie",t)||this.hasType("donut",t)||this.hasType("gauge",t)},A.isLineType=function(t){var e=this.config,i=u(t)?t:t.id;return!e.data_types[i]||["line","spline","area","area-spline","step","area-step"].indexOf(e.data_types[i])>=0},A.isStepType=function(t){var e=u(t)?t:t.id;return["step","area-step"].indexOf(this.config.data_types[e])>=0},A.isSplineType=function(t){var e=u(t)?t:t.id;return["spline","area-spline"].indexOf(this.config.data_types[e])>=0},A.isAreaType=function(t){var e=u(t)?t:t.id;return["area","area-spline","area-step"].indexOf(this.config.data_types[e])>=0},A.isBarType=function(t){var e=u(t)?t:t.id;return"bar"===this.config.data_types[e]},A.isScatterType=function(t){var e=u(t)?t:t.id;return"scatter"===this.config.data_types[e]},A.isPieType=function(t){var e=u(t)?t:t.id;return"pie"===this.config.data_types[e]},A.isGaugeType=function(t){var e=u(t)?t:t.id;return"gauge"===this.config.data_types[e]},A.isDonutType=function(t){var e=u(t)?t:t.id;return"donut"===this.config.data_types[e]},A.isArcType=function(t){return this.isPieType(t)||this.isDonutType(t)||this.isGaugeType(t)},A.lineData=function(t){return this.isLineType(t)?[t]:[]},A.arcData=function(t){return this.isArcType(t.data)?[t]:[]},A.barData=function(t){return this.isBarType(t)?t.values:[]},A.lineOrScatterData=function(t){return this.isLineType(t)||this.isScatterType(t)?t.values:[]},A.barOrLineData=function(t){return this.isBarType(t)||this.isLineType(t)?t.values:[]},A.isInterpolationType=function(t){return["linear","linear-closed","basis","basis-open","basis-closed","bundle","cardinal","cardinal-open","cardinal-closed","monotone"].indexOf(t)>=0},A.isSafari=function(){var t=window.navigator.userAgent;return t.indexOf("Safari")>=0&&t.indexOf("Chrome")<0},A.isChrome=function(){return 
window.navigator.userAgent.indexOf("Chrome")>=0},A.initZoom=function(){var t,e=this,i=e.d3,n=e.config;e.zoom=i.behavior.zoom().on("zoomstart",function(){t=i.event.sourceEvent,e.zoom.altDomain=i.event.sourceEvent.altKey?e.x.orgDomain():null,n.zoom_onzoomstart.call(e.api,i.event.sourceEvent)}).on("zoom",function(){e.redrawForZoom.call(e)}).on("zoomend",function(){var a=i.event.sourceEvent;a&&t.clientX===a.clientX&&t.clientY===a.clientY||(e.redrawEventRect(),e.updateZoom(),n.zoom_onzoomend.call(e.api,e.x.orgDomain()))}),e.zoom.scale=function(t){return n.axis_rotated?this.y(t):this.x(t)},e.zoom.orgScaleExtent=function(){var t=n.zoom_extent?n.zoom_extent:[1,10];return[t[0],Math.max(e.getMaxDataCount()/t[1],t[1])]},e.zoom.updateScaleExtent=function(){var t=_(e.x.orgDomain())/_(e.getZoomDomain()),i=this.orgScaleExtent();return this.scaleExtent([i[0]*t,i[1]*t]),this}},A.getZoomDomain=function(){var t=this,e=t.config,i=t.d3;return[i.min([t.orgXDomain[0],e.zoom_x_min]),i.max([t.orgXDomain[1],e.zoom_x_max])]},A.updateZoom=function(){var t=this,e=t.config.zoom_enabled?t.zoom:function(){};t.main.select("."+o.zoomRect).call(e).on("dblclick.zoom",null),t.main.selectAll("."+o.eventRect).call(e).on("dblclick.zoom",null)},A.redrawForZoom=function(){var t=this,e=t.d3,i=t.config,n=t.zoom,a=t.x;if(i.zoom_enabled&&0!==t.filterTargetsToShow(t.data.targets).length){if("mousemove"===e.event.sourceEvent.type&&n.altDomain)return a.domain(n.altDomain),void n.scale(a).updateScaleExtent();t.isCategorized()&&a.orgDomain()[0]===t.orgXDomain[0]&&a.domain([t.orgXDomain[0]-1e-10,a.orgDomain()[1]]),t.redraw({withTransition:!1,withY:i.zoom_rescale,withSubchart:!1,withEventRect:!1,withDimension:!1}),"mousemove"===e.event.sourceEvent.type&&(t.cancelClick=!0),i.zoom_onzoom.call(t.api,a.orgDomain())}},T}); \ No newline at end of file +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):t.c3=e()}(this,function(){"use 
strict";function t(t,e){function i(t,e){t.attr("transform",function(t){return"translate("+Math.ceil(e(t)+w)+", 0)"})}function n(t,e){t.attr("transform",function(t){return"translate(0,"+Math.ceil(e(t))+")"})}function a(t){var e=t[0],i=t[t.length-1];return e0&&n[0]>0&&n.unshift(n[0]-(n[1]-n[0])),n}function s(){var t,i=_.copy();return e.isCategory&&(t=_.domain(),i.domain([t[0],t[1]-1])),i}function c(t){var e=g?g(t):t;return void 0!==e?e:""}function d(t){if(r)return r;var e={h:11.5,w:5.5};return t.select("text").text(c).each(function(t){var i=this.getBoundingClientRect(),n=c(t),a=i.height,r=n?i.width/n.length:void 0;a&&r&&(e.h=a,e.w=r)}).text(""),r=e,e}function l(i){return e.withoutTransition?i:t.transition(i)}function u(r){r.each(function(){function r(t,i){function n(t,e){r=void 0;for(var s=1;s0?1:-1):N}(j)).style("text-anchor",function(t){return t?t>0?"start":"end":"middle"}(j)).attr("transform",function(t){return t?"rotate("+t+")":""}(j)),H.attr("x",0).attr("dy",g).attr("dx",function(t){return t?8*Math.sin(Math.PI*(t/180)):0}(j)),R.attr("d","M"+I[0]+","+h+"V0H"+I[1]+"V"+h);break;case"top":p=i,D.attr("y2",-y),X.attr("y",-N),F.attr("x2",0).attr("y2",-y),k.attr("x",0).attr("y",-N),M.style("text-anchor","middle"),H.attr("x",0).attr("dy","0em"),R.attr("d","M"+I[0]+","+-h+"V0H"+I[1]+"V"+-h);break;case"left":p=n,D.attr("x2",-y),X.attr("x",-N),F.attr("x2",-y).attr("y1",b).attr("y2",b),k.attr("x",-N).attr("y",w),M.style("text-anchor","end"),H.attr("x",-N).attr("dy",g),R.attr("d","M"+-h+","+I[0]+"H0V"+I[1]+"H"+-h);break;case"right":p=n,D.attr("x2",y),X.attr("x",N),F.attr("x2",y).attr("y2",0),k.attr("x",N).attr("y",0),M.style("text-anchor","start"),H.attr("x",N).attr("dy",g),R.attr("d","M"+h+","+I[0]+"H0V"+I[1]+"H"+h)}if(P.rangeBand){var U=P,W=U.rangeBand()/2;T=P=function(t){return U(t)+W}}else T.rangeBand?T=P:G.call(p,P);V.call(p,T),E.call(p,P)})}var h,g,p,f,_=t.scale.linear(),x="bottom",y=6,m=3,S=null,w=0,v=!0;return e=e||{},h=e.withOuterTick?6:0,u.scale=function(t){return 
arguments.length?(_=t,u):_},u.orient=function(t){return arguments.length?(x=t in{top:1,right:1,bottom:1,left:1}?t+"":"bottom",u):x},u.tickFormat=function(t){return arguments.length?(g=t,u):g},u.tickCentered=function(t){return arguments.length?(f=t,u):f},u.tickOffset=function(){return w},u.tickInterval=function(){var t;return t=e.isCategory?2*w:(u.g.select("path.domain").node().getTotalLength()-2*h)/u.g.selectAll("line").size(),t===1/0?0:t},u.ticks=function(){return arguments.length?(p=arguments,u):p},u.tickCulling=function(t){return arguments.length?(v=t,u):v},u.tickValues=function(t){if("function"==typeof t)S=function(){return t(_.domain())};else{if(!arguments.length)return S;S=t}return u},u}function e(t){i.call(this,t)}function i(t){this.owner=t}function n(t){var e=this.internal=new a(this);e.loadConfig(t),e.beforeInit(t),e.init(),e.afterInit(t),function t(e,i,n){Object.keys(e).forEach(function(a){i[a]=e[a].bind(n),Object.keys(e[a]).length>0&&t(e[a],i[a],n)})}(b,this,this)}function a(t){var e=this;e.d3=window.d3?window.d3:"undefined"!=typeof require?require("d3"):void 0,e.api=t,e.config=e.getDefaultConfig(),e.data={},e.cache={},e.axes={}}var 
r,o={target:"c3-target",chart:"c3-chart",chartLine:"c3-chart-line",chartLines:"c3-chart-lines",chartBar:"c3-chart-bar",chartBars:"c3-chart-bars",chartText:"c3-chart-text",chartTexts:"c3-chart-texts",chartArc:"c3-chart-arc",chartArcs:"c3-chart-arcs",chartArcsTitle:"c3-chart-arcs-title",chartArcsBackground:"c3-chart-arcs-background",chartArcsGaugeUnit:"c3-chart-arcs-gauge-unit",chartArcsGaugeMax:"c3-chart-arcs-gauge-max",chartArcsGaugeMin:"c3-chart-arcs-gauge-min",selectedCircle:"c3-selected-circle",selectedCircles:"c3-selected-circles",eventRect:"c3-event-rect",eventRects:"c3-event-rects",eventRectsSingle:"c3-event-rects-single",eventRectsMultiple:"c3-event-rects-multiple",zoomRect:"c3-zoom-rect",brush:"c3-brush",focused:"c3-focused",defocused:"c3-defocused",region:"c3-region",regions:"c3-regions",title:"c3-title",tooltipContainer:"c3-tooltip-container",tooltip:"c3-tooltip",tooltipName:"c3-tooltip-name",shape:"c3-shape",shapes:"c3-shapes",line:"c3-line",lines:"c3-lines",bar:"c3-bar",bars:"c3-bars",circle:"c3-circle",circles:"c3-circles",arc:"c3-arc",arcs:"c3-arcs",area:"c3-area",areas:"c3-areas",empty:"c3-empty",text:"c3-text",texts:"c3-texts",gaugeValue:"c3-gauge-value",grid:"c3-grid",gridLines:"c3-grid-lines",xgrid:"c3-xgrid",xgrids:"c3-xgrids",xgridLine:"c3-xgrid-line",xgridLines:"c3-xgrid-lines",xgridFocus:"c3-xgrid-focus",ygrid:"c3-ygrid",ygrids:"c3-ygrids",ygridLine:"c3-ygrid-line",ygridLines:"c3-ygrid-lines",axis:"c3-axis",axisX:"c3-axis-x",axisXLabel:"c3-axis-x-label",axisY:"c3-axis-y",axisYLabel:"c3-axis-y-label",axisY2:"c3-axis-y2",axisY2Label:"c3-axis-y2-label",legendBackground:"c3-legend-background",legendItem:"c3-legend-item",legendItemEvent:"c3-legend-item-event",legendItemTile:"c3-legend-item-tile",legendItemHidden:"c3-legend-item-hidden",legendItemFocused:"c3-legend-item-focused",dragarea:"c3-dragarea",EXPANDED:"_expanded_",SELECTED:"_selected_",INCLUDED:"_included_"},s="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return 
typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},c=function(t){return t||0===t},d=function(t){return"function"==typeof t},l=function(t){return Array.isArray(t)},u=function(t){return"string"==typeof t},h=function(t){return void 0===t},g=function(t){return void 0!==t},p=function(t){return 10*Math.ceil(t/10)},f=function(t){return Math.ceil(t)+.5},_=function(t){return t[1]-t[0]},x=function(t){return void 0===t||null===t||u(t)&&0===t.length||"object"===(void 0===t?"undefined":s(t))&&0===Object.keys(t).length},y=function(t){return!A.isEmpty(t)},m=function(t,e,i){return void 0!==t[e]?t[e]:i},S=function(t,e){var i=!1;return Object.keys(t).forEach(function(n){t[n]===e&&(i=!0)}),i},w=function(t){return"string"==typeof t?t.replace(//g,">"):t},v=function(t){var e=t.getBoundingClientRect(),i=[t.pathSegList.getItem(0),t.pathSegList.getItem(1)];return{x:i[0].x,y:Math.min(i[0].y,i[1].y),width:e.width,height:e.height}};!function(t,e){if(Object.create)e.prototype=Object.create(t.prototype);else{var i=function(){};i.prototype=t.prototype,e.prototype=new i}e.prototype.constructor=e}(i,e),e.prototype.init=function(){var t=this.owner,e=t.config,i=t.main;t.axes.x=i.append("g").attr("class",o.axis+" "+o.axisX).attr("clip-path",t.clipPathForXAxis).attr("transform",t.getTranslate("x")).style("visibility",e.axis_x_show?"visible":"hidden"),t.axes.x.append("text").attr("class",o.axisXLabel).attr("transform",e.axis_rotated?"rotate(-90)":"").style("text-anchor",this.textAnchorForXAxisLabel.bind(this)),t.axes.y=i.append("g").attr("class",o.axis+" "+o.axisY).attr("clip-path",e.axis_y_inner?"":t.clipPathForYAxis).attr("transform",t.getTranslate("y")).style("visibility",e.axis_y_show?"visible":"hidden"),t.axes.y.append("text").attr("class",o.axisYLabel).attr("transform",e.axis_rotated?"":"rotate(-90)").style("text-anchor",this.textAnchorForYAxisLabel.bind(this)),t.axes.y2=i.append("g").attr("class",o.axis+" 
"+o.axisY2).attr("transform",t.getTranslate("y2")).style("visibility",e.axis_y2_show?"visible":"hidden"),t.axes.y2.append("text").attr("class",o.axisY2Label).attr("transform",e.axis_rotated?"":"rotate(-90)").style("text-anchor",this.textAnchorForY2AxisLabel.bind(this))},e.prototype.getXAxis=function(e,i,n,a,r,o,s){var c=this.owner,d=c.config,l={isCategory:c.isCategorized(),withOuterTick:r,tickMultiline:d.axis_x_tick_multiline,tickWidth:d.axis_x_tick_width,tickTextRotate:s?0:d.axis_x_tick_rotate,withoutTransition:o},u=t(c.d3,l).scale(e).orient(i);return c.isTimeSeries()&&a&&"function"!=typeof a&&(a=a.map(function(t){return c.parseDate(t)})),u.tickFormat(n).tickValues(a),c.isCategorized()&&(u.tickCentered(d.axis_x_tick_centered),x(d.axis_x_tick_culling)&&(d.axis_x_tick_culling=!1)),u},e.prototype.updateXAxisTickValues=function(t,e){var i,n=this.owner,a=n.config;return(a.axis_x_tick_fit||a.axis_x_tick_count)&&(i=this.generateTickValues(n.mapTargetsToUniqueXs(t),a.axis_x_tick_count,n.isTimeSeries())),e?e.tickValues(i):(n.xAxis.tickValues(i),n.subXAxis.tickValues(i)),i},e.prototype.getYAxis=function(e,i,n,a,r,o,s){var c=this.owner,d=c.config,l={withOuterTick:r,withoutTransition:o,tickTextRotate:s?0:d.axis_y_tick_rotate},u=t(c.d3,l).scale(e).orient(i).tickFormat(n);return c.isTimeSeriesY()?u.ticks(c.d3.time[d.axis_y_tick_time_value],d.axis_y_tick_time_interval):u.tickValues(a),u},e.prototype.getId=function(t){var e=this.owner.config;return t in e.data_axes?e.data_axes[t]:"y"},e.prototype.getXAxisTickFormat=function(){var t=this.owner,e=t.config,i=t.isTimeSeries()?t.defaultAxisTimeFormat:t.isCategorized()?t.categoryName:function(t){return t<0?t.toFixed(0):t};return e.axis_x_tick_format&&(d(e.axis_x_tick_format)?i=e.axis_x_tick_format:t.isTimeSeries()&&(i=function(i){return i?t.axisTimeFormat(e.axis_x_tick_format)(i):""})),d(i)?function(e){return i.call(t,e)}:i},e.prototype.getTickValues=function(t,e){return t||(e?e.tickValues():void 
0)},e.prototype.getXAxisTickValues=function(){return this.getTickValues(this.owner.config.axis_x_tick_values,this.owner.xAxis)},e.prototype.getYAxisTickValues=function(){return this.getTickValues(this.owner.config.axis_y_tick_values,this.owner.yAxis)},e.prototype.getY2AxisTickValues=function(){return this.getTickValues(this.owner.config.axis_y2_tick_values,this.owner.y2Axis)},e.prototype.getLabelOptionByAxisId=function(t){var e,i=this.owner.config;return"y"===t?e=i.axis_y_label:"y2"===t?e=i.axis_y2_label:"x"===t&&(e=i.axis_x_label),e},e.prototype.getLabelText=function(t){var e=this.getLabelOptionByAxisId(t);return u(e)?e:e?e.text:null},e.prototype.setLabelText=function(t,e){var i=this.owner.config,n=this.getLabelOptionByAxisId(t);u(n)?"y"===t?i.axis_y_label=e:"y2"===t?i.axis_y2_label=e:"x"===t&&(i.axis_x_label=e):n&&(n.text=e)},e.prototype.getLabelPosition=function(t,e){var i=this.getLabelOptionByAxisId(t),n=i&&"object"===(void 0===i?"undefined":s(i))&&i.position?i.position:e;return{isInner:n.indexOf("inner")>=0,isOuter:n.indexOf("outer")>=0,isLeft:n.indexOf("left")>=0,isCenter:n.indexOf("center")>=0,isRight:n.indexOf("right")>=0,isTop:n.indexOf("top")>=0,isMiddle:n.indexOf("middle")>=0,isBottom:n.indexOf("bottom")>=0}},e.prototype.getXAxisLabelPosition=function(){return this.getLabelPosition("x",this.owner.config.axis_rotated?"inner-top":"inner-right")},e.prototype.getYAxisLabelPosition=function(){return this.getLabelPosition("y",this.owner.config.axis_rotated?"inner-right":"inner-top")},e.prototype.getY2AxisLabelPosition=function(){return this.getLabelPosition("y2",this.owner.config.axis_rotated?"inner-right":"inner-top")},e.prototype.getLabelPositionById=function(t){return"y2"===t?this.getY2AxisLabelPosition():"y"===t?this.getYAxisLabelPosition():this.getXAxisLabelPosition()},e.prototype.textForXAxisLabel=function(){return this.getLabelText("x")},e.prototype.textForYAxisLabel=function(){return 
this.getLabelText("y")},e.prototype.textForY2AxisLabel=function(){return this.getLabelText("y2")},e.prototype.xForAxisLabel=function(t,e){var i=this.owner;return t?e.isLeft?0:e.isCenter?i.width/2:i.width:e.isBottom?-i.height:e.isMiddle?-i.height/2:0},e.prototype.dxForAxisLabel=function(t,e){return t?e.isLeft?"0.5em":e.isRight?"-0.5em":"0":e.isTop?"-0.5em":e.isBottom?"0.5em":"0"},e.prototype.textAnchorForAxisLabel=function(t,e){return t?e.isLeft?"start":e.isCenter?"middle":"end":e.isBottom?"start":e.isMiddle?"middle":"end"},e.prototype.xForXAxisLabel=function(){return this.xForAxisLabel(!this.owner.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.xForYAxisLabel=function(){return this.xForAxisLabel(this.owner.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.xForY2AxisLabel=function(){return this.xForAxisLabel(this.owner.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.dxForXAxisLabel=function(){return this.dxForAxisLabel(!this.owner.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.dxForYAxisLabel=function(){return this.dxForAxisLabel(this.owner.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.dxForY2AxisLabel=function(){return this.dxForAxisLabel(this.owner.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.dyForXAxisLabel=function(){var t=this.owner.config,e=this.getXAxisLabelPosition();return t.axis_rotated?e.isInner?"1.2em":-25-this.getMaxTickWidth("x"):e.isInner?"-0.5em":t.axis_x_height?t.axis_x_height-10:"3em"},e.prototype.dyForYAxisLabel=function(){var t=this.owner,e=this.getYAxisLabelPosition();return t.config.axis_rotated?e.isInner?"-0.5em":"3em":e.isInner?"1.2em":-10-(t.config.axis_y_inner?0:this.getMaxTickWidth("y")+10)},e.prototype.dyForY2AxisLabel=function(){var t=this.owner,e=this.getY2AxisLabelPosition();return 
t.config.axis_rotated?e.isInner?"1.2em":"-2.2em":e.isInner?"-0.5em":15+(t.config.axis_y2_inner?0:this.getMaxTickWidth("y2")+15)},e.prototype.textAnchorForXAxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(!t.config.axis_rotated,this.getXAxisLabelPosition())},e.prototype.textAnchorForYAxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(t.config.axis_rotated,this.getYAxisLabelPosition())},e.prototype.textAnchorForY2AxisLabel=function(){var t=this.owner;return this.textAnchorForAxisLabel(t.config.axis_rotated,this.getY2AxisLabelPosition())},e.prototype.getMaxTickWidth=function(t,e){var i,n,a,r,o=this.owner,s=o.config,c=0;return e&&o.currentMaxTickWidths[t]?o.currentMaxTickWidths[t]:(o.svg&&(i=o.filterTargetsToShow(o.data.targets),"y"===t?(n=o.y.copy().domain(o.getYDomain(i,"y")),a=this.getYAxis(n,o.yOrient,s.axis_y_tick_format,o.yAxisTickValues,!1,!0,!0)):"y2"===t?(n=o.y2.copy().domain(o.getYDomain(i,"y2")),a=this.getYAxis(n,o.y2Orient,s.axis_y2_tick_format,o.y2AxisTickValues,!1,!0,!0)):(n=o.x.copy().domain(o.getXDomain(i)),a=this.getXAxis(n,o.xOrient,o.xAxisTickFormat,o.xAxisTickValues,!1,!0,!0),this.updateXAxisTickValues(i,a)),(r=o.d3.select("body").append("div").classed("c3",!0)).append("svg").style("visibility","hidden").style("position","fixed").style("top",0).style("left",0).append("g").call(a).each(function(){o.d3.select(this).selectAll("text").each(function(){var t=this.getBoundingClientRect();c2){for(o=n-2,a=t[0],s=((r=t[t.length-1])-a)/(o+1),u=[a],c=0;c=0&&D.select(this).style("display",e%V?"none":"block")})}else O.svg.selectAll("."+o.axisX+" .tick text").style("display","block");_=O.generateDrawArea?O.generateDrawArea(X,!1):void 0,x=O.generateDrawBar?O.generateDrawBar(k):void 0,y=O.generateDrawLine?O.generateDrawLine(M,!1):void 
0,S=O.generateXYForText(X,k,M,!0),w=O.generateXYForText(X,k,M,!1),i&&(O.subY.domain(O.getYDomain(z,"y")),O.subY2.domain(O.getYDomain(z,"y2"))),O.updateXgridFocus(),R.select("text."+o.text+"."+o.empty).attr("x",O.width/2).attr("y",O.height/2).text(F.data_empty_label_text).transition().style("opacity",z.length?0:1),O.updateGrid(v),O.updateRegion(v),O.updateBar(b),O.updateLine(b),O.updateArea(b),O.updateCircle(),O.hasDataLabel()&&O.updateText(b),O.redrawTitle&&O.redrawTitle(),O.redrawArc&&O.redrawArc(v,b,c),O.redrawSubchart&&O.redrawSubchart(n,e,v,b,X,k,M),R.selectAll("."+o.selectedCircles).filter(O.isBarType.bind(O)).selectAll("circle").remove(),F.interaction_enabled&&!t.flow&&g&&(O.redrawEventRect(),O.updateZoom&&O.updateZoom()),O.updateCircleY(),E=(O.config.axis_rotated?O.circleY:O.circleX).bind(O),I=(O.config.axis_rotated?O.circleX:O.circleY).bind(O),t.flow&&(P=O.generateFlow({targets:z,flow:t.flow,duration:t.flow.duration,drawBar:x,drawLine:y,drawArea:_,cx:E,cy:I,xv:B,xForText:S,yForText:w})),(v||P)&&O.isTabVisible()?D.transition().duration(v).each(function(){var e=[];[O.redrawBar(x,!0),O.redrawLine(y,!0),O.redrawArea(_,!0),O.redrawCircle(E,I,!0),O.redrawText(S,w,t.flow,!0),O.redrawRegion(!0),O.redrawGrid(!0)].forEach(function(t){t.forEach(function(t){e.push(t)})}),T=O.generateWait(),e.forEach(function(t){T.add(t)})}).call(T,function(){P&&P(),F.onrendered&&F.onrendered.call(O)}):(O.redrawBar(x),O.redrawLine(y),O.redrawArea(_),O.redrawCircle(E,I),O.redrawText(S,w,t.flow),O.redrawRegion(),O.redrawGrid(),F.onrendered&&F.onrendered.call(O)),O.mapToIds(O.data.targets).forEach(function(t){O.withoutFadeIn[t]=!0})},A.updateAndRedraw=function(t){var 
e,i=this,n=i.config;(t=t||{}).withTransition=m(t,"withTransition",!0),t.withTransform=m(t,"withTransform",!1),t.withLegend=m(t,"withLegend",!1),t.withUpdateXDomain=!0,t.withUpdateOrgXDomain=!0,t.withTransitionForExit=!1,t.withTransitionForTransform=m(t,"withTransitionForTransform",t.withTransition),i.updateSizes(),t.withLegend&&n.legend_show||(e=i.axis.generateTransitions(t.withTransitionForAxis?n.transition_duration:0),i.updateScales(),i.updateSvgSize(),i.transformAll(t.withTransitionForTransform,e)),i.redraw(t,e)},A.redrawWithoutRescale=function(){this.redraw({withY:!1,withSubchart:!1,withEventRect:!1,withTransitionForAxis:!1})},A.isTimeSeries=function(){return"timeseries"===this.config.axis_x_type},A.isCategorized=function(){return this.config.axis_x_type.indexOf("categor")>=0},A.isCustomX=function(){var t=this,e=t.config;return!t.isTimeSeries()&&(e.data_x||y(e.data_xs))},A.isTimeSeriesY=function(){return"timeseries"===this.config.axis_y_type},A.getTranslate=function(t){var e,i,n=this,a=n.config;return"main"===t?(e=f(n.margin.left),i=f(n.margin.top)):"context"===t?(e=f(n.margin2.left),i=f(n.margin2.top)):"legend"===t?(e=n.margin3.left,i=n.margin3.top):"x"===t?(e=0,i=a.axis_rotated?0:n.height):"y"===t?(e=0,i=a.axis_rotated?n.height:0):"y2"===t?(e=a.axis_rotated?0:n.width,i=a.axis_rotated?1:0):"subx"===t?(e=0,i=a.axis_rotated?0:n.height2):"arc"===t&&(e=n.arcWidth/2,i=n.arcHeight/2),"translate("+e+","+i+")"},A.initialOpacity=function(t){return null!==t.value&&this.withoutFadeIn[t.id]?1:0},A.initialOpacityForCircle=function(t){return null!==t.value&&this.withoutFadeIn[t.id]?this.opacityForCircle(t):0},A.opacityForCircle=function(t){var e=(d(this.config.point_show)?this.config.point_show(t):this.config.point_show)?1:0;return c(t.value)?this.isScatterType(t)?.5:e:0},A.opacityForText=function(){return this.hasDataLabel()?1:0},A.xx=function(t){return t?this.x(t.x):null},A.xv=function(t){var e=this,i=t.value;return 
e.isTimeSeries()?i=e.parseDate(t.value):e.isCategorized()&&"string"==typeof t.value&&(i=e.config.axis_x_categories.indexOf(t.value)),Math.ceil(e.x(i))},A.yv=function(t){var e=this,i=t.axis&&"y2"===t.axis?e.y2:e.y;return Math.ceil(i(t.value))},A.subxx=function(t){return t?this.subX(t.x):null},A.transformMain=function(t,e){var i,n,a,r=this;e&&e.axisX?i=e.axisX:(i=r.main.select("."+o.axisX),t&&(i=i.transition())),e&&e.axisY?n=e.axisY:(n=r.main.select("."+o.axisY),t&&(n=n.transition())),e&&e.axisY2?a=e.axisY2:(a=r.main.select("."+o.axisY2),t&&(a=a.transition())),(t?r.main.transition():r.main).attr("transform",r.getTranslate("main")),i.attr("transform",r.getTranslate("x")),n.attr("transform",r.getTranslate("y")),a.attr("transform",r.getTranslate("y2")),r.main.select("."+o.chartArcs).attr("transform",r.getTranslate("arc"))},A.transformAll=function(t,e){var i=this;i.transformMain(t,e),i.config.subchart_show&&i.transformContext(t,e),i.legend&&i.transformLegend(t)},A.updateSvgSize=function(){var t=this,e=t.svg.select(".c3-brush .background");t.svg.attr("width",t.currentWidth).attr("height",t.currentHeight),t.svg.selectAll(["#"+t.clipId,"#"+t.clipIdForGrid]).select("rect").attr("width",t.width).attr("height",t.height),t.svg.select("#"+t.clipIdForXAxis).select("rect").attr("x",t.getXAxisClipX.bind(t)).attr("y",t.getXAxisClipY.bind(t)).attr("width",t.getXAxisClipWidth.bind(t)).attr("height",t.getXAxisClipHeight.bind(t)),t.svg.select("#"+t.clipIdForYAxis).select("rect").attr("x",t.getYAxisClipX.bind(t)).attr("y",t.getYAxisClipY.bind(t)).attr("width",t.getYAxisClipWidth.bind(t)).attr("height",t.getYAxisClipHeight.bind(t)),t.svg.select("#"+t.clipIdForSubchart).select("rect").attr("width",t.width).attr("height",e.size()?e.attr("height"):0),t.svg.select("."+o.zoomRect).attr("width",t.width).attr("height",t.height),t.selectChart.style("max-height",t.currentHeight+"px")},A.updateDimension=function(t){var 
e=this;t||(e.config.axis_rotated?(e.axes.x.call(e.xAxis),e.axes.subx.call(e.subXAxis)):(e.axes.y.call(e.yAxis),e.axes.y2.call(e.y2Axis))),e.updateSizes(),e.updateScales(),e.updateSvgSize(),e.transformAll(!1)},A.observeInserted=function(t){var e,i=this;"undefined"!=typeof MutationObserver?(e=new MutationObserver(function(n){n.forEach(function(n){"childList"===n.type&&n.previousSibling&&(e.disconnect(),i.intervalForObserveInserted=window.setInterval(function(){t.node().parentNode&&(window.clearInterval(i.intervalForObserveInserted),i.updateDimension(),i.brush&&i.brush.update(),i.config.oninit.call(i),i.redraw({withTransform:!0,withUpdateXDomain:!0,withUpdateOrgXDomain:!0,withTransition:!1,withTransitionForTransform:!1,withLegend:!0}),t.transition().style("opacity",1))},10))})})).observe(t.node(),{attributes:!0,childList:!0,characterData:!0}):window.console.error("MutationObserver not defined.")},A.bindResize=function(){var t=this,e=t.config;if(t.resizeFunction=t.generateResize(),t.resizeFunction.add(function(){e.onresize.call(t)}),e.resize_auto&&t.resizeFunction.add(function(){void 0!==t.resizeTimeout&&window.clearTimeout(t.resizeTimeout),t.resizeTimeout=window.setTimeout(function(){delete t.resizeTimeout,t.api.flush()},100)}),t.resizeFunction.add(function(){e.onresized.call(t)}),window.attachEvent)window.attachEvent("onresize",t.resizeFunction);else if(window.addEventListener)window.addEventListener("resize",t.resizeFunction,!1);else{var i=window.onresize;i?i.add&&i.remove||(i=t.generateResize()).add(window.onresize):i=t.generateResize(),i.add(t.resizeFunction),window.onresize=i}},A.generateResize=function(){function t(){e.forEach(function(t){t()})}var e=[];return t.add=function(t){e.push(t)},t.remove=function(t){for(var i=0;ie.getTotalLength())break;i--}while(i>0);return i})),"SVGPathSegList"in 
window||(window.SVGPathSegList=function(t){this._pathElement=t,this._list=this._parsePath(this._pathElement.getAttribute("d")),this._mutationObserverConfig={attributes:!0,attributeFilter:["d"]},this._pathElementMutationObserver=new MutationObserver(this._updateListFromPathMutations.bind(this)),this._pathElementMutationObserver.observe(this._pathElement,this._mutationObserverConfig)},window.SVGPathSegList.prototype.classname="SVGPathSegList",Object.defineProperty(window.SVGPathSegList.prototype,"numberOfItems",{get:function(){return this._checkPathSynchronizedToList(),this._list.length},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"pathSegList",{get:function(){return this._pathSegList||(this._pathSegList=new window.SVGPathSegList(this)),this._pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"normalizedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"animatedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),Object.defineProperty(window.SVGPathElement.prototype,"animatedNormalizedPathSegList",{get:function(){return this.pathSegList},enumerable:!0}),window.SVGPathSegList.prototype._checkPathSynchronizedToList=function(){this._updateListFromPathMutations(this._pathElementMutationObserver.takeRecords())},window.SVGPathSegList.prototype._updateListFromPathMutations=function(t){if(this._pathElement){var 
e=!1;t.forEach(function(t){"d"==t.attributeName&&(e=!0)}),e&&(this._list=this._parsePath(this._pathElement.getAttribute("d")))}},window.SVGPathSegList.prototype._writeListToPath=function(){this._pathElementMutationObserver.disconnect(),this._pathElement.setAttribute("d",window.SVGPathSegList._pathSegArrayAsString(this._list)),this._pathElementMutationObserver.observe(this._pathElement,this._mutationObserverConfig)},window.SVGPathSegList.prototype.segmentChanged=function(t){this._writeListToPath()},window.SVGPathSegList.prototype.clear=function(){this._checkPathSynchronizedToList(),this._list.forEach(function(t){t._owningPathSegList=null}),this._list=[],this._writeListToPath()},window.SVGPathSegList.prototype.initialize=function(t){return this._checkPathSynchronizedToList(),this._list=[t],t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype._checkValidIndex=function(t){if(isNaN(t)||t<0||t>=this.numberOfItems)throw"INDEX_SIZE_ERR"},window.SVGPathSegList.prototype.getItem=function(t){return this._checkPathSynchronizedToList(),this._checkValidIndex(t),this._list[t]},window.SVGPathSegList.prototype.insertItemBefore=function(t,e){return this._checkPathSynchronizedToList(),e>this.numberOfItems&&(e=this.numberOfItems),t._owningPathSegList&&(t=t.clone()),this._list.splice(e,0,t),t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype.replaceItem=function(t,e){return this._checkPathSynchronizedToList(),t._owningPathSegList&&(t=t.clone()),this._checkValidIndex(e),this._list[e]=t,t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList.prototype.removeItem=function(t){this._checkPathSynchronizedToList(),this._checkValidIndex(t);var e=this._list[t];return this._list.splice(t,1),this._writeListToPath(),e},window.SVGPathSegList.prototype.appendItem=function(t){return 
this._checkPathSynchronizedToList(),t._owningPathSegList&&(t=t.clone()),this._list.push(t),t._owningPathSegList=this,this._writeListToPath(),t},window.SVGPathSegList._pathSegArrayAsString=function(t){var e="",i=!0;return t.forEach(function(t){i?(i=!1,e+=t._asPathString()):e+=" "+t._asPathString()}),e},window.SVGPathSegList.prototype._parsePath=function(t){if(!t||0==t.length)return[];var e=this,i=function(){this.pathSegList=[]};i.prototype.appendSegment=function(t){this.pathSegList.push(t)};var n=function(t){this._string=t,this._currentIndex=0,this._endIndex=this._string.length,this._previousCommand=window.SVGPathSeg.PATHSEG_UNKNOWN,this._skipOptionalSpaces()};n.prototype._isCurrentSpace=function(){var t=this._string[this._currentIndex];return t<=" "&&(" "==t||"\n"==t||"\t"==t||"\r"==t||"\f"==t)},n.prototype._skipOptionalSpaces=function(){for(;this._currentIndex="0"&&t<="9")&&e!=window.SVGPathSeg.PATHSEG_CLOSEPATH?e==window.SVGPathSeg.PATHSEG_MOVETO_ABS?window.SVGPathSeg.PATHSEG_LINETO_ABS:e==window.SVGPathSeg.PATHSEG_MOVETO_REL?window.SVGPathSeg.PATHSEG_LINETO_REL:e:window.SVGPathSeg.PATHSEG_UNKNOWN},n.prototype.initialCommandIsMoveTo=function(){if(!this.hasMoreData())return!0;var t=this.peekSegmentType();return t==window.SVGPathSeg.PATHSEG_MOVETO_ABS||t==window.SVGPathSeg.PATHSEG_MOVETO_REL},n.prototype._parseNumber=function(){var t=0,e=0,i=1,n=0,a=1,r=1,o=this._currentIndex;if(this._skipOptionalSpaces(),this._currentIndex"9")&&"."!=this._string.charAt(this._currentIndex))){for(var s=this._currentIndex;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)this._currentIndex++;if(this._currentIndex!=s)for(var 
c=this._currentIndex-1,d=1;c>=s;)e+=d*(this._string.charAt(c--)-"0"),d*=10;if(this._currentIndex=this._endIndex||this._string.charAt(this._currentIndex)<"0"||this._string.charAt(this._currentIndex)>"9")return;for(;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)i*=10,n+=(this._string.charAt(this._currentIndex)-"0")/i,this._currentIndex+=1}if(this._currentIndex!=o&&this._currentIndex+1=this._endIndex||this._string.charAt(this._currentIndex)<"0"||this._string.charAt(this._currentIndex)>"9")return;for(;this._currentIndex="0"&&this._string.charAt(this._currentIndex)<="9";)t*=10,t+=this._string.charAt(this._currentIndex)-"0",this._currentIndex++}var l=e+n;if(l*=a,t&&(l*=Math.pow(10,r*t)),o!=this._currentIndex)return this._skipOptionalSpacesOrDelimiter(),l}},n.prototype._parseArcFlag=function(){if(!(this._currentIndex>=this._endIndex)){var t=!1,e=this._string.charAt(this._currentIndex++);if("0"==e)t=!1;else{if("1"!=e)return;t=!0}return this._skipOptionalSpacesOrDelimiter(),t}},n.prototype.parseSegment=function(){var t=this._string[this._currentIndex],i=this._pathSegTypeFromChar(t);if(i==window.SVGPathSeg.PATHSEG_UNKNOWN){if(this._previousCommand==window.SVGPathSeg.PATHSEG_UNKNOWN)return null;if((i=this._nextCommandHelper(t,this._previousCommand))==window.SVGPathSeg.PATHSEG_UNKNOWN)return null}else this._currentIndex++;switch(this._previousCommand=i,i){case window.SVGPathSeg.PATHSEG_MOVETO_REL:return new window.SVGPathSegMovetoRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_MOVETO_ABS:return new window.SVGPathSegMovetoAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_REL:return new window.SVGPathSegLinetoRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_ABS:return new window.SVGPathSegLinetoAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_REL:return new 
window.SVGPathSegLinetoHorizontalRel(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_HORIZONTAL_ABS:return new window.SVGPathSegLinetoHorizontalAbs(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_VERTICAL_REL:return new window.SVGPathSegLinetoVerticalRel(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_LINETO_VERTICAL_ABS:return new window.SVGPathSegLinetoVerticalAbs(e,this._parseNumber());case window.SVGPathSeg.PATHSEG_CLOSEPATH:return this._skipOptionalSpaces(),new window.SVGPathSegClosePath(e);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicRel(e,n.x,n.y,n.x1,n.y1,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_ABS:return n={x1:this._parseNumber(),y1:this._parseNumber(),x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicAbs(e,n.x,n.y,n.x1,n.y1,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_REL:return n={x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicSmoothRel(e,n.x,n.y,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_CUBIC_SMOOTH_ABS:return n={x2:this._parseNumber(),y2:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoCubicSmoothAbs(e,n.x,n.y,n.x2,n.y2);case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoQuadraticRel(e,n.x,n.y,n.x1,n.y1);case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_ABS:return n={x1:this._parseNumber(),y1:this._parseNumber(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegCurvetoQuadraticAbs(e,n.x,n.y,n.x1,n.y1);case 
window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_REL:return new window.SVGPathSegCurvetoQuadraticSmoothRel(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_CURVETO_QUADRATIC_SMOOTH_ABS:return new window.SVGPathSegCurvetoQuadraticSmoothAbs(e,this._parseNumber(),this._parseNumber());case window.SVGPathSeg.PATHSEG_ARC_REL:return n={x1:this._parseNumber(),y1:this._parseNumber(),arcAngle:this._parseNumber(),arcLarge:this._parseArcFlag(),arcSweep:this._parseArcFlag(),x:this._parseNumber(),y:this._parseNumber()},new window.SVGPathSegArcRel(e,n.x,n.y,n.x1,n.y1,n.arcAngle,n.arcLarge,n.arcSweep);case window.SVGPathSeg.PATHSEG_ARC_ABS:var n={x1:this._parseNumber(),y1:this._parseNumber(),arcAngle:this._parseNumber(),arcLarge:this._parseArcFlag(),arcSweep:this._parseArcFlag(),x:this._parseNumber(),y:this._parseNumber()};return new window.SVGPathSegArcAbs(e,n.x,n.y,n.x1,n.y1,n.arcAngle,n.arcLarge,n.arcSweep);default:throw"Unknown path seg type."}};var a=new i,r=new n(t);if(!r.initialCommandIsMoveTo())return[];for(;r.hasMoreData();){var o=r.parseSegment();if(!o)return[];a.appendSegment(o)}return a.pathSegList}),b.axis=function(){},b.axis.labels=function(t){var e=this.internal;arguments.length&&(Object.keys(t).forEach(function(i){e.axis.setLabelText(i,t[i])}),e.axis.updateLabels())},b.axis.max=function(t){var e=this.internal,i=e.config;if(!arguments.length)return{x:i.axis_x_max,y:i.axis_y_max,y2:i.axis_y2_max};"object"===(void 0===t?"undefined":s(t))?(c(t.x)&&(i.axis_x_max=t.x),c(t.y)&&(i.axis_y_max=t.y),c(t.y2)&&(i.axis_y2_max=t.y2)):i.axis_y_max=i.axis_y2_max=t,e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})},b.axis.min=function(t){var e=this.internal,i=e.config;if(!arguments.length)return{x:i.axis_x_min,y:i.axis_y_min,y2:i.axis_y2_min};"object"===(void 
0===t?"undefined":s(t))?(c(t.x)&&(i.axis_x_min=t.x),c(t.y)&&(i.axis_y_min=t.y),c(t.y2)&&(i.axis_y2_min=t.y2)):i.axis_y_min=i.axis_y2_min=t,e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})},b.axis.range=function(t){if(!arguments.length)return{max:this.axis.max(),min:this.axis.min()};void 0!==t.max&&this.axis.max(t.max),void 0!==t.min&&this.axis.min(t.min)},b.category=function(t,e){var i=this.internal,n=i.config;return arguments.length>1&&(n.axis_x_categories[t]=e,i.redraw()),n.axis_x_categories[t]},b.categories=function(t){var e=this.internal,i=e.config;return arguments.length?(i.axis_x_categories=t,e.redraw(),i.axis_x_categories):i.axis_x_categories},b.resize=function(t){var e=this.internal.config;e.size_width=t?t.width:null,e.size_height=t?t.height:null,this.flush()},b.flush=function(){this.internal.updateAndRedraw({withLegend:!0,withTransition:!1,withTransitionForTransform:!1})},b.destroy=function(){var t=this.internal;if(window.clearInterval(t.intervalForObserveInserted),void 0!==t.resizeTimeout&&window.clearTimeout(t.resizeTimeout),window.detachEvent)window.detachEvent("onresize",t.resizeFunction);else if(window.removeEventListener)window.removeEventListener("resize",t.resizeFunction);else{var e=window.onresize;e&&e.add&&e.remove&&e.remove(t.resizeFunction)}return t.selectChart.classed("c3",!1).html(""),Object.keys(t).forEach(function(e){t[e]=null}),null},b.color=function(t){return this.internal.color(t)},b.data=function(t){var e=this.internal.data.targets;return void 0===t?e:e.filter(function(e){return[].concat(t).indexOf(e.id)>=0})},b.data.shown=function(t){return this.internal.filterTargetsToShow(this.data(t))},b.data.values=function(t){var e,i=null;return t&&(i=(e=this.data(t))[0]?e[0].values.map(function(t){return t.value}):null),i},b.data.names=function(t){return this.internal.clearLegendItemTextBoxCache(),this.internal.updateDataAttributes("names",t)},b.data.colors=function(t){return 
this.internal.updateDataAttributes("colors",t)},b.data.axes=function(t){return this.internal.updateDataAttributes("axes",t)},b.flow=function(t){var e,i,n,a,r,o,s,d=this.internal,l=[],u=d.getMaxDataCount(),h=0,g=0;if(t.json)i=d.convertJsonToData(t.json,t.keys);else if(t.rows)i=d.convertRowsToData(t.rows);else{if(!t.columns)return;i=d.convertColumnsToData(t.columns)}e=d.convertDataToTargets(i,!0),d.data.targets.forEach(function(t){var i,n,a=!1;for(i=0;i1?a.values[a.values.length-1].x-r.x:r.x-d.getXDomain(d.data.targets)[0]:1,n=[r.x-o,r.x],d.updateXDomain(null,!0,!0,!1,n)),d.updateTargets(d.data.targets),d.redraw({flow:{index:r.index,length:h,duration:c(t.duration)?t.duration:d.config.transition_duration,done:t.done,orgDataCount:u},withLegend:!0,withTransition:u>1,withTrimXDomain:!1,withUpdateXAxis:!0})},A.generateFlow=function(t){var e=this,i=e.config,n=e.d3;return function(){var a,r,s,c=t.targets,d=t.flow,l=t.drawBar,u=t.drawLine,h=t.drawArea,g=t.cx,p=t.cy,f=t.xv,x=t.xForText,y=t.yForText,m=t.duration,S=1,w=d.index,v=d.length,b=e.getValueOnIndex(e.data.targets[0].values,w),A=e.getValueOnIndex(e.data.targets[0].values,w+v),T=e.x.domain(),P=d.duration||m,L=d.done||function(){},C=e.generateWait(),V=e.xgrid||n.selectAll([]),G=e.xgridLines||n.selectAll([]),E=e.mainRegion||n.selectAll([]),I=e.mainText||n.selectAll([]),O=e.mainBar||n.selectAll([]),R=e.mainLine||n.selectAll([]),D=e.mainArea||n.selectAll([]),F=e.mainCircle||n.selectAll([]);e.flowing=!0,e.data.targets.forEach(function(t){t.values.splice(0,v)}),s=e.updateXDomain(c,!0,!0),e.updateXGrid&&e.updateXGrid(!0),d.orgDataCount?a=1===d.orgDataCount||(b&&b.x)===(A&&A.x)?e.x(T[0])-e.x(s[0]):e.isTimeSeries()?e.x(T[0])-e.x(s[0]):e.x(b.x)-e.x(A.x):1!==e.data.targets[0].values.length?a=e.x(T[0])-e.x(s[0]):e.isTimeSeries()?(b=e.getValueOnIndex(e.data.targets[0].values,0),A=e.getValueOnIndex(e.data.targets[0].values,e.data.targets[0].values.length-1),a=e.x(b.x)-e.x(A.x)):a=_(s)/2,S=_(T)/_(s),r="translate("+a+",0) 
scale("+S+",1)",e.hideXGridFocus(),n.transition().ease("linear").duration(P).each(function(){C.add(e.axes.x.transition().call(e.xAxis)),C.add(O.transition().attr("transform",r)),C.add(R.transition().attr("transform",r)),C.add(D.transition().attr("transform",r)),C.add(F.transition().attr("transform",r)),C.add(I.transition().attr("transform",r)),C.add(E.filter(e.isRegionOnX).transition().attr("transform",r)),C.add(V.transition().attr("transform",r)),C.add(G.transition().attr("transform",r))}).call(C,function(){var t,n=[],a=[],r=[];if(v){for(t=0;t=0&&(e=!0)}),!e)}),r.regions},b.selected=function(t){var e=this.internal,i=e.d3;return i.merge(e.main.selectAll("."+o.shapes+e.getTargetSelectorSuffix(t)).selectAll("."+o.shape).filter(function(){return i.select(this).classed(o.SELECTED)}).map(function(t){return t.map(function(t){var e=t.__data__;return e.data?e.data:e})}))},b.select=function(t,e,i){var n=this.internal,a=n.d3,r=n.config;r.data_selection_enabled&&n.main.selectAll("."+o.shapes).selectAll("."+o.shape).each(function(s,c){var d=a.select(this),l=s.data?s.data.id:s.id,u=n.getToggle(this,s).bind(n),h=r.data_selection_grouped||!t||t.indexOf(l)>=0,g=!e||e.indexOf(c)>=0,p=d.classed(o.SELECTED);d.classed(o.line)||d.classed(o.area)||(h&&g?r.data_selection_isselectable(s)&&!p&&u(!0,d.classed(o.SELECTED,!0),s,c):void 0!==i&&i&&p&&u(!1,d.classed(o.SELECTED,!1),s,c))})},b.unselect=function(t,e){var i=this.internal,n=i.d3,a=i.config;a.data_selection_enabled&&i.main.selectAll("."+o.shapes).selectAll("."+o.shape).each(function(r,s){var c=n.select(this),d=r.data?r.data.id:r.id,l=i.getToggle(this,r).bind(i),u=a.data_selection_grouped||!t||t.indexOf(d)>=0,h=!e||e.indexOf(s)>=0,g=c.classed(o.SELECTED);c.classed(o.line)||c.classed(o.area)||u&&h&&a.data_selection_isselectable(r)&&g&&l(!1,c.classed(o.SELECTED,!1),r,s)})},b.show=function(t,e){var 
i,n=this.internal;t=n.mapToTargetIds(t),e=e||{},n.removeHiddenTargetIds(t),(i=n.svg.selectAll(n.selectorTargets(t))).transition().style("opacity",1,"important").call(n.endall,function(){i.style("opacity",null).style("opacity",1)}),e.withLegend&&n.showLegend(t),n.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0,withLegend:!0})},b.hide=function(t,e){var i,n=this.internal;t=n.mapToTargetIds(t),e=e||{},n.addHiddenTargetIds(t),(i=n.svg.selectAll(n.selectorTargets(t))).transition().style("opacity",0,"important").call(n.endall,function(){i.style("opacity",null).style("opacity",0)}),e.withLegend&&n.hideLegend(t),n.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0,withLegend:!0})},b.toggle=function(t,e){var i=this,n=this.internal;n.mapToTargetIds(t).forEach(function(t){n.isTargetToShow(t)?i.hide(t,e):i.show(t,e)})},b.tooltip=function(){},b.tooltip.show=function(t){var e,i,n=this.internal;t.mouse&&(i=t.mouse),t.data?n.isMultipleX()?(i=[n.x(t.data.x),n.getYScale(t.data.id)(t.data.value)],e=null):e=c(t.data.index)?t.data.index:n.getIndexByX(t.data.x):void 0!==t.x?e=n.getIndexByX(t.x):void 0!==t.index&&(e=t.index),n.dispatchEvent("mouseover",e,i),n.dispatchEvent("mousemove",e,i),n.config.tooltip_onshow.call(n,t.data)},b.tooltip.hide=function(){this.internal.dispatchEvent("mouseout",0),this.internal.config.tooltip_onhide.call(this)},b.transform=function(t,e){var i=this.internal,n=["pie","donut"].indexOf(t)>=0?{withTransform:!0}:null;i.transformTo(e,t,n)},A.transformTo=function(t,e,i){var n=this,a=!n.hasArcType(),r=i||{withTransitionForAxis:a};r.withTransitionForTransform=!1,n.transiting=!1,n.setTargetType(t,e),n.updateTargets(n.data.targets),n.updateAndRedraw(r)},b.x=function(t){var e=this.internal;return arguments.length&&(e.updateTargetX(e.data.targets,t),e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})),e.data.xs},b.xs=function(t){var e=this.internal;return 
arguments.length&&(e.updateTargetXs(e.data.targets,t),e.redraw({withUpdateOrgXDomain:!0,withUpdateXDomain:!0})),e.data.xs},b.zoom=function(t){var e=this.internal;return t&&(e.isTimeSeries()&&(t=t.map(function(t){return e.parseDate(t)})),e.brush.extent(t),e.redraw({withUpdateXDomain:!0,withY:e.config.zoom_rescale}),e.config.zoom_onzoom.call(this,e.x.orgDomain())),e.brush.extent()},b.zoom.enable=function(t){var e=this.internal;e.config.zoom_enabled=t,e.updateAndRedraw()},b.unzoom=function(){var t=this.internal;t.brush.clear().update(),t.redraw({withUpdateXDomain:!0})},b.zoom.max=function(t){var e=this.internal,i=e.config,n=e.d3;if(0!==t&&!t)return i.zoom_x_max;i.zoom_x_max=n.max([e.orgXDomain[1],t])},b.zoom.min=function(t){var e=this.internal,i=e.config,n=e.d3;if(0!==t&&!t)return i.zoom_x_min;i.zoom_x_min=n.min([e.orgXDomain[0],t])},b.zoom.range=function(t){if(!arguments.length)return{max:this.domain.max(),min:this.domain.min()};void 0!==t.max&&this.domain.max(t.max),void 0!==t.min&&this.domain.min(t.min)},A.initPie=function(){var t=this,e=t.d3,i=t.config;t.pie=e.layout.pie().value(function(t){return t.values.reduce(function(t,e){return t+e.value},0)}),i.data_order||t.pie.sort(null)},A.updateRadius=function(){var t=this,e=t.config,i=e.gauge_width||e.donut_width;t.radiusExpanded=Math.min(t.arcWidth,t.arcHeight)/2,t.radius=.95*t.radiusExpanded,t.innerRadiusRatio=i?(t.radius-i)/t.radius:.6,t.innerRadius=t.hasType("donut")||t.hasType("gauge")?t.radius*t.innerRadiusRatio:0},A.updateArc=function(){var t=this;t.svgArc=t.getSvgArc(),t.svgArcExpanded=t.getSvgArcExpanded(),t.svgArcExpandedSub=t.getSvgArcExpanded(.98)},A.updateAngle=function(t){var e,i,n,a,r=this,o=r.config,s=!1,c=0;return 
o?(r.pie(r.filterTargetsToShow(r.data.targets)).forEach(function(e){s||e.data.id!==t.data.id||(s=!0,(t=e).index=c),c++}),isNaN(t.startAngle)&&(t.startAngle=0),isNaN(t.endAngle)&&(t.endAngle=t.startAngle),r.isGaugeType(t.data)&&(e=o.gauge_min,i=o.gauge_max,n=Math.PI*(o.gauge_fullCircle?2:1)/(i-e),a=t.value.375?1.175-36/o.radius:.8)*o.radius/a:0)+","+n*r+")"),l},A.getArcRatio=function(t){var e=this,i=e.config,n=Math.PI*(e.hasType("gauge")&&!i.gauge_fullCircle?1:2);return t?(t.endAngle-t.startAngle)/n:null},A.convertToArcData=function(t){return this.addName({id:t.data.id,value:t.value,ratio:this.getArcRatio(t),index:t.index})},A.textForArcLabel=function(t){var e,i,n,a,r,o=this;return o.shouldShowArcLabel()?(e=o.updateAngle(t),i=e?e.value:null,n=o.getArcRatio(e),a=t.data.id,o.hasType("gauge")||o.meetsArcLabelThreshold(n)?(r=o.getArcLabelFormat(),r?r(i,n,a):o.defaultArcValueFormat(i,n)):""):""},A.textForGaugeMinMax=function(t,e){var i=this.getGaugeLabelExtents();return i?i(t,e):t},A.expandArc=function(t){var e,i=this;i.transiting?e=window.setInterval(function(){i.transiting||(window.clearInterval(e),i.legend.selectAll(".c3-legend-item-focused").size()>0&&i.expandArc(t))},10):(t=i.mapToTargetIds(t),i.svg.selectAll(i.selectorTargets(t,"."+o.chartArc)).each(function(t){i.shouldExpand(t.data.id)&&i.d3.select(this).selectAll("path").transition().duration(i.expandDuration(t.data.id)).attr("d",i.svgArcExpanded).transition().duration(2*i.expandDuration(t.data.id)).attr("d",i.svgArcExpandedSub).each(function(t){i.isDonutType(t.data)})}))},A.unexpandArc=function(t){var e=this;e.transiting||(t=e.mapToTargetIds(t),e.svg.selectAll(e.selectorTargets(t,"."+o.chartArc)).selectAll("path").transition().duration(function(t){return e.expandDuration(t.data.id)}).attr("d",e.svgArc),e.svg.selectAll("."+o.arc))},A.expandDuration=function(t){var e=this,i=e.config;return 
e.isDonutType(t)?i.donut_expand_duration:e.isGaugeType(t)?i.gauge_expand_duration:e.isPieType(t)?i.pie_expand_duration:50},A.shouldExpand=function(t){var e=this,i=e.config;return e.isDonutType(t)&&i.donut_expand||e.isGaugeType(t)&&i.gauge_expand||e.isPieType(t)&&i.pie_expand},A.shouldShowArcLabel=function(){var t=this,e=t.config,i=!0;return t.hasType("donut")?i=e.donut_label_show:t.hasType("pie")&&(i=e.pie_label_show),i},A.meetsArcLabelThreshold=function(t){var e=this,i=e.config;return t>=(e.hasType("donut")?i.donut_label_threshold:i.pie_label_threshold)},A.getArcLabelFormat=function(){var t=this,e=t.config,i=e.pie_label_format;return t.hasType("gauge")?i=e.gauge_label_format:t.hasType("donut")&&(i=e.donut_label_format),i},A.getGaugeLabelExtents=function(){return this.config.gauge_label_extents},A.getArcTitle=function(){var t=this;return t.hasType("donut")?t.config.donut_title:""},A.updateTargetsForArc=function(t){var e,i=this,n=i.main,a=i.classChartArc.bind(i),r=i.classArcs.bind(i),s=i.classFocus.bind(i);(e=n.select("."+o.chartArcs).selectAll("."+o.chartArc).data(i.pie(t)).attr("class",function(t){return a(t)+s(t.data)}).enter().append("g").attr("class",a)).append("g").attr("class",r),e.append("text").attr("dy",i.hasType("gauge")?"-.1em":".35em").style("opacity",0).style("text-anchor","middle").style("pointer-events","none")},A.initArc=function(){var t=this;t.arcs=t.main.select("."+o.chart).append("g").attr("class",o.chartArcs).attr("transform",t.getTranslate("arc")),t.arcs.append("text").attr("class",o.chartArcsTitle).style("text-anchor","middle").text(t.getArcTitle())},A.redrawArc=function(t,e,i){var n,a=this,r=a.d3,s=a.config,c=a.main;(n=c.selectAll("."+o.arcs).selectAll("."+o.arc).data(a.arcData.bind(a))).enter().append("path").attr("class",a.classArc.bind(a)).style("fill",function(t){return a.color(t.data)}).style("cursor",function(t){return 
s.interaction_enabled&&s.data_selection_isselectable(t)?"pointer":null}).each(function(t){a.isGaugeType(t.data)&&(t.startAngle=t.endAngle=s.gauge_startingAngle),this._current=t}),n.attr("transform",function(t){return!a.isGaugeType(t.data)&&i?"scale(0)":""}).on("mouseover",s.interaction_enabled?function(t){var e,i;a.transiting||(e=a.updateAngle(t))&&(i=a.convertToArcData(e),a.expandArc(e.data.id),a.api.focus(e.data.id),a.toggleFocusLegend(e.data.id,!0),a.config.data_onmouseover(i,this))}:null).on("mousemove",s.interaction_enabled?function(t){var e,i=a.updateAngle(t);i&&(e=[a.convertToArcData(i)],a.showTooltip(e,this))}:null).on("mouseout",s.interaction_enabled?function(t){var e,i;a.transiting||(e=a.updateAngle(t))&&(i=a.convertToArcData(e),a.unexpandArc(e.data.id),a.api.revert(),a.revertLegend(),a.hideTooltip(),a.config.data_onmouseout(i,this))}:null).on("click",s.interaction_enabled?function(t,e){var i,n=a.updateAngle(t);n&&(i=a.convertToArcData(n),a.toggleShape&&a.toggleShape(this,i,e),a.config.data_onclick.call(a.api,i,this))}:null).each(function(){a.transiting=!0}).transition().duration(t).attrTween("d",function(t){var e,i=a.updateAngle(t);return i?(isNaN(this._current.startAngle)&&(this._current.startAngle=0),isNaN(this._current.endAngle)&&(this._current.endAngle=this._current.startAngle),e=r.interpolate(this._current,i),this._current=e(0),function(i){var n=e(i);return n.data=t.data,a.getArc(n,!0)}):function(){return"M 0 0"}}).attr("transform",i?"scale(1)":"").style("fill",function(t){return a.levelColor?a.levelColor(t.data.values[0].value):a.color(t.data.id)}).call(a.endall,function(){a.transiting=!1}),n.exit().transition().duration(e).style("opacity",0).remove(),c.selectAll("."+o.chartArc).select("text").style("opacity",0).attr("class",function(t){return a.isGaugeType(t.data)?o.gaugeValue:""}).text(a.textForArcLabel.bind(a)).attr("transform",a.transformForArcLabel.bind(a)).style("font-size",function(t){return 
a.isGaugeType(t.data)?Math.round(a.radius/5)+"px":""}).transition().duration(t).style("opacity",function(t){return a.isTargetToShow(t.data.id)&&a.isArcType(t.data)?1:0}),c.select("."+o.chartArcsTitle).style("opacity",a.hasType("donut")||a.hasType("gauge")?1:0),a.hasType("gauge")&&(a.arcs.select("."+o.chartArcsBackground).attr("d",function(){var t={data:[{value:s.gauge_max}],startAngle:s.gauge_startingAngle,endAngle:-1*s.gauge_startingAngle};return a.getArc(t,!0,!0)}),a.arcs.select("."+o.chartArcsGaugeUnit).attr("dy",".75em").text(s.gauge_label_show?s.gauge_units:""),a.arcs.select("."+o.chartArcsGaugeMin).attr("dx",-1*(a.innerRadius+(a.radius-a.innerRadius)/(s.gauge_fullCircle?1:2))+"px").attr("dy","1.2em").text(s.gauge_label_show?a.textForGaugeMinMax(s.gauge_min,!1):""),a.arcs.select("."+o.chartArcsGaugeMax).attr("dx",a.innerRadius+(a.radius-a.innerRadius)/(s.gauge_fullCircle?1:2)+"px").attr("dy","1.2em").text(s.gauge_label_show?a.textForGaugeMinMax(s.gauge_max,!0):""))},A.initGauge=function(){var t=this.arcs;this.hasType("gauge")&&(t.append("path").attr("class",o.chartArcsBackground),t.append("text").attr("class",o.chartArcsGaugeUnit).style("text-anchor","middle").style("pointer-events","none"),t.append("text").attr("class",o.chartArcsGaugeMin).style("text-anchor","middle").style("pointer-events","none"),t.append("text").attr("class",o.chartArcsGaugeMax).style("text-anchor","middle").style("pointer-events","none"))},A.getGaugeLabelHeight=function(){return this.config.gauge_label_show?20:0},A.hasCaches=function(t){for(var e=0;e=0?o.focused:"")},A.classDefocused=function(t){return" "+(this.defocusedTargetIds.indexOf(t.id)>=0?o.defocused:"")},A.classChartText=function(t){return o.chartText+this.classTarget(t.id)},A.classChartLine=function(t){return o.chartLine+this.classTarget(t.id)},A.classChartBar=function(t){return o.chartBar+this.classTarget(t.id)},A.classChartArc=function(t){return 
o.chartArc+this.classTarget(t.data.id)},A.getTargetSelectorSuffix=function(t){return t||0===t?("-"+t).replace(/[\s?!@#$%^&*()_=+,.<>'":;\[\]\/|~`{}\\]/g,"-"):""},A.selectorTarget=function(t,e){return(e||"")+"."+o.target+this.getTargetSelectorSuffix(t)},A.selectorTargets=function(t,e){var i=this;return t=t||[],t.length?t.map(function(t){return i.selectorTarget(t,e)}):null},A.selectorLegend=function(t){return"."+o.legendItem+this.getTargetSelectorSuffix(t)},A.selectorLegends=function(t){var e=this;return t&&t.length?t.map(function(t){return e.selectorLegend(t)}):null},A.getClipPath=function(t){return"url("+(window.navigator.appVersion.toLowerCase().indexOf("msie 9.")>=0?"":document.URL.split("#")[0])+"#"+t+")"},A.appendClip=function(t,e){return t.append("clipPath").attr("id",e).append("rect")},A.getAxisClipX=function(t){var e=Math.max(30,this.margin.left);return t?-(1+e):-(e-1)},A.getAxisClipY=function(t){return t?-20:-this.margin.top},A.getXAxisClipX=function(){var t=this;return t.getAxisClipX(!t.config.axis_rotated)},A.getXAxisClipY=function(){var t=this;return t.getAxisClipY(!t.config.axis_rotated)},A.getYAxisClipX=function(){var t=this;return t.config.axis_y_inner?-1:t.getAxisClipX(t.config.axis_rotated)},A.getYAxisClipY=function(){var t=this;return t.getAxisClipY(t.config.axis_rotated)},A.getAxisClipWidth=function(t){var e=this,i=Math.max(30,e.margin.left),n=Math.max(30,e.margin.right);return t?e.width+2+i+n:e.margin.left+20},A.getAxisClipHeight=function(t){return(t?this.margin.bottom:this.margin.top+this.height)+20},A.getXAxisClipWidth=function(){var t=this;return t.getAxisClipWidth(!t.config.axis_rotated)},A.getXAxisClipHeight=function(){var t=this;return t.getAxisClipHeight(!t.config.axis_rotated)},A.getYAxisClipWidth=function(){var t=this;return t.getAxisClipWidth(t.config.axis_rotated)+(t.config.axis_y_inner?20:0)},A.getYAxisClipHeight=function(){var t=this;return t.getAxisClipHeight(t.config.axis_rotated)},A.generateColor=function(){var 
t=this,e=t.config,i=t.d3,n=e.data_colors,a=y(e.color_pattern)?e.color_pattern:i.scale.category10().range(),r=e.data_color,o=[];return function(t){var e,i=t.id||t.data&&t.data.id||t;return n[i]instanceof Function?e=n[i](t):n[i]?e=n[i]:(o.indexOf(i)<0&&o.push(i),e=a[o.indexOf(i)%a.length],n[i]=e),r instanceof Function?r(e,t):e}},A.generateLevelColor=function(){var t=this.config,e=t.color_pattern,i=t.color_threshold,n="value"===i.unit,a=i.values&&i.values.length?i.values:[],r=i.max||100;return y(t.color_threshold)?function(t){var i,o=e[e.length-1];for(i=0;i=0?n.data.xs[i]=(e&&n.data.xs[i]?n.data.xs[i]:[]).concat(t.map(function(t){return t[r]}).filter(c).map(function(t,e){return n.generateTargetX(t,i,e)})):a.data_x?n.data.xs[i]=n.getOtherTargetXs():y(a.data_xs)&&(n.data.xs[i]=n.getXValuesOfXKey(r,n.data.targets)):n.data.xs[i]=t.map(function(t,e){return e})}),r.forEach(function(t){if(!n.data.xs[t])throw new Error('x is not defined for id = "'+t+'".')}),(i=r.map(function(e,i){var r=a.data_idConverter(e);return{id:r,id_org:e,values:t.map(function(t,o){var s,c=t[n.getXKey(e)],d=null===t[e]||isNaN(t[e])?null:+t[e];return n.isCustomX()&&n.isCategorized()&&void 0!==c?(0===i&&0===o&&(a.axis_x_categories=[]),-1===(s=a.axis_x_categories.indexOf(c))&&(s=a.axis_x_categories.length,a.axis_x_categories.push(c))):s=n.generateTargetX(c,e,o),(void 0===t[e]||n.data.xs[e].length<=o)&&(s=void 0),{x:s,value:d,id:r}}).filter(function(t){return g(t.x)})}})).forEach(function(t){var e;a.data_xSort&&(t.values=t.values.sort(function(t,e){return(t.x||0===t.x?t.x:1/0)-(e.x||0===e.x?e.x:1/0)})),e=0,t.values.forEach(function(t){t.index=e++}),n.data.xs[t.id].sort(function(t,e){return t-e})}),n.hasNegativeValue=n.hasNegativeValueInTargets(i),n.hasPositiveValue=n.hasPositiveValueInTargets(i),a.data_type&&n.setTargetType(n.mapToIds(i).filter(function(t){return!(t in a.data_types)}),a.data_type),i.forEach(function(t){n.addCache(t.id_org,t)}),i},A.isX=function(t){var e=this.config;return 
e.data_x&&t===e.data_x||y(e.data_xs)&&S(e.data_xs,t)},A.isNotX=function(t){return!this.isX(t)},A.getXKey=function(t){var e=this.config;return e.data_x?e.data_x:y(e.data_xs)?e.data_xs[t]:null},A.getXValuesOfXKey=function(t,e){var i,n=this;return(e&&y(e)?n.mapToIds(e):[]).forEach(function(e){n.getXKey(e)===t&&(i=n.data.xs[e])}),i},A.getIndexByX=function(t){var e=this,i=e.filterByX(e.data.targets,t);return i.length?i[0].index:null},A.getXValue=function(t,e){var i=this;return t in i.data.xs&&i.data.xs[t]&&c(i.data.xs[t][e])?i.data.xs[t][e]:e},A.getOtherTargetXs=function(){var t=this,e=Object.keys(t.data.xs);return e.length?t.data.xs[e[0]]:null},A.getOtherTargetX=function(t){var e=this.getOtherTargetXs();return e&&t1},A.isMultipleX=function(){return y(this.config.data_xs)||!this.config.data_xSort||this.hasType("scatter")},A.addName=function(t){var e,i=this;return t&&(e=i.config.data_names[t.id],t.name=void 0!==e?e:t.id),t},A.getValueOnIndex=function(t,e){var i=t.filter(function(t){return t.index===e});return i.length?i[0]:null},A.updateTargetX=function(t,e){var i=this;t.forEach(function(t){t.values.forEach(function(n,a){n.x=i.generateTargetX(e[a],t.id,a)}),i.data.xs[t.id]=e})},A.updateTargetXs=function(t,e){var i=this;t.forEach(function(t){e[t.id]&&i.updateTargetX([t],e[t.id])})},A.generateTargetX=function(t,e,i){var n=this;return n.isTimeSeries()?t?n.parseDate(t):n.parseDate(n.getXValue(e,i)):n.isCustomX()&&!n.isCategorized()?c(t)?+t:n.getXValue(e,i):i},A.cloneTarget=function(t){return{id:t.id,id_org:t.id_org,values:t.values.map(function(t){return{x:t.x,value:t.value,id:t.id}})}},A.updateXs=function(){var t=this;t.data.targets.length&&(t.xs=[],t.data.targets[0].values.forEach(function(e){t.xs[e.index]=e.x}))},A.getPrevX=function(t){var e=this.xs[t-1];return void 0!==e?e:null},A.getNextX=function(t){var e=this.xs[t+1];return void 0!==e?e:null},A.getMaxDataCount=function(){var t=this;return t.d3.max(t.data.targets,function(t){return 
t.values.length})},A.getMaxDataCountTarget=function(t){var e,i=t.length,n=0;return i>1?t.forEach(function(t){t.values.length>n&&(e=t,n=t.values.length)}):e=i?t[0]:null,e},A.getEdgeX=function(t){var e=this;return t.length?[e.d3.min(t,function(t){return t.values[0].x}),e.d3.max(t,function(t){return t.values[t.values.length-1].x})]:[0,0]},A.mapToIds=function(t){return t.map(function(t){return t.id})},A.mapToTargetIds=function(t){var e=this;return t?[].concat(t):e.mapToIds(e.data.targets)},A.hasTarget=function(t,e){var i,n=this.mapToIds(t);for(i=0;ie?1:t>=e?0:NaN})},A.addHiddenTargetIds=function(t){t=t instanceof Array?t:new Array(t);for(var e=0;e0})},A.isOrderDesc=function(){var t=this.config;return"string"==typeof t.data_order&&"desc"===t.data_order.toLowerCase()},A.isOrderAsc=function(){var t=this.config;return"string"==typeof t.data_order&&"asc"===t.data_order.toLowerCase()},A.orderTargets=function(t){var e=this,i=e.config,n=e.isOrderAsc(),a=e.isOrderDesc();return n||a?t.sort(function(t,e){var i=function(t,e){return t+Math.abs(e.value)},a=t.values.reduce(i,0),r=e.values.reduce(i,0);return n?r-a:a-r}):d(i.data_order)?t.sort(i.data_order):l(i.data_order)&&t.sort(function(t,e){return i.data_order.indexOf(t.id)-i.data_order.indexOf(e.id)}),t},A.filterByX=function(t,e){return this.d3.merge(t.map(function(t){return t.values})).filter(function(t){return t.x-e==0})},A.filterRemoveNull=function(t){return t.filter(function(t){return c(t.value)})},A.filterByXDomain=function(t,e){return t.map(function(t){return{id:t.id,id_org:t.id_org,values:t.values.filter(function(t){return e[0]<=t.x&&t.x<=e[1]})}})},A.hasDataLabel=function(){var t=this.config;return!("boolean"!=typeof t.data_labels||!t.data_labels)||!("object"!==s(t.data_labels)||!y(t.data_labels))},A.getDataLabelLength=function(t,e,i){var n=this,a=[0,0];return n.selectChart.select("svg").selectAll(".dummy").data([t,e]).enter().append("text").text(function(t){return 
n.dataLabelFormat(t.id)(t)}).each(function(t,e){a[e]=1.3*this.getBoundingClientRect()[i]}).remove(),a},A.isNoneArc=function(t){return this.hasTarget(this.data.targets,t.id)},A.isArc=function(t){return"data"in t&&this.hasTarget(this.data.targets,t.data.id)},A.findSameXOfValues=function(t,e){var i,n=t[e].x,a=[];for(i=e-1;i>=0&&n===t[i].x;i--)a.push(t[i]);for(i=e;i0)for(o=s.hasNegativeValueInTargets(t),e=0;e=0})).length)for(n=a[0],o&&l[n]&&l[n].forEach(function(t,e){l[n][e]=t<0?t:0}),i=1;i0||(l[n][e]+=+t)});return s.d3.min(Object.keys(l).map(function(t){return s.d3.min(l[t])}))},A.getYDomainMax=function(t){var e,i,n,a,r,o,s=this,c=s.config,d=s.mapToIds(t),l=s.getValuesAsIdKeyed(t);if(c.data_groups.length>0)for(o=s.hasPositiveValueInTargets(t),e=0;e=0})).length)for(n=a[0],o&&l[n]&&l[n].forEach(function(t,e){l[n][e]=t>0?t:0}),i=1;i=0&&b>=0,g=v<=0&&b<=0,(c(S)&&h||c(w)&&g)&&(T=!1),T&&(h&&(v=0),g&&(b=0)),a=Math.abs(b-v),r=o=.1*a,void 0!==A&&(b=A+(s=Math.max(Math.abs(v),Math.abs(b))),v=A-s),L?(d=p.getDataLabelLength(v,b,"width"),l=_(p.y.range()),r+=a*((u=[d[0]/l,d[1]/l])[1]/(1-u[0]-u[1])),o+=a*(u[0]/(1-u[0]-u[1]))):C&&(d=p.getDataLabelLength(v,b,"height"),r+=p.axis.convertPixelsToAxisPadding(d[1],a),o+=p.axis.convertPixelsToAxisPadding(d[0],a)),"y"===e&&y(f.axis_y_padding)&&(r=p.axis.getPadding(f.axis_y_padding,"top",r,a),o=p.axis.getPadding(f.axis_y_padding,"bottom",o,a)),"y2"===e&&y(f.axis_y2_padding)&&(r=p.axis.getPadding(f.axis_y2_padding,"top",r,a),o=p.axis.getPadding(f.axis_y2_padding,"bottom",o,a)),T&&(h&&(o=v),g&&(r=-b)),n=[v-o,b+r],P?n.reverse():n)},A.getXDomainMin=function(t){var e=this,i=e.config;return void 0!==i.axis_x_min?e.isTimeSeries()?this.parseDate(i.axis_x_min):i.axis_x_min:e.d3.min(t,function(t){return e.d3.min(t.values,function(t){return t.x})})},A.getXDomainMax=function(t){var e=this,i=e.config;return void 0!==i.axis_x_max?e.isTimeSeries()?this.parseDate(i.axis_x_max):i.axis_x_max:e.d3.max(t,function(t){return e.d3.max(t.values,function(t){return 
t.x})})},A.getXDomainPadding=function(t){var e,i,n,a,r=this,o=r.config,d=t[1]-t[0];return i=r.isCategorized()?0:r.hasType("bar")?(e=r.getMaxDataCount())>1?d/(e-1)/2:.5:.01*d,"object"===s(o.axis_x_padding)&&y(o.axis_x_padding)?(n=c(o.axis_x_padding.left)?o.axis_x_padding.left:i,a=c(o.axis_x_padding.right)?o.axis_x_padding.right:i):n=a="number"==typeof o.axis_x_padding?o.axis_x_padding:i,{left:n,right:a}},A.getXDomain=function(t){var e=this,i=[e.getXDomainMin(t),e.getXDomainMax(t)],n=i[0],a=i[1],r=e.getXDomainPadding(i),o=0,s=0;return n-a!=0||e.isCategorized()||(e.isTimeSeries()?(n=new Date(.5*n.getTime()),a=new Date(1.5*a.getTime())):(n=0===n?1:.5*n,a=0===a?-1:1.5*a)),(n||0===n)&&(o=e.isTimeSeries()?new Date(n.getTime()-r.left):n-r.left),(a||0===a)&&(s=e.isTimeSeries()?new Date(a.getTime()+r.right):a+r.right),[o,s]},A.updateXDomain=function(t,e,i,n,a){var r=this,o=r.config;return i&&(r.x.domain(a||r.d3.extent(r.getXDomain(t))),r.orgXDomain=r.x.domain(),o.zoom_enabled&&r.zoom.scale(r.x).updateScaleExtent(),r.subX.domain(r.x.domain()),r.brush&&r.brush.scale(r.subX)),e&&(r.x.domain(a||(!r.brush||r.brush.empty()?r.orgXDomain:r.brush.extent())),o.zoom_enabled&&r.zoom.scale(r.x).updateScaleExtent()),n&&r.x.domain(r.trimXDomain(r.x.orgDomain())),r.x.domain()},A.trimXDomain=function(t){var e=this.getZoomDomain(),i=e[0],n=e[1];return t[0]<=i&&(t[1]=+t[1]+(i-t[0]),t[0]=i),n<=t[1]&&(t[0]=+t[0]-(t[1]-n),t[1]=n),t},A.drag=function(t){var e,i,n,a,r,s,c,d,l=this,u=l.config,h=l.main,g=l.d3;l.hasArcType()||u.data_selection_enabled&&(u.zoom_enabled&&!l.zoom.altDomain||u.data_selection_multiple&&(e=l.dragStart[0],i=l.dragStart[1],n=t[0],a=t[1],r=Math.min(e,n),s=Math.max(e,n),c=u.data_selection_grouped?l.margin.top:Math.min(i,a),d=u.data_selection_grouped?l.height:Math.max(i,a),h.select("."+o.dragarea).attr("x",r).attr("y",c).attr("width",s-r).attr("height",d-c),h.selectAll("."+o.shapes).selectAll("."+o.shape).filter(function(t){return 
u.data_selection_isselectable(t)}).each(function(t,e){var i,n,a,u,h,p,f=g.select(this),_=f.classed(o.SELECTED),x=f.classed(o.INCLUDED),y=!1;if(f.classed(o.circle))i=1*f.attr("cx"),n=1*f.attr("cy"),h=l.togglePoint,y=rd&&(c=c.filter(function(t){return(""+t).indexOf(".")<0}));return c},A.getGridFilterToRemove=function(t){return t?function(e){var i=!1;return[].concat(t).forEach(function(t){("value"in t&&e.value===t.value||"class"in t&&e.class===t.class)&&(i=!0)}),i}:function(){return!0}},A.removeGridLines=function(t,e){var i=this,n=i.config,a=i.getGridFilterToRemove(t),r=function(t){return!a(t)},s=e?o.xgridLines:o.ygridLines,c=e?o.xgridLine:o.ygridLine;i.main.select("."+s).selectAll("."+c).filter(a).transition().duration(n.transition_duration).style("opacity",0).remove(),e?n.grid_x_lines=n.grid_x_lines.filter(r):n.grid_y_lines=n.grid_y_lines.filter(r)},A.initEventRect=function(){this.main.select("."+o.chart).append("g").attr("class",o.eventRects).style("fill-opacity",0)},A.redrawEventRect=function(){var t,e,i=this,n=i.config,a=i.isMultipleX(),r=i.main.select("."+o.eventRects).style("cursor",n.zoom_enabled?n.axis_rotated?"ns-resize":"ew-resize":null).classed(o.eventRectsMultiple,a).classed(o.eventRectsSingle,!a);r.selectAll("."+o.eventRect).remove(),i.eventRect=r.selectAll("."+o.eventRect),a?(t=i.eventRect.data([0]),i.generateEventRectsForMultipleXs(t.enter()),i.updateEventRect(t)):(e=i.getMaxDataCountTarget(i.data.targets),r.datum(e?e.values:[]),i.eventRect=r.selectAll("."+o.eventRect),t=i.eventRect.data(function(t){return t}),i.generateEventRectsForSingleX(t.enter()),i.updateEventRect(t),t.exit().remove())},A.updateEventRect=function(t){var e,i,n,a,r,o,s=this,c=s.config;t=t||s.eventRect.data(function(t){return t}),s.isMultipleX()?(e=0,i=0,n=s.width,a=s.height):(!s.isCustomX()&&!s.isTimeSeries()||s.isCategorized()?(r=s.getEventRectWidth(),o=function(t){return s.x(t.x)-r/2}):(s.updateXs(),r=function(t){var e=s.getPrevX(t.index),i=s.getNextX(t.index);return 
null===e&&null===i?c.axis_rotated?s.height:s.width:(null===e&&(e=s.x.domain()[0]),null===i&&(i=s.x.domain()[1]),Math.max(0,(s.x(i)-s.x(e))/2))},o=function(t){var e=s.getPrevX(t.index),i=s.getNextX(t.index),n=s.data.xs[t.id][t.index];return null===e&&null===i?0:(null===e&&(e=s.x.domain()[0]),(s.x(n)+s.x(e))/2)}),e=c.axis_rotated?0:o,i=c.axis_rotated?o:0,n=c.axis_rotated?s.width:r,a=c.axis_rotated?r:s.height),t.attr("class",s.classEvent.bind(s)).attr("x",e).attr("y",i).attr("width",n).attr("height",a)},A.generateEventRectsForSingleX=function(t){var e=this,i=e.d3,n=e.config;t.append("rect").attr("class",e.classEvent.bind(e)).style("cursor",n.data_selection_enabled&&n.data_selection_grouped?"pointer":null).on("mouseover",function(t){var i=t.index;e.dragging||e.flowing||e.hasArcType()||(n.point_focus_expand_enabled&&e.expandCircles(i,null,!0),e.expandBars(i,null,!0),e.main.selectAll("."+o.shape+"-"+i).each(function(t){n.data_onmouseover.call(e.api,t)}))}).on("mouseout",function(t){var i=t.index;e.config&&(e.hasArcType()||(e.hideXGridFocus(),e.hideTooltip(),e.unexpandCircles(),e.unexpandBars(),e.main.selectAll("."+o.shape+"-"+i).each(function(t){n.data_onmouseout.call(e.api,t)})))}).on("mousemove",function(t){var a,r=t.index,s=e.svg.select("."+o.eventRect+"-"+r);e.dragging||e.flowing||e.hasArcType()||(e.isStepType(t)&&"step-after"===e.config.line_step_type&&i.mouse(this)[0]=0}).classed(o.legendItemFocused,e).transition().duration(100).style("opacity",function(){return(e?i.opacityForLegend:i.opacityForUnfocusedLegend).call(i,i.d3.select(this))})},A.revertLegend=function(){var t=this,e=t.d3;t.legend.selectAll("."+o.legendItem).classed(o.legendItemFocused,!1).transition().duration(100).style("opacity",function(){return t.opacityForLegend(e.select(this))})},A.showLegend=function(t){var 
e=this,i=e.config;i.legend_show||(i.legend_show=!0,e.legend.style("visibility","visible"),e.legendHasRendered||e.updateLegendWithDefaults()),e.removeHiddenLegendIds(t),e.legend.selectAll(e.selectorLegends(t)).style("visibility","visible").transition().style("opacity",function(){return e.opacityForLegend(e.d3.select(this))})},A.hideLegend=function(t){var e=this,i=e.config;i.legend_show&&x(t)&&(i.legend_show=!1,e.legend.style("visibility","hidden")),e.addHiddenLegendIds(t),e.legend.selectAll(e.selectorLegends(t)).style("opacity",0).style("visibility","hidden")},A.clearLegendItemTextBoxCache=function(){this.legendItemTextBox={}},A.updateLegend=function(t,e,i){function n(t,e){return b.legendItemTextBox[e]||(b.legendItemTextBox[e]=b.getTextRect(t.textContent,o.legendItem,t)),b.legendItemTextBox[e]}function a(e,i,a){function r(t,e){e||(o=(p-E-g)/2)=L)&&(L=u),(!C||h>=C)&&(C=h),s=b.isLegendRight||b.isLegendInset?C:L,A.legend_equally?(Object.keys(O).forEach(function(t){O[t]=L}),Object.keys(R).forEach(function(t){R[t]=C}),(o=(p-s*t.length)/2)0&&0===v.size()&&(v=b.legend.insert("g","."+o.legendItem).attr("class",o.legendBackground).append("rect")),y=b.legend.selectAll("text").data(t).text(function(t){return void 0!==A.data_names[t]?A.data_names[t]:t}).each(function(t,e){a(this,t,e)}),(_?y.transition():y).attr("x",s).attr("y",l),S=b.legend.selectAll("rect."+o.legendItemEvent).data(t),(_?S.transition():S).attr("width",function(t){return O[t]}).attr("height",function(t){return 
R[t]}).attr("x",c).attr("y",u),w=b.legend.selectAll("line."+o.legendItemTile).data(t),(_?w.transition():w).style("stroke",b.color).attr("x1",h).attr("y1",p).attr("x2",g).attr("y2",p),v&&(_?v.transition():v).attr("height",b.getLegendHeight()-12).attr("width",L*(X+1)+10),b.legend.selectAll("."+o.legendItem).classed(o.legendItemHidden,function(t){return!b.isTargetToShow(t)}),b.updateLegendItemWidth(L),b.updateLegendItemHeight(C),b.updateLegendStep(X),b.updateSizes(),b.updateScales(),b.updateSvgSize(),b.transformAll(x,i),b.legendHasRendered=!0},A.initRegion=function(){var t=this;t.region=t.main.append("g").attr("clip-path",t.clipPath).attr("class",o.regions)},A.updateRegion=function(t){var e=this,i=e.config;e.region.style("visibility",e.hasArcType()?"hidden":"visible"),e.mainRegion=e.main.select("."+o.regions).selectAll("."+o.region).data(i.regions),e.mainRegion.enter().append("g").append("rect").style("fill-opacity",0),e.mainRegion.attr("class",e.classRegion.bind(e)),e.mainRegion.exit().transition().duration(t).style("opacity",0).remove()},A.redrawRegion=function(t){var e=this,i=e.mainRegion.selectAll("rect").each(function(){var t=e.d3.select(this.parentNode).datum();e.d3.select(this).datum(t)}),n=e.regionX.bind(e),a=e.regionY.bind(e),r=e.regionWidth.bind(e),o=e.regionHeight.bind(e);return[(t?i.transition():i).attr("x",n).attr("y",a).attr("width",r).attr("height",o).style("fill-opacity",function(t){return c(t.opacity)?t.opacity:.1})]},A.regionX=function(t){var e=this,i=e.config,n="y"===t.axis?e.y:e.y2;return"y"===t.axis||"y2"===t.axis?i.axis_rotated&&"start"in t?n(t.start):0:i.axis_rotated?0:"start"in t?e.x(e.isTimeSeries()?e.parseDate(t.start):t.start):0},A.regionY=function(t){var e=this,i=e.config,n="y"===t.axis?e.y:e.y2;return"y"===t.axis||"y2"===t.axis?i.axis_rotated?0:"end"in t?n(t.end):0:i.axis_rotated&&"start"in t?e.x(e.isTimeSeries()?e.parseDate(t.start):t.start):0},A.regionWidth=function(t){var e,i=this,n=i.config,a=i.regionX(t),r="y"===t.axis?i.y:i.y2;return 
e="y"===t.axis||"y2"===t.axis?n.axis_rotated&&"end"in t?r(t.end):i.width:n.axis_rotated?i.width:"end"in t?i.x(i.isTimeSeries()?i.parseDate(t.end):t.end):i.width,ei.bar_width_max?i.bar_width_max:n},A.getBars=function(t,e){var i=this;return(e?i.main.selectAll("."+o.bars+i.getTargetSelectorSuffix(e)):i.main).selectAll("."+o.bar+(c(t)?"-"+t:""))},A.expandBars=function(t,e,i){var n=this;i&&n.unexpandBars(),n.getBars(t,e).classed(o.EXPANDED,!0)},A.unexpandBars=function(t){this.getBars(t).classed(o.EXPANDED,!1)},A.generateDrawBar=function(t,e){var i=this,n=i.config,a=i.generateGetBarPoints(t,e);return function(t,e){var i=a(t,e),r=n.axis_rotated?1:0,o=n.axis_rotated?0:1;return"M "+i[0][r]+","+i[0][o]+" L"+i[1][r]+","+i[1][o]+" L"+i[2][r]+","+i[2][o]+" L"+i[3][r]+","+i[3][o]+" z"}},A.generateGetBarPoints=function(t,e){var i=this,n=e?i.subXAxis:i.xAxis,a=t.__max__+1,r=i.getBarW(n,a),o=i.getShapeX(r,a,t,!!e),s=i.getShapeY(!!e),c=i.getShapeOffset(i.isBarType,t,!!e),d=r*(i.config.bar_space/2),l=e?i.getSubYScale:i.getYScale;return function(t,e){var n=l.call(i,t.id)(0),a=c(t,e)||n,u=o(t),h=s(t);return i.config.axis_rotated&&(0=0&&(d+=s(a[o].value)-c))}),d}},A.isWithinShape=function(t,e){var i,n=this,a=n.d3.select(t);return n.isTargetToShow(e.id)?"circle"===t.nodeName?i=n.isStepType(e)?n.isWithinStep(t,n.getYScale(e.id)(e.value)):n.isWithinCircle(t,1.5*n.pointSelectR(e)):"path"===t.nodeName&&(i=!a.classed(o.bar)||n.isWithinBar(t)):i=!1,i},A.getInterpolate=function(t){var e=this,i=e.isInterpolationType(e.config.spline_interpolation_type)?e.config.spline_interpolation_type:"cardinal";return e.isSplineType(t)?i:e.isStepType(t)?e.config.line_step_type:"linear"},A.initLine=function(){this.main.select("."+o.chart).append("g").attr("class",o.chartLines)},A.updateTargetsForLine=function(t){var 
e,i=this,n=i.config,a=i.classChartLine.bind(i),r=i.classLines.bind(i),s=i.classAreas.bind(i),c=i.classCircles.bind(i),d=i.classFocus.bind(i);(e=i.main.select("."+o.chartLines).selectAll("."+o.chartLine).data(t).attr("class",function(t){return a(t)+d(t)}).enter().append("g").attr("class",a).style("opacity",0).style("pointer-events","none")).append("g").attr("class",r),e.append("g").attr("class",s),e.append("g").attr("class",function(t){return i.generateClass(o.selectedCircles,t.id)}),e.append("g").attr("class",c).style("cursor",function(t){return n.data_selection_isselectable(t)?"pointer":null}),t.forEach(function(t){i.main.selectAll("."+o.selectedCircles+i.getTargetSelectorSuffix(t.id)).selectAll("."+o.selectedCircle).each(function(e){e.value=t.values[e.index].value})})},A.updateLine=function(t){var e=this;e.mainLine=e.main.selectAll("."+o.lines).selectAll("."+o.line).data(e.lineData.bind(e)),e.mainLine.enter().append("path").attr("class",e.classLine.bind(e)).style("stroke",e.color),e.mainLine.style("opacity",e.initialOpacity.bind(e)).style("shape-rendering",function(t){return e.isStepType(t)?"crispEdges":""}).attr("transform",null),e.mainLine.exit().transition().duration(t).style("opacity",0).remove()},A.redrawLine=function(t,e){return[(e?this.mainLine.transition(Math.random().toString()):this.mainLine).attr("d",t).style("stroke",this.color).style("opacity",1)]},A.generateDrawLine=function(t,e){var i=this,n=i.config,a=i.d3.svg.line(),r=i.generateGetLinePoints(t,e),o=e?i.getSubYScale:i.getYScale,s=function(t){return(e?i.subxx:i.xx).call(i,t)},c=function(t,e){return n.data_groups.length>0?r(t,e)[0][1]:o.call(i,t.id)(t.value)};return a=n.axis_rotated?a.x(c).y(s):a.x(s).y(c),n.line_connectNull||(a=a.defined(function(t){return null!=t.value})),function(t){var r,s=n.line_connectNull?i.filterRemoveNull(t.values):t.values,c=e?i.x:i.subX,d=o.call(i,t.id),l=0,u=0;return 
i.isLineType(t)?n.data_regions[t.id]?r=i.lineWithRegions(s,c,d,n.data_regions[t.id]):(i.isStepType(t)&&(s=i.convertValuesToStep(s)),r=a.interpolate(i.getInterpolate(t))(s)):(s[0]&&(l=c(s[0].x),u=d(s[0].value)),r=n.axis_rotated?"M "+u+" "+l:"M "+l+" "+u),r||"M 0 0"}},A.generateGetLinePoints=function(t,e){var i=this,n=i.config,a=t.__max__+1,r=i.getShapeX(0,a,t,!!e),o=i.getShapeY(!!e),s=i.getShapeOffset(i.isLineType,t,!!e),c=e?i.getSubYScale:i.getYScale;return function(t,e){var a=c.call(i,t.id)(0),d=s(t,e)||a,l=r(t),u=o(t);return n.axis_rotated&&(00?r(t,e)[0][1]:o.call(i,t.id)(i.getAreaBaseValue(t.id))},d=function(t,e){return n.data_groups.length>0?r(t,e)[1][1]:o.call(i,t.id)(t.value)};return a=n.axis_rotated?a.x0(c).x1(d).y(s):a.x(s).y0(n.area_above?0:c).y1(d),n.line_connectNull||(a=a.defined(function(t){return null!==t.value})),function(t){var e,r=n.line_connectNull?i.filterRemoveNull(t.values):t.values,o=0,s=0;return i.isAreaType(t)?(i.isStepType(t)&&(r=i.convertValuesToStep(r)),e=a.interpolate(i.getInterpolate(t))(r)):(r[0]&&(o=i.x(r[0].x),s=i.getYScale(t.id)(r[0].value)),e=n.axis_rotated?"M "+s+" "+o:"M "+o+" "+s),e||"M 0 0"}},A.getAreaBaseValue=function(){return 0},A.generateGetAreaPoints=function(t,e){var i=this,n=i.config,a=t.__max__+1,r=i.getShapeX(0,a,t,!!e),o=i.getShapeY(!!e),s=i.getShapeOffset(i.isAreaType,t,!!e),c=e?i.getSubYScale:i.getYScale;return function(t,e){var a=c.call(i,t.id)(0),d=s(t,e)||a,l=r(t),u=o(t);return n.axis_rotated&&(00?(t=i.getShapeIndices(i.isLineType),e=i.generateGetLinePoints(t),i.circleY=function(t,i){return e(t,i)[0][1]}):i.circleY=function(t){return i.getYScale(t.id)(t.value)}},A.getCircles=function(t,e){var i=this;return(e?i.main.selectAll("."+o.circles+i.getTargetSelectorSuffix(e)):i.main).selectAll("."+o.circle+(c(t)?"-"+t:""))},A.expandCircles=function(t,e,i){var n=this,a=n.pointExpandedR.bind(n);i&&n.unexpandCircles(),n.getCircles(t,e).classed(o.EXPANDED,!0).attr("r",a)},A.unexpandCircles=function(t){var 
e=this,i=e.pointR.bind(e);e.getCircles(t).filter(function(){return e.d3.select(this).classed(o.EXPANDED)}).classed(o.EXPANDED,!1).attr("r",i)},A.pointR=function(t){var e=this,i=e.config;return e.isStepType(t)?0:d(i.point_r)?i.point_r(t):i.point_r},A.pointExpandedR=function(t){var e=this,i=e.config;return i.point_focus_expand_enabled?i.point_focus_expand_r?i.point_focus_expand_r:1.75*e.pointR(t):e.pointR(t)},A.pointSelectR=function(t){var e=this,i=e.config;return d(i.point_select_r)?i.point_select_r(t):i.point_select_r?i.point_select_r:4*e.pointR(t)},A.isWithinCircle=function(t,e){var i=this.d3,n=i.mouse(t),a=i.select(t),r=+a.attr("cx"),o=+a.attr("cy");return Math.sqrt(Math.pow(r-n[0],2)+Math.pow(o-n[1],2))0?i:320/(t.hasType("gauge")&&!e.gauge_fullCircle?2:1)},A.getCurrentPaddingTop=function(){var t=this,e=t.config,i=c(e.padding_top)?e.padding_top:0;return t.title&&t.title.node()&&(i+=t.getTitlePadding()),i},A.getCurrentPaddingBottom=function(){var t=this.config;return c(t.padding_bottom)?t.padding_bottom:0},A.getCurrentPaddingLeft=function(t){var e=this,i=e.config;return c(i.padding_left)?i.padding_left:i.axis_rotated?i.axis_x_show?Math.max(p(e.getAxisWidthByAxisId("x",t)),40):1:!i.axis_y_show||i.axis_y_inner?e.axis.getYAxisLabelPosition().isOuter?30:1:p(e.getAxisWidthByAxisId("y",t))},A.getCurrentPaddingRight=function(){var t=this,e=t.config,i=t.isLegendRight?t.getLegendWidth()+20:0;return c(e.padding_right)?e.padding_right+1:e.axis_rotated?10+i:!e.axis_y2_show||e.axis_y2_inner?2+i+(t.axis.getY2AxisLabelPosition().isOuter?20:0):p(t.getAxisWidthByAxisId("y2"))+i},A.getParentRectValue=function(t){for(var e,i=this.selectChart.node();i&&"BODY"!==i.tagName;){try{e=i.getBoundingClientRect()[t]}catch(n){"width"===t&&(e=i.offsetWidth)}if(e)break;i=i.parentNode}return e},A.getParentWidth=function(){return this.getParentRectValue("width")},A.getParentHeight=function(){var t=this.selectChart.style("height");return 
t.indexOf("px")>0?+t.replace("px",""):0},A.getSvgLeft=function(t){var e=this,i=e.config,n=i.axis_rotated||!i.axis_rotated&&!i.axis_y_inner,a=i.axis_rotated?o.axisX:o.axisY,r=e.main.select("."+a).node(),s=r&&n?r.getBoundingClientRect():{right:0},c=e.selectChart.node().getBoundingClientRect(),d=e.hasArcType(),l=s.right-c.left-(d?0:e.getCurrentPaddingLeft(t));return l>0?l:0},A.getAxisWidthByAxisId=function(t,e){var i=this,n=i.axis.getLabelPositionById(t);return i.axis.getMaxTickWidth(t,e)+(n.isInner?20:40)},A.getHorizontalAxisHeight=function(t){var e=this,i=e.config,n=30;return"x"!==t||i.axis_x_show?"x"===t&&i.axis_x_height?i.axis_x_height:"y"!==t||i.axis_y_show?"y2"!==t||i.axis_y2_show?("x"===t&&!i.axis_rotated&&i.axis_x_tick_rotate&&(n=30+e.axis.getMaxTickWidth(t)*Math.cos(Math.PI*(90-i.axis_x_tick_rotate)/180)),"y"===t&&i.axis_rotated&&i.axis_y_tick_rotate&&(n=30+e.axis.getMaxTickWidth(t)*Math.cos(Math.PI*(90-i.axis_y_tick_rotate)/180)),n+(e.axis.getLabelPositionById(t).isInner?0:10)+("y2"===t?-10:0)):e.rotated_padding_top:!i.legend_show||e.isLegendRight||e.isLegendInset?1:10:8},A.getEventRectWidth=function(){return Math.max(0,this.xAxis.tickInterval())},A.initBrush=function(){var t=this,e=t.d3;t.brush=e.svg.brush().on("brush",function(){t.redrawForBrush()}),t.brush.update=function(){return t.context&&t.context.select("."+o.brush).call(this),this},t.brush.scale=function(e){return t.config.axis_rotated?this.y(e):this.x(e)}},A.initSubchart=function(){var 
t=this,e=t.config,i=t.context=t.svg.append("g").attr("transform",t.getTranslate("context")),n=e.subchart_show?"visible":"hidden";i.style("visibility",n),i.append("g").attr("clip-path",t.clipPathForSubchart).attr("class",o.chart),i.select("."+o.chart).append("g").attr("class",o.chartBars),i.select("."+o.chart).append("g").attr("class",o.chartLines),i.append("g").attr("clip-path",t.clipPath).attr("class",o.brush).call(t.brush),t.axes.subx=i.append("g").attr("class",o.axisX).attr("transform",t.getTranslate("subx")).attr("clip-path",e.axis_rotated?"":t.clipPathForXAxis).style("visibility",e.subchart_axis_x_show?n:"hidden")},A.updateTargetsForSubchart=function(t){var e,i=this,n=i.context,a=i.config,r=i.classChartBar.bind(i),s=i.classBars.bind(i),c=i.classChartLine.bind(i),d=i.classLines.bind(i),l=i.classAreas.bind(i);a.subchart_show&&(n.select("."+o.chartBars).selectAll("."+o.chartBar).data(t).attr("class",r).enter().append("g").style("opacity",0).attr("class",r).append("g").attr("class",s),(e=n.select("."+o.chartLines).selectAll("."+o.chartLine).data(t).attr("class",c).enter().append("g").style("opacity",0).attr("class",c)).append("g").attr("class",d),e.append("g").attr("class",l),n.selectAll("."+o.brush+" rect").attr(a.axis_rotated?"width":"height",a.axis_rotated?i.width2:i.height2))},A.updateBarForSubchart=function(t){var e=this;e.contextBar=e.context.selectAll("."+o.bars).selectAll("."+o.bar).data(e.barData.bind(e)),e.contextBar.enter().append("path").attr("class",e.classBar.bind(e)).style("stroke","none").style("fill",e.color),e.contextBar.style("opacity",e.initialOpacity.bind(e)),e.contextBar.exit().transition().duration(t).style("opacity",0).remove()},A.redrawBarForSubchart=function(t,e,i){(e?this.contextBar.transition(Math.random().toString()).duration(i):this.contextBar).attr("d",t).style("opacity",1)},A.updateLineForSubchart=function(t){var 
e=this;e.contextLine=e.context.selectAll("."+o.lines).selectAll("."+o.line).data(e.lineData.bind(e)),e.contextLine.enter().append("path").attr("class",e.classLine.bind(e)).style("stroke",e.color),e.contextLine.style("opacity",e.initialOpacity.bind(e)),e.contextLine.exit().transition().duration(t).style("opacity",0).remove()},A.redrawLineForSubchart=function(t,e,i){(e?this.contextLine.transition(Math.random().toString()).duration(i):this.contextLine).attr("d",t).style("opacity",1)},A.updateAreaForSubchart=function(t){var e=this,i=e.d3;e.contextArea=e.context.selectAll("."+o.areas).selectAll("."+o.area).data(e.lineData.bind(e)),e.contextArea.enter().append("path").attr("class",e.classArea.bind(e)).style("fill",e.color).style("opacity",function(){return e.orgAreaOpacity=+i.select(this).style("opacity"),0}),e.contextArea.style("opacity",0),e.contextArea.exit().transition().duration(t).style("opacity",0).remove()},A.redrawAreaForSubchart=function(t,e,i){(e?this.contextArea.transition(Math.random().toString()).duration(i):this.contextArea).attr("d",t).style("fill",this.color).style("opacity",this.orgAreaOpacity)},A.redrawSubchart=function(t,e,i,n,a,r,o){var s,c,d,l=this,u=l.d3,h=l.config;l.context.style("visibility",h.subchart_show?"visible":"hidden"),h.subchart_show&&(u.event&&"zoom"===u.event.type&&l.brush.extent(l.x.orgDomain()).update(),t&&(l.brush.empty()||l.brush.extent(l.x.orgDomain()).update(),s=l.generateDrawArea(a,!0),c=l.generateDrawBar(r,!0),d=l.generateDrawLine(o,!0),l.updateBarForSubchart(i),l.updateLineForSubchart(i),l.updateAreaForSubchart(i),l.redrawBarForSubchart(c,i,i),l.redrawLineForSubchart(d,i,i),l.redrawAreaForSubchart(s,i,i)))},A.redrawForBrush=function(){var t=this,e=t.x;t.redraw({withTransition:!1,withY:t.config.zoom_rescale,withSubchart:!1,withUpdateXDomain:!0,withDimension:!1}),t.config.subchart_onbrush.call(t.api,e.orgDomain())},A.transformContext=function(t,e){var 
i,n=this;e&&e.axisSubX?i=e.axisSubX:(i=n.context.select("."+o.axisX),t&&(i=i.transition())),n.context.attr("transform",n.getTranslate("context")),i.attr("transform",n.getTranslate("subx"))},A.getDefaultExtent=function(){var t=this,e=t.config,i=d(e.axis_x_extent)?e.axis_x_extent(t.getXDomain(t.data.targets)):e.axis_x_extent;return t.isTimeSeries()&&(i=[t.parseDate(i[0]),t.parseDate(i[1])]),i},A.initText=function(){var t=this;t.main.select("."+o.chart).append("g").attr("class",o.chartTexts),t.mainText=t.d3.selectAll([])},A.updateTargetsForText=function(t){var e=this,i=e.classChartText.bind(e),n=e.classTexts.bind(e),a=e.classFocus.bind(e);e.main.select("."+o.chartTexts).selectAll("."+o.chartText).data(t).attr("class",function(t){return i(t)+a(t)}).enter().append("g").attr("class",i).style("opacity",0).style("pointer-events","none").append("g").attr("class",n)},A.updateText=function(t){var e=this,i=e.config,n=e.barOrLineData.bind(e),a=e.classText.bind(e);e.mainText=e.main.selectAll("."+o.texts).selectAll("."+o.text).data(n),e.mainText.enter().append("text").attr("class",a).attr("text-anchor",function(t){return i.axis_rotated?t.value<0?"end":"start":"middle"}).style("stroke","none").style("fill",function(t){return e.color(t)}).style("fill-opacity",0),e.mainText.text(function(t,i,n){return e.dataLabelFormat(t.id)(t.value,t.id,i,n)}),e.mainText.exit().transition().duration(t).style("fill-opacity",0).remove()},A.redrawText=function(t,e,i,n){return[(n?this.mainText.transition():this.mainText).attr("x",t).attr("y",e).style("fill",this.color).style("fill-opacity",i?0:this.opacityForText.bind(this))]},A.getTextRect=function(t,e,i){var n,a=this.d3.select("body").append("div").classed("c3",!0),r=a.append("svg").style("visibility","hidden").style("position","fixed").style("top",0).style("left",0),o=this.d3.select(i).style("font");return 
r.selectAll(".dummy").data([t]).enter().append("text").classed(e||"",!0).style("font",o).text(t).each(function(){n=this.getBoundingClientRect()}),a.remove(),n},A.generateXYForText=function(t,e,i,n){var a=this,r=a.generateGetAreaPoints(t,!1),o=a.generateGetBarPoints(e,!1),s=a.generateGetLinePoints(i,!1),c=n?a.getXForText:a.getYForText;return function(t,e){var i=a.isAreaType(t)?r:a.isBarType(t)?o:s;return c.call(a,i(t,e),t,this)}},A.getXForText=function(t,e,i){var n,a,r=this,o=i.getBoundingClientRect();return r.config.axis_rotated?(a=r.isBarType(e)?4:6,n=t[2][1]+a*(e.value<0?-1:1)):n=r.hasType("bar")?(t[2][0]+t[0][0])/2:t[0][0],null===e.value&&(n>r.width?n=r.width-o.width:n<0&&(n=4)),n},A.getYForText=function(t,e,i){var n,a=this,r=i.getBoundingClientRect();return a.config.axis_rotated?n=(t[0][0]+t[2][0]+.6*r.height)/2:(n=t[2][1],e.value<0||0===e.value&&!a.hasPositiveValue?(n+=r.height,a.isBarType(e)&&a.isSafari()?n-=3:!a.isBarType(e)&&a.isChrome()&&(n+=3)):n+=a.isBarType(e)?-3:-6),null!==e.value||a.config.axis_rotated||(nthis.height&&(n=this.height-4)),n},A.initTitle=function(){var t=this;t.title=t.svg.append("text").text(t.config.title_text).attr("class",t.CLASS.title)},A.redrawTitle=function(){var t=this;t.title.attr("x",t.xForTitle.bind(t)).attr("y",t.yForTitle.bind(t))},A.xForTitle=function(){var t=this,e=t.config,i=e.title_position||"left";return i.indexOf("right")>=0?t.currentWidth-t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).width-e.title_padding.right:i.indexOf("center")>=0?(t.currentWidth-t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).width)/2:e.title_padding.left},A.yForTitle=function(){var t=this;return t.config.title_padding.top+t.getTextRect(t.title.node().textContent,t.CLASS.title,t.title.node()).height},A.getTitlePadding=function(){var t=this;return t.yForTitle()+t.config.title_padding.bottom},A.initTooltip=function(){var 
t,e=this,i=e.config;if(e.tooltip=e.selectChart.style("position","relative").append("div").attr("class",o.tooltipContainer).style("position","absolute").style("pointer-events","none").style("display","none"),i.tooltip_init_show){if(e.isTimeSeries()&&u(i.tooltip_init_x)){for(i.tooltip_init_x=e.parseDate(i.tooltip_init_x),t=0;t"+(o||0===o?""+o+"":"")),void 0!==(s=w(p(t[r].value,t[r].ratio,t[r].id,t[r].index,t))))){if(null===t[r].name)continue;c=w(g(t[r].name,t[r].ratio,t[r].id,t[r].index)),d=l.levelColor?l.levelColor(t[r].value):n(t[r].id),a+="",a+=""+c+"",a+=""+s+"",a+=""}return a+""},A.tooltipPosition=function(t,e,i,n){var a,r,o,s,c,d=this,l=d.config,u=d.d3,h=d.hasArcType(),g=u.mouse(n);return h?(r=(d.width-(d.isLegendRight?d.getLegendWidth():0))/2+g[0],s=d.height/2+g[1]+20):(a=d.getSvgLeft(!0),l.axis_rotated?(o=(r=a+g[0]+100)+e,c=d.currentWidth-d.getCurrentPaddingRight(),s=d.x(t[0].x)+20):(o=(r=a+d.getCurrentPaddingLeft(!0)+d.x(t[0].x)+20)+e,c=a+d.currentWidth-d.getCurrentPaddingRight(),s=g[1]+15),o>c&&(r-=o-c+20),s+i>d.currentHeight&&(s-=i+30)),s<0&&(s=0),{top:s,left:r}},A.showTooltip=function(t,e){var i,n,a,r=this,o=r.config,s=r.hasArcType(),d=t.filter(function(t){return t&&c(t.value)}),l=o.tooltip_position||A.tooltipPosition;0!==d.length&&o.tooltip_show&&(r.tooltip.html(o.tooltip_contents.call(r,t,r.axis.getXAxisTickFormat(),r.getYFormat(s),r.color)).style("display","block"),i=r.tooltip.property("offsetWidth"),n=r.tooltip.property("offsetHeight"),a=l.call(this,d,i,n,e),r.tooltip.style("top",a.top+"px").style("left",a.left+"px"))},A.hideTooltip=function(){this.tooltip.style("display","none")},A.setTargetType=function(t,e){var i=this,n=i.config;i.mapToTargetIds(t).forEach(function(t){i.withoutFadeIn[t]=e===n.data_types[t],n.data_types[t]=e}),t||(n.data_type=e)},A.hasType=function(t,e){var i=this,n=i.config.data_types,a=!1;return e=e||i.data.targets,e&&e.length?e.forEach(function(e){var 
i=n[e.id];(i&&i.indexOf(t)>=0||!i&&"line"===t)&&(a=!0)}):Object.keys(n).length?Object.keys(n).forEach(function(e){n[e]===t&&(a=!0)}):a=i.config.data_type===t,a},A.hasArcType=function(t){return this.hasType("pie",t)||this.hasType("donut",t)||this.hasType("gauge",t)},A.isLineType=function(t){var e=this.config,i=u(t)?t:t.id;return!e.data_types[i]||["line","spline","area","area-spline","step","area-step"].indexOf(e.data_types[i])>=0},A.isStepType=function(t){var e=u(t)?t:t.id;return["step","area-step"].indexOf(this.config.data_types[e])>=0},A.isSplineType=function(t){var e=u(t)?t:t.id;return["spline","area-spline"].indexOf(this.config.data_types[e])>=0},A.isAreaType=function(t){var e=u(t)?t:t.id;return["area","area-spline","area-step"].indexOf(this.config.data_types[e])>=0},A.isBarType=function(t){var e=u(t)?t:t.id;return"bar"===this.config.data_types[e]},A.isScatterType=function(t){var e=u(t)?t:t.id;return"scatter"===this.config.data_types[e]},A.isPieType=function(t){var e=u(t)?t:t.id;return"pie"===this.config.data_types[e]},A.isGaugeType=function(t){var e=u(t)?t:t.id;return"gauge"===this.config.data_types[e]},A.isDonutType=function(t){var e=u(t)?t:t.id;return"donut"===this.config.data_types[e]},A.isArcType=function(t){return this.isPieType(t)||this.isDonutType(t)||this.isGaugeType(t)},A.lineData=function(t){return this.isLineType(t)?[t]:[]},A.arcData=function(t){return this.isArcType(t.data)?[t]:[]},A.barData=function(t){return this.isBarType(t)?t.values:[]},A.lineOrScatterData=function(t){return this.isLineType(t)||this.isScatterType(t)?t.values:[]},A.barOrLineData=function(t){return this.isBarType(t)||this.isLineType(t)?t.values:[]},A.isInterpolationType=function(t){return["linear","linear-closed","basis","basis-open","basis-closed","bundle","cardinal","cardinal-open","cardinal-closed","monotone"].indexOf(t)>=0},A.isSafari=function(){var t=window.navigator.userAgent;return t.indexOf("Safari")>=0&&t.indexOf("Chrome")<0},A.isChrome=function(){return 
window.navigator.userAgent.indexOf("Chrome")>=0},A.initZoom=function(){var t,e=this,i=e.d3,n=e.config;e.zoom=i.behavior.zoom().on("zoomstart",function(){t=i.event.sourceEvent,e.zoom.altDomain=i.event.sourceEvent.altKey?e.x.orgDomain():null,n.zoom_onzoomstart.call(e.api,i.event.sourceEvent)}).on("zoom",function(){e.redrawForZoom.call(e)}).on("zoomend",function(){var a=i.event.sourceEvent;a&&t.clientX===a.clientX&&t.clientY===a.clientY||(e.redrawEventRect(),e.updateZoom(),n.zoom_onzoomend.call(e.api,e.x.orgDomain()))}),e.zoom.scale=function(t){return n.axis_rotated?this.y(t):this.x(t)},e.zoom.orgScaleExtent=function(){var t=n.zoom_extent?n.zoom_extent:[1,10];return[t[0],Math.max(e.getMaxDataCount()/t[1],t[1])]},e.zoom.updateScaleExtent=function(){var t=_(e.x.orgDomain())/_(e.getZoomDomain()),i=this.orgScaleExtent();return this.scaleExtent([i[0]*t,i[1]*t]),this}},A.getZoomDomain=function(){var t=this,e=t.config,i=t.d3;return[i.min([t.orgXDomain[0],e.zoom_x_min]),i.max([t.orgXDomain[1],e.zoom_x_max])]},A.updateZoom=function(){var t=this,e=t.config.zoom_enabled?t.zoom:function(){};t.main.select("."+o.zoomRect).call(e).on("dblclick.zoom",null),t.main.selectAll("."+o.eventRect).call(e).on("dblclick.zoom",null)},A.redrawForZoom=function(){var t=this,e=t.d3,i=t.config,n=t.zoom,a=t.x;if(i.zoom_enabled&&0!==t.filterTargetsToShow(t.data.targets).length){if("mousemove"===e.event.sourceEvent.type&&n.altDomain)return a.domain(n.altDomain),void n.scale(a).updateScaleExtent();t.isCategorized()&&a.orgDomain()[0]===t.orgXDomain[0]&&a.domain([t.orgXDomain[0]-1e-10,a.orgDomain()[1]]),t.redraw({withTransition:!1,withY:i.zoom_rescale,withSubchart:!1,withEventRect:!1,withDimension:!1}),"mousemove"===e.event.sourceEvent.type&&(t.cancelClick=!0),i.zoom_onzoom.call(t.api,a.orgDomain())}},T}); diff --git a/ui/js/coffee/account.coffee b/ui/js/coffee/account.coffee index d4b84d42..227e8c4d 100644 --- a/ui/js/coffee/account.coffee +++ b/ui/js/coffee/account.coffee @@ -47,7 +47,7 @@ 
validateSignup = (json, state) -> document.getElementById('signupmsg').innerHTML = "Account created! Please check your inbox for verification instructions." else document.getElementById('signupmsg').innerHTML = "

    Error: " + json.message + "

    " - + login = (form) -> if form.email.value.length > 5 and form.password.value.length > 0 cog(document.getElementById('loginmsg')) @@ -94,10 +94,9 @@ getResetToken = (json, state) -> btn = mk('input', { type: 'button', onclick: 'doResetPass()', value: 'Reset your password'}) form.setAttribute("onsubmit", "return doResetPass();") app(form, btn) - + resetpw = () -> email = get('email').value remail = email post('account',{ reset: email } , null, getResetToken) return false - \ No newline at end of file diff --git a/ui/js/coffee/charts_gauge.coffee b/ui/js/coffee/charts_gauge.coffee index 0ce2ea28..44dae448 100644 --- a/ui/js/coffee/charts_gauge.coffee +++ b/ui/js/coffee/charts_gauge.coffee @@ -1,7 +1,7 @@ charts_gaugechart = (obj, data) -> if data.gauge data = data.gauge - + config = { bindto: obj, data: { @@ -29,12 +29,11 @@ charts_gaugechart = (obj, data) -> gauge = (json, state) -> - + lmain = new HTML('div') state.widget.inject(lmain, true) - + if json.gauge and json.gauge.text lmain.inject(new HTML('p', {}, json.gauge.text)) - - gaugeChart = new Chart(lmain, 'gauge', json) + gaugeChart = new Chart(lmain, 'gauge', json) diff --git a/ui/js/coffee/charts_linechart.coffee b/ui/js/coffee/charts_linechart.coffee index 441a3a23..76bc7607 100644 --- a/ui/js/coffee/charts_linechart.coffee +++ b/ui/js/coffee/charts_linechart.coffee @@ -73,7 +73,7 @@ charts_linechart = (obj, data, options) -> keys = data.sortOrder else keys = Object.keys(xts) - + for key in keys val = xts[key] xx = [key] diff --git a/ui/js/coffee/charts_linked_map.coffee b/ui/js/coffee/charts_linked_map.coffee index 05a10d00..503ed562 100644 --- a/ui/js/coffee/charts_linked_map.coffee +++ b/ui/js/coffee/charts_linked_map.coffee @@ -11,27 +11,27 @@ charts_linked = (obj, nodes, links, options) -> bb = obj.getBoundingClientRect() llwidth = bb.width llheight = Math.max(600, bb.height) - - tooltip = d3.select("body").append("div") - .attr("class", "link_tooltip") + + tooltip = 
d3.select("body").append("div") + .attr("class", "link_tooltip") .style("opacity", 0); - + avg = links.length / nodes.length - + force = d3.layout.force() .gravity(0.015) .distance(llheight/8) .charge(-200/Math.log10(nodes.length)) .linkStrength(0.2/avg) .size([llwidth, llheight]) - + edges = [] links.forEach((e) -> sourceNode = nodes.filter((n) => n.id == e.source)[0] targetNode = nodes.filter((n) => n.id == e.target)[0] edges.push({source: sourceNode, target: targetNode, s: e.source, value: e.value, name: e.name, tooltip: e.tooltip}); ) - + force .nodes(nodes) .links(edges) @@ -39,7 +39,7 @@ charts_linked = (obj, nodes, links, options) -> lcolors = {} nodes.forEach((e) -> lcolors[e.id] = licolors[lla++] - + ) lla = 0 link = g.selectAll(".link") @@ -53,21 +53,21 @@ charts_linked = (obj, nodes, links, options) -> "stroke-width: #{d.value}; stroke: #{lcolors[d.s]};" ).on("mouseover", (d) -> if d.tooltip - tooltip.transition() - .duration(100) - .style("opacity", .9); + tooltip.transition() + .duration(100) + .style("opacity", .9); tooltip.html("#{d.name}:
    " + d.tooltip.replace("\n", "
    ")) - .style("left", (d3.event.pageX + 20) + "px") - .style("top", (d3.event.pageY - 28) + "px"); + .style("left", (d3.event.pageX + 20) + "px") + .style("top", (d3.event.pageY - 28) + "px"); ) .on("mouseout", (d) -> d3.select(this).style("stroke-opacity", "0.375") - tooltip.transition() - .duration(200) - .style("opacity", 0); + tooltip.transition() + .duration(200) + .style("opacity", 0); ) - + defs = svg.append("defs") nodes.forEach( (n) -> if n.gravatar @@ -86,27 +86,27 @@ charts_linked = (obj, nodes, links, options) -> .attr("xlink:href", "https://secure.gravatar.com/avatar/#{n.gravatar}.png?d=identicon") else n.gravatar = false - ) - + ) + node = g.selectAll(".node") .data(nodes) .enter().append("g") .attr("class", "link_node") .attr("data-source", (d) => d.id) .call(force.drag); - + lTargets = [] - + gatherTargets = (d, e) -> if e.source == d or e.target == d lTargets.push(e.source.id) lTargets.push(e.target.id) return true return false - + uptop = svg.append("g") x = null - + node.append("circle") .attr("class", "link_node") .attr("data-source", (d) => d.id) @@ -127,7 +127,7 @@ charts_linked = (obj, nodes, links, options) -> d3.selectAll("path").style("stroke-opacity", "0.075") d3.selectAll("path").filter((e) => gatherTargets(d,e) ).style("stroke-opacity", "1").style("z-index", "20") d3.selectAll("path").filter((e) => e.source == d or e.target).each((o) => - + x = d3.select(this).insert("g", ":first-child").style("stroke", "red !important") x.append("use").attr("xlink:href", "#" + o.name) ) @@ -142,32 +142,32 @@ charts_linked = (obj, nodes, links, options) -> d3.selectAll("text").style("opacity", null) d3.selectAll("path").style("stroke-opacity", null) ) - - + + node.append("a") .attr("href", (d) => if not d.gravatar then "#" else "contributors.html?page=biography&email=#{d.id}") .append("text") .attr("dx", 13) .attr("dy", ".35em") - .text((d) => d.name) + .text((d) => d.name) .on("mouseover", (d) -> if d.tooltip - tooltip.transition() - .duration(100) 
- .style("opacity", .9); + tooltip.transition() + .duration(100) + .style("opacity", .9); tooltip.html("#{d.name}:
    " + d.tooltip.replace("\n", "
    ")) - .style("left", (d3.event.pageX + 20) + "px") - .style("top", (d3.event.pageY - 28) + "px"); + .style("left", (d3.event.pageX + 20) + "px") + .style("top", (d3.event.pageY - 28) + "px"); ) .on("mouseout", (d) -> #d3.selectAll(".link").filter( (e) => e.source == this.id ).style("stroke-opacity", "0.375") - tooltip.transition() - .duration(200) - .style("opacity", 0); + tooltip.transition() + .duration(200) + .style("opacity", 0); ) - + force.on("tick", () -> link.attr("d", (d) -> dx = d.target.x - d.source.x @@ -192,7 +192,7 @@ charts_linked = (obj, nodes, links, options) -> svg .call( d3.behavior.zoom().center([llwidth / 2, llheight / 2]).scaleExtent([0.333, 4]).on("zoom", linked_zoom) ) - + return [ { svg: svg, @@ -215,5 +215,3 @@ charts_linked = (obj, nodes, links, options) -> .start() }, {linked: true}] - - diff --git a/ui/js/coffee/charts_punchcard.coffee b/ui/js/coffee/charts_punchcard.coffee index 8c07ecf4..77508f49 100644 --- a/ui/js/coffee/charts_punchcard.coffee +++ b/ui/js/coffee/charts_punchcard.coffee @@ -36,12 +36,12 @@ charts_punchcard = (obj, data, options) -> div = d3.select(div) data = data.timeseries days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] - + c = [] chart = d3.select(obj).append("svg").attr("width", '100%').attr("height", '100%') - - + + MAX = 0 for k, v of data m = k.split(/ - /) @@ -59,8 +59,8 @@ charts_punchcard = (obj, data, options) -> circles = chart.selectAll('svg').data(c).enter().append("circle"); labels = chart.selectAll('svg').data(days).enter().append('text') slots = chart.selectAll('svg').data([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]).enter().append('text') - - + + redraw = () -> xy = obj.getBoundingClientRect() xy.height = xy.width * 0.5 @@ -69,16 +69,16 @@ charts_punchcard = (obj, data, options) -> maxr = Math.sqrt(xy.width**2 + xy.height**2) / 80 cw = (0.03*xy.width) circles.attr("cx", (d) => (d.x*xy.width) + cw/2).attr("cy", (d) => 50 + d.y*xy.height ).attr("r", (d) => pval(d.r, 
MAX) * maxr).style("fill", (d) => punchcard_color(d.r, MAX)). - on("mouseover", (d) -> + on("mouseover", (d) -> div.transition() - .duration(200) + .duration(200) .style("opacity", .9) - div .html(d.h + d.r.pretty() + " commits") - .style("left", (d3.event.pageX) + "px") + div .html(d.h + d.r.pretty() + " commits") + .style("left", (d3.event.pageX) + "px") .style("top", (d3.event.pageY - 28) + "px"); ).on("mouseout", (d) -> div.transition() - .duration(200) + .duration(200) .style("opacity", 0) ) labels.attr('x', 20).attr('y', (d) => (55 + (0.04 + days.indexOf(d) * 0.10) * xy.height)).attr('font-size', maxr*1.75).text((d) => d) @@ -86,6 +86,5 @@ charts_punchcard = (obj, data, options) -> chart.node().addEventListener("resize", redraw) window.addEventListener("resize", redraw) redraw(); - - return [chart, {punchcard: true}] + return [chart, {punchcard: true}] diff --git a/ui/js/coffee/charts_radar.coffee b/ui/js/coffee/charts_radar.coffee index e2cc01bb..831c1796 100644 --- a/ui/js/coffee/charts_radar.coffee +++ b/ui/js/coffee/charts_radar.coffee @@ -34,7 +34,7 @@ charts_radarchart = (obj, data, options) -> Format = (edge) => Math.floor((edge/24)+0.5) + "↑ (" + (5**(edge/24)).pretty() + ")" d3.select(obj).select("svg").remove(); - + rect = obj.getBoundingClientRect() g = d3.select(obj) .append("svg") @@ -43,7 +43,7 @@ charts_radarchart = (obj, data, options) -> .append("g") .attr("transform", "translate(" + cfg.TranslateX + "," + cfg.TranslateY + ")"); ; - + # Indicator lines for j in [0...cfg.levels] levelFactor = cfg.factor * radius * ( (j+1) / cfg.levels) @@ -60,8 +60,8 @@ charts_radarchart = (obj, data, options) -> .style("stroke-opacity", "0.75") .style("stroke-width", "0.3px") .attr("transform", "translate(" + (cfg.w/2-levelFactor) + ", " + (cfg.h/2-levelFactor) + ")") - - + + # Levels for j in [0...cfg.levels] levelFactor = cfg.factor*radius*((j+1)/cfg.levels); @@ -77,16 +77,16 @@ charts_radarchart = (obj, data, options) -> .attr("transform", "translate(" + 
(cfg.w/2-levelFactor + cfg.ToRight) + ", " + (cfg.h/2-levelFactor) + ")") .attr("fill", "#737373") .text(Format((j+1)*cfg.maxValue/cfg.levels)) - - + + series = 0 - + axis = g.selectAll(".axis") .data(axes) .enter() .append("g") .attr("class", "axis") - + axis.append("line") .attr("x1", cfg.w/2) .attr("y1", cfg.h/2) @@ -95,7 +95,7 @@ charts_radarchart = (obj, data, options) -> .attr("class", "line") .style("stroke", "grey") .style("stroke-width", "1px"); - + axis.append("text") .attr("class", "legend") .text((d) => d) @@ -106,8 +106,8 @@ charts_radarchart = (obj, data, options) -> .attr("transform", (d,i) => "translate(0, -10)") .attr("x", (d,i) => cfg.w/2*(1-cfg.factorLegend*Math.sin(i*cfg.radians/total))-60*Math.sin(i*cfg.radians/total)) .attr("y", (d,i) => cfg.h/2*(1-Math.cos(i*cfg.radians/total))-20*Math.cos(i*cfg.radians/total)) - - + + d.forEach((y,x) -> dataValues = [] g.selectAll(".nodes") @@ -120,9 +120,9 @@ charts_radarchart = (obj, data, options) -> ]\ ) ) - + dataValues.push(dataValues[0]) - + g.selectAll(".area") .data([dataValues]) .enter() @@ -143,7 +143,7 @@ charts_radarchart = (obj, data, options) -> z = "polygon."+d3.select(this).attr("class"); g.selectAll("polygon") .transition(200) - .style("fill-opacity", 0.1); + .style("fill-opacity", 0.1); g.selectAll(z) .transition(200) .style("fill-opacity", .7); @@ -155,9 +155,9 @@ charts_radarchart = (obj, data, options) -> ) series++ ) - + series = 0 - + d.forEach( (y,x) -> g.selectAll(".nodes") .data(y).enter() @@ -168,7 +168,7 @@ charts_radarchart = (obj, data, options) -> .attr("cx", (j,i) -> dataValues = dataValues || [] dataValues.push([ - cfg.w/2*(1-(parseFloat(Math.max(j.value, 0))/cfg.maxValue)*cfg.factor*Math.sin(i*cfg.radians/total)), + cfg.w/2*(1-(parseFloat(Math.max(j.value, 0))/cfg.maxValue)*cfg.factor*Math.sin(i*cfg.radians/total)), cfg.h/2*(1-(parseFloat(Math.max(j.value, 0))/cfg.maxValue)*cfg.factor*Math.cos(i*cfg.radians/total)) ]) return cfg.w/2*(1-(Math.max(j.value, 
0)/cfg.maxValue)*cfg.factor*Math.sin(i*cfg.radians/total)) @@ -181,14 +181,14 @@ charts_radarchart = (obj, data, options) -> .on('mouseover', (d) -> newX = parseFloat(d3.select(this).attr('cx')) - 10 newY = parseFloat(d3.select(this).attr('cy')) - 5 - + tooltip .attr('x', newX) .attr('y', newY) .text(Format(d.value)) .transition(200) .style('opacity', 1) - + z = "polygon."+d3.select(this).attr("class"); g.selectAll("polygon") .transition(200) @@ -207,23 +207,23 @@ charts_radarchart = (obj, data, options) -> ) .append("svg:title") .text((j) => Math.max(j.value, 0)); - + series++ ); - + # Tooltip tooltip = g.append('text') .style('opacity', 0) .style('font-family', 'sans-serif') .style('font-size', '13px'); - + legend = g.append("g") .attr("class", "legend") .attr("height", 100) .attr("width", 200) - .attr('transform', 'translate(90,20)') - - + .attr('transform', 'translate(90,20)') + + legend.selectAll('rect') .data(LegendOptions) .enter() @@ -233,8 +233,8 @@ charts_radarchart = (obj, data, options) -> .attr("width", 10) .attr("height", 10) .style("fill", (d,i) => cfg.color[i]) - - + + legend.selectAll('text') .data(LegendOptions) .enter() @@ -244,7 +244,6 @@ charts_radarchart = (obj, data, options) -> .attr("font-size", "11px") .attr("fill", "#737373") .text((d) => d) - + g.resize = () -> return true return [g, {}] - diff --git a/ui/js/coffee/charts_wrapper.coffee b/ui/js/coffee/charts_wrapper.coffee index 91c84051..150f5ae9 100644 --- a/ui/js/coffee/charts_wrapper.coffee +++ b/ui/js/coffee/charts_wrapper.coffee @@ -117,7 +117,7 @@ copyCSS = (destination, source) -> if (child.tagName in containerElements) copyCSS(child, source.childNodes[cd]) continue - + style = source.childNodes[cd].currentStyle || window.getComputedStyle(source.childNodes[cd]); if (style == "undefined" || style == null) continue @@ -129,7 +129,7 @@ downloadBlob = (name, uri) -> navigator.msSaveOrOpenBlob(uriToBlob(uri), name); else saveLink = document.createElement('a'); - + saveLink.download = 
name; saveLink.style.display = 'none'; document.body.appendChild(saveLink); @@ -141,26 +141,26 @@ downloadBlob = (name, uri) -> requestAnimationFrame( () -> URL.revokeObjectURL(url) ) - + catch e console.warn('This browser does not support object URLs. Falling back to string URL.'); saveLink.href = uri; - + saveLink.click() document.body.removeChild(saveLink) - - + + chartToSvg = (o, asSVG) -> - + doctype = '' svgdiv = o.chartdiv.getElementsByTagName('svg')[0] svgcopy = svgdiv.cloneNode(true) copyCSS(svgcopy, svgdiv) rect = o.main.getBoundingClientRect() svgcopy.setAttribute('xlink', 'http://www.w3.org/1999/xlink') - + source = (new XMLSerializer()).serializeToString(svgcopy) - + source = source.replace(/(\w+)?:?xlink=/g, 'xmlns:xlink=') source = source.replace(/NS\d+:href/g, 'xlink:href') blob = new Blob([ doctype + source], { type: 'image/svg+xml;charset=utf-8' }) @@ -174,12 +174,12 @@ chartToSvg = (o, asSVG) -> document.getElementById('chartWrapperHiddenMaster').appendChild(canvas) ctx = canvas.getContext('2d') ctx.drawImage(img, 0, 0) - + canvasUrl = canvas.toDataURL("image/png") downloadBlob('chart.png', canvasUrl) - + document.getElementById('chartWrapperHiddenMaster').appendChild(img) - + rotateTable = (list) -> newList = [] for x, i in list[0] @@ -188,7 +188,7 @@ rotateTable = (list) -> arr.push(el[i]) newList.push(arr) return newList - + dataTable = (o) -> modal = new HTML('div', { class: "chartModal"}) modalInner = new HTML('div', { class: "chartModalContent"}) @@ -223,7 +223,7 @@ switchChartType = (o, config, type) -> xtype = m[1] + v.split(/-/)[1]||v config.data.types[k] = xtype o.chartobj = c3.generate(config) - + stackChart = (o, config, chart) -> arr = [] for k, v of config.data.columns @@ -240,33 +240,33 @@ class Chart constructor: (parent, type, data, options) -> cid = parseInt((Math.random()*1000000)).toString(16) @cid = cid - + xxCharts[cid] = this - + # Make main div wrapper @main = new HTML('div', { class: "chartWrapper"}) @main.xThis = this 
@data = data - + # Make toolbar @toolbar = new HTML('div', {class: "chartToolbar"}) @main.inject(@toolbar) - + # Title bar @titlebar = new HTML('div', {class: "chartTitle"}, if (options and options.title) then options.title else "") @main.inject(@titlebar) - + i = 0 chartWrapperColors = genColors(16, 0.2, 0.75, true) - + # Default to generic buttons btns = chartWrapperButtons.generic.slice(0,999) - + # Line charts have more features than, say, donuts if type == 'line' for el in chartWrapperButtons.line btns.push(el) - + # Make the buttons appear @buttons = {} for btn in btns @@ -279,13 +279,13 @@ class Chart if btn.onclick do (btn, btnDiv) -> btnDiv.addEventListener('click', () -> chartOnclick(btn.onclick, cid)) - + i++ - + # Make inner chart @chartdiv = new HTML('div', { class: "chartChart"}) @main.inject(@chartdiv) - + if parent parent.appendChild(@main) else @@ -294,7 +294,7 @@ class Chart hObj = new HTML('div', { id: 'chartWrapperHiddenMaster', style: { visibility: "hidden"}}) document.body.appendChild(hObj) hObj.appendChild(@main) - + if type == 'line' [@chartobj, @config] = charts_linechart(@chartdiv, data, options) if type == 'donut' @@ -307,8 +307,8 @@ class Chart [@chartobj, @config] = charts_linked(@chartdiv, data.nodes, data.links, options) if type == 'punchcard' [@chartobj, @config] = charts_punchcard(@chartdiv, data, options) - - + + # If this data source has distinguishable categories # show a checkbox to toggle it. if data.distinguishable @@ -324,7 +324,7 @@ class Chart if this.checked distinguish = 'true' globArgs['distinguish'] = 'true' - + updateWidgets('line', null, { distinguish: distinguish }) updateWidgets('gauge', null, { distinguish: distinguish }) ) @@ -337,7 +337,7 @@ class Chart label.style.paddingLeft = '5px' label.appendChild(document.createTextNode('Toggle category breakdown')) @main.inject(label) - + # If this data source has relative weightings # show a checkbox to toggle it. 
if data.relativeMode @@ -353,7 +353,7 @@ class Chart if this.checked relative = 'true' globArgs['relative'] = 'true' - + updateWidgets('line', null, { relative: relative }) updateWidgets('gauge', null, { relative: relative }) ) @@ -366,6 +366,5 @@ class Chart label.style.paddingLeft = '5px' label.appendChild(document.createTextNode('Toggle relative/comparative mode')) @main.inject(label) - + return @main - \ No newline at end of file diff --git a/ui/js/coffee/colors.coffee b/ui/js/coffee/colors.coffee index 11f9c487..07237d2a 100644 --- a/ui/js/coffee/colors.coffee +++ b/ui/js/coffee/colors.coffee @@ -1,6 +1,6 @@ hsl2rgb = (h, s, l) -> - + h = h % 1; s = 1 if s > 1 l = 1 if l > 1 @@ -79,7 +79,7 @@ genColors = (numColors, saturation, lightness, hex) -> if baseHue < 0 baseHue += 1 c = hsl2rgb(baseHue, saturation, lightness) - if (hex) + if (hex) #h = ( Math.round(c.r*255*255*255) + Math.round(c.g * 255*255) + Math.round(c.b*255) ).toString(16) h = "#" + ("00" + (~ ~(c.r * 255)).toString(16)).slice(-2) + ("00" + (~ ~(c.g * 255)).toString(16)).slice(-2) + ("00" + (~ ~(c.b * 255)).toString(16)).slice(-2); cls.push(h); @@ -90,9 +90,9 @@ genColors = (numColors, saturation, lightness, hex) -> b: parseInt(c.b * 255) }) baseHue -= 0.37 - if (baseHue < 0) + if (baseHue < 0) baseHue += 1 - + return cls @@ -103,12 +103,12 @@ quickColors = (num) -> r = Math.random() g = Math.random() b = Math.random() - + pastel = 0.7 r = ((pastel+r)/2) g = ((pastel+g)/2) b = ((pastel+b)/2) - + c = "#" + ("00" + (~ ~(r * 205)).toString(16)).slice(-2) + ("00" + (~ ~(g * 205)).toString(16)).slice(-2) + ("00" + (~ ~(b * 205)).toString(16)).slice(-2); colors.push(c) return colors diff --git a/ui/js/coffee/combine.sh b/ui/js/coffee/combine.sh index 6d73e941..9505d661 100644 --- a/ui/js/coffee/combine.sh +++ b/ui/js/coffee/combine.sh @@ -1,3 +1,19 @@ #!/bin/bash -coffee -b --join ../kibble.v1.js -c *.coffee +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license 
agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +coffee -b --join ../kibble.v1.js -c *.coffee diff --git a/ui/js/coffee/datepicker.coffee b/ui/js/coffee/datepicker.coffee index 5e3c7b84..7ed32019 100644 --- a/ui/js/coffee/datepicker.coffee +++ b/ui/js/coffee/datepicker.coffee @@ -39,7 +39,7 @@ updateTimeseriesWidgets = (range) -> updateWidgets('phonebook', null, { to: to, from: from}) updateWidgets('worldmap', null, { to: to, from: from}) updateWidgets('jsondump', null, { to: to, from: from}) - + datepicker = (widget) -> div = document.createElement('div') div.setAttribute("class", "well") @@ -72,10 +72,10 @@ datepicker = (widget) -> id = Math.floor(Math.random()*987654321).toString(16) input.setAttribute("id", id) group.appendChild(input) - + widget.inject(div) - - + + datePickerOptions = { startDate: if globArgs.from then moment(new Date(globArgs.from*1000)) else moment().subtract(6, 'months'), endDate: if globArgs.to then moment(new Date(globArgs.to*1000)) else moment(), @@ -118,9 +118,8 @@ datepicker = (widget) -> firstDay: 1 } }; - + $('#' + id).daterangepicker(datePickerOptions, (start, end, label) -> console.log(start._d.getTime()/1000) updateTimeseriesWidgets([Math.max(0, Math.floor(start._d.getTime()/1000)), Math.max(3600, Math.floor(end._d.getTime()/1000))]) ); - diff --git a/ui/js/coffee/error_modal.coffee 
b/ui/js/coffee/error_modal.coffee index 011263a2..818ea24a 100644 --- a/ui/js/coffee/error_modal.coffee +++ b/ui/js/coffee/error_modal.coffee @@ -8,10 +8,9 @@ badModal = (str) -> modalInner.inject(btndiv) btn = new HTML('button', {class: "btn btn-lg btn-success", onclick:"document.body.removeChild(this.parentNode.parentNode.parentNode);"}, "Gotcha!") btndiv.inject(btn) - + window.setTimeout(() -> modalInner.style.visibility = "visible" modalInner.style.opacity = 1 , 10 ) - diff --git a/ui/js/coffee/explorer.coffee b/ui/js/coffee/explorer.coffee index 8bff59d9..7cfe148f 100644 --- a/ui/js/coffee/explorer.coffee +++ b/ui/js/coffee/explorer.coffee @@ -14,7 +14,7 @@ # limitations under the License. explorer = (json, state) -> - + org = json.organisation h = document.createElement('h2') if json.tag @@ -46,12 +46,12 @@ explorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -67,9 +67,9 @@ explorer = (json, state) -> updateWidgets('punchcard', null, { source: source }) updateWidgets('jsondump', null, { source: source }) ) - - - + + + # Unique commits label id = Math.floor(Math.random() * 987654321).toString(16) chk = document.createElement('input') @@ -83,7 +83,7 @@ explorer = (json, state) -> if this.checked author = 'true' globArgs['author'] = 'true' - + updateWidgets('donut', null, { author: author }) updateWidgets('gauge', null, { author: author }) updateWidgets('line', null, { author: author }) @@ -107,10 +107,10 @@ explorer = (json, state) -> state.widget.inject(label) br = new HTML('br') p = new HTML('input', {id:'pathfilter', size: 32, type: 'text', value: globArgs.pathfilter, onChange: 'pathFilterGlob = this.value;',placeholder: 'optional path-filter'}) - + state.widget.inject(br) 
state.widget.inject(p) - + b = new HTML('input', {style: { marginLeft: '10px'}, class: 'btn btn-small btn-success', type: 'button', onClick: 'pathFilter();', value: "filter paths"}) rb = new HTML('input', {style: { marginLeft: '10px'}, class: 'btn btn-small btn-danger', type: 'button', onClick: 'get("pathfilter").value = ""; pathFilterGlob = ""; pathFilter();', value: "reset"}) state.widget.inject(b) @@ -118,7 +118,7 @@ explorer = (json, state) -> sourceexplorer = (json, state) -> - + org = json.organisation h = document.createElement('h4') if json.tag @@ -151,12 +151,12 @@ sourceexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -176,13 +176,13 @@ sourceexplorer = (json, state) -> mailexplorer = (json, state) -> - + org = json.organisation h = document.createElement('h4') if json.tag org.name += " (Filter: " + json.tag + ")" h.appendChild(document.createTextNode("Exploring " + org.name + ":")) - + state.widget.inject(h, true) list = document.createElement('select') state.widget.inject(list) @@ -209,12 +209,12 @@ mailexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -227,17 +227,17 @@ mailexplorer = (json, state) -> updateWidgets('trends', null, { source: source }) updateWidgets('punchcard', null, { source: source }) updateWidgets('relationship', null, { source: source }) - + ) - + logexplorer = (json, state) -> - + org = json.organisation h = document.createElement('h4') if json.tag org.name += " (Filter: " 
+ json.tag + ")" h.appendChild(document.createTextNode("Exploring " + org.name + ":")) - + state.widget.inject(h, true) list = document.createElement('select') state.widget.inject(list) @@ -264,12 +264,12 @@ logexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -281,11 +281,11 @@ logexplorer = (json, state) -> updateWidgets('factors', null, { source: source }) updateWidgets('trends', null, { source: source }) updateWidgets('punchcard', null, { source: source }) - + ) - + issueexplorer = (json, state) -> - + org = json.organisation if json.tag org.name += " (Filter: " + json.tag + ")" @@ -310,7 +310,7 @@ issueexplorer = (json, state) -> opt = document.createElement('option') opt.value = item.sourceID ezURL = null - n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([-.A-Z0-9]+)$/i) + n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([-.A-Z0-9]+)$/i) m = item.sourceURL.match(/^([a-z]+:\/\/.+?)\s(.+)$/i) if n and n.length == 3 ezURL = "#{n[2]} - (#{n[1]})" @@ -320,12 +320,12 @@ issueexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -337,13 +337,13 @@ issueexplorer = (json, state) -> updateWidgets('factors', null, { source: source }) updateWidgets('trends', null, { source: source }) updateWidgets('punchcard', null, { source: source }) - + ) - + forumexplorer = (json, state) -> - + org = json.organisation if json.tag org.name += " (Filter: " + json.tag + ")" @@ -368,7 +368,7 @@ forumexplorer = (json, state) -> opt = 
document.createElement('option') opt.value = item.sourceID ezURL = null - n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([-.A-Z0-9]+)$/i) + n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([-.A-Z0-9]+)$/i) m = item.sourceURL.match(/^([a-z]+:\/\/.+?)\s(.+)$/i) if n and n.length == 3 ezURL = "#{n[2]} - (#{n[1]})" @@ -378,12 +378,12 @@ forumexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -395,13 +395,13 @@ forumexplorer = (json, state) -> updateWidgets('factors', null, { source: source }) updateWidgets('trends', null, { source: source }) updateWidgets('punchcard', null, { source: source }) - + ) - + imexplorer = (json, state) -> - + org = json.organisation if json.tag org.name += " (Filter: " + json.tag + ")" @@ -426,7 +426,7 @@ imexplorer = (json, state) -> opt = document.createElement('option') opt.value = item.sourceID ezURL = null - n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([#\S+]+)$/i) + n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([#\S+]+)$/i) m = item.sourceURL.match(/^([a-z]+:\/\/.+?)\s(.+)$/i) if n and n.length == 3 ezURL = "#{n[2]} - (#{n[1]})" @@ -436,12 +436,12 @@ imexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -453,13 +453,13 @@ imexplorer = (json, state) -> updateWidgets('factors', null, { source: source }) updateWidgets('trends', null, { source: source }) updateWidgets('punchcard', null, { source: source }) - + , false) $('select').chosen(); - + ciexplorer = (json, state) -> - + org = 
json.organisation if json.tag org.name += " (Filter: " + json.tag + ")" @@ -484,7 +484,7 @@ ciexplorer = (json, state) -> opt = document.createElement('option') opt.value = item.sourceID ezURL = null - n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([#\S+]+)$/i) + n = item.sourceURL.match(/^([a-z]+:\/\/.+?)\/([#\S+]+)$/i) m = item.sourceURL.match(/^([a-z]+:\/\/.+?)\s(.+)$/i) if n and n.length == 3 ezURL = "#{n[2]} - (#{n[1]})" @@ -494,12 +494,12 @@ ciexplorer = (json, state) -> if globArgs.source and globArgs.source == item.sourceID opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> source = this.value - + if source == "" source = null globArgs.source = source @@ -512,9 +512,9 @@ ciexplorer = (json, state) -> updateWidgets('trends', null, { source: source }) updateWidgets('relationship', null, { source: source }) updateWidgets('punchcard', null, { source: source }) - + ) - + multiviewexplorer = (json, state) -> org = json.organisation @@ -547,7 +547,7 @@ multiviewexplorer = (json, state) -> if globArgs[tName] and globArgs[tName] == item.id opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> @@ -568,7 +568,7 @@ multiviewexplorer = (json, state) -> updateWidgets('radar', null, x) updateWidgets('punchcard', null, x) ) - + subFilterGlob = null subFilter = () -> source = subFilterGlob @@ -601,7 +601,7 @@ subFilter = () -> updateWidgets('worldmap', null, x) updateWidgets('jsondump', null, x) updateWidgets('punchcard', null, x) - + $( "a" ).each( () -> url = $(this).attr('href') if url @@ -640,7 +640,7 @@ pathFilter = () -> updateWidgets('worldmap', null, x) updateWidgets('jsondump', null, x) updateWidgets('punchcard', null, x) - + $( "a" ).each( () -> url = $(this).attr('href') if url @@ -651,7 +651,7 @@ pathFilter = () -> else 
$(this).attr('href', "#{m[1]}#{m[2]}") ) - + viewexplorer = (json, state) -> org = json.organisation @@ -683,7 +683,7 @@ viewexplorer = (json, state) -> if globArgs[tName] and globArgs[tName] == item.id opt.selected = 'selected' list.appendChild(opt) - + ID = Math.floor(Math.random() * 987654321).toString(16) list.setAttribute('id', ID) $("#"+ID).chosen().change(() -> @@ -716,7 +716,7 @@ viewexplorer = (json, state) -> updateWidgets('worldmap', null, x) updateWidgets('jsondump', null, x) updateWidgets('punchcard', null, x) - + $( "a" ).each( () -> url = $(this).attr('href') if url @@ -727,9 +727,9 @@ viewexplorer = (json, state) -> else $(this).attr('href', "#{m[1]}#{m[2]}") ) - + ) - + # Quick filter state.widget.inject(new HTML('br')) i = new HTML('input', {id:'subfilter', size: 16, type: 'text', value: globArgs.subfilter, onChange: 'subFilterGlob = this.value;', placeholder: 'sub-filter'}) @@ -738,8 +738,8 @@ viewexplorer = (json, state) -> state.widget.inject(i) state.widget.inject(b) state.widget.inject(rb) - - + + if globArgs.subfilter and globArgs.subfilter.length > 0 source = globArgs.subfilter $( "a" ).each( () -> @@ -752,13 +752,13 @@ viewexplorer = (json, state) -> else $(this).attr('href', "#{m[1]}#{m[2]}") ) - + if globArgs.email div = new HTML('div', {}, "Currently filtering results based on " + globArgs.email + ". 
- ") div.inject(new HTML('a', { href: 'javascript:void(filterPerson(null));'}, "Reset filter")) state.widget.inject(div) - - + + widgetexplorer = (json, state) -> pwidgets = { @@ -817,7 +817,7 @@ widgetexplorer = (json, state) -> if globArgs[tName] and globArgs[tName] == key opt.selected = 'selected' list.appendChild(opt) - + list.addEventListener("change", () -> source = this.value if source == "" @@ -836,7 +836,5 @@ widgetexplorer = (json, state) -> updateWidgets('trends', null, x) updateWidgets('radar', null, x) updateWidgets('punchcard', null, x) - + , false) - - diff --git a/ui/js/coffee/kibble_account.coffee b/ui/js/coffee/kibble_account.coffee index fe8e773a..9158d753 100644 --- a/ui/js/coffee/kibble_account.coffee +++ b/ui/js/coffee/kibble_account.coffee @@ -41,32 +41,32 @@ accountCallback = (json, state) -> else t = new HTML('p', {}, "Please check your email account for a verification email.") obj.appendChild(t) - + kibbleSignup = (form) -> email = form.email.value displayName = form.displayname.value password = form.password.value password2 = form.password2.value - + # Passwords must match if password != password2 alert("Passwords must match!") return false - + # Username must be >= 2 chars if displayName.length < 2 alert("Please enter a proper display name!") return false - + # Email must be valid if not email.match(/([^@]+@[^.]+\.[^.])/) alert("Please enter a valid email address!") return false - + put('account', { email: email, password: password, displayname: displayName }, null, accountCallback) - - return false \ No newline at end of file + + return false diff --git a/ui/js/coffee/kibble_organisation.coffee b/ui/js/coffee/kibble_organisation.coffee index f6a4edc7..07b3490b 100644 --- a/ui/js/coffee/kibble_organisation.coffee +++ b/ui/js/coffee/kibble_organisation.coffee @@ -16,8 +16,8 @@ keyValueForm = (type, key, caption, placeholder) -> div = new HTML('div', { style: { width: "100%", margin: "10px", paddingBottom: "10px"}}) left = new HTML('div', 
{ style: { float: "left", width: "300px", fontWeight: "bold"}}, caption) - right = new HTML('div', { style: { float: "left", width: "500px"}}) - + right = new HTML('div', { style: { float: "left", width: "500px"}}) + if type == 'text' inp = new HTML('input', {name: key, id: key, style: { marginBottom: "10px"}, class: "form-control", type: "text", placeholder: placeholder}) right.inject(inp) @@ -76,34 +76,34 @@ orglist = (json, state) -> new HTML('kbd', {}, ""+org.sourceCount.pretty()), " sources so far." ]) - + odiv.inject(div) if not isDefault dbtn = new HTML('input', { style: { marginTop: "10px", width: "120px"},class: "btn btn-primary btn-block", type: "button", onclick: "setDefaultOrg('#{org.id}');", value: "Set as current"}) div.inject(dbtn) odiv.inject(new HTML('hr')) state.widget.inject(odiv, true) - + if userAccount.userlevel == "admin" fieldset = new HTML('fieldset', { style: { float: "left", margin: '30px'}}) legend = new HTML('legend', {}, "Create a new organisation:") fieldset.inject(legend) - + fieldset.inject(keyValueForm('text', 'orgname', 'Name of the organisation:', 'Foo, inc.')) fieldset.inject(keyValueForm('textarea', 'orgdesc', 'Description:', 'Foo, inc. 
is awesome and does stuff.')) fieldset.inject(keyValueForm('text', 'orgid', 'Optional org ID:', 'demo, myorg etc')) - + fieldset.inject(new HTML('p', {}, "You'll be able to add users and owners once the organisation has been created.")) - + btn = new HTML('input', { style: { width: "200px"},class: "btn btn-primary btn-block", type: "button", onclick: "makeOrg();", value: "Create organisation"}) fieldset.inject(btn) - + state.widget.inject(fieldset) - + inviteMember = (eml, admin) -> put('org/members', { email: eml, admin: admin}, null, memberInvited) - + removeMember = (eml, admin) -> xdelete('org/members', { email: eml, admin: admin}, null, memberInvited) @@ -115,7 +115,7 @@ memberInvited = (json, state) -> ) membershipList = (json, state) -> - + # Invite member form h = new HTML('h3', {}, "Invite a member to #{userAccount.defaultOrganisation}") state.widget.inject(h, true) @@ -124,13 +124,13 @@ membershipList = (json, state) -> state.widget.inject(inp) state.widget.inject(btn) state.widget.inject(new HTML('hr')) - - + + # Existing membership list h = new HTML('h3', {}, "Current membership of #{userAccount.defaultOrganisation}:") state.widget.inject(h) list = new HTML('table', { style: { margin: "20px", border: "1px solid #666"}}) - + for member in json.members tr = new HTML('tr', { style: { borderBottom: "1px solid #666"}}) eml = new HTML('td', { style: { padding: "5px"}}, member) @@ -140,17 +140,15 @@ membershipList = (json, state) -> if isAdmin alink = new HTML('a', { href: "javascript:void(inviteMember('#{member}', false));"}, "Remove as admin") admopt = new HTML('td', { style: { padding: "5px"}}, alink) - + # Remove member dlink = new HTML('a', { href: "javascript:void(removeMember('#{member}'));"}, "Remove from organisation") delopt = new HTML('td', { style: { padding: "5px"}}, dlink) - + tr.inject(eml) tr.inject(admin) tr.inject(admopt) tr.inject(delopt) list.inject(tr) - - state.widget.inject(list) - + state.widget.inject(list) diff --git 
a/ui/js/coffee/misc.coffee b/ui/js/coffee/misc.coffee index 30375772..49fdcfdd 100644 --- a/ui/js/coffee/misc.coffee +++ b/ui/js/coffee/misc.coffee @@ -35,7 +35,7 @@ fetch = (url, xstate, callback, nocreds) -> # GET URL xmlHttp.open("GET", "api/#{url}", true); xmlHttp.send(null); - + xmlHttp.onreadystatechange = (state) -> if xmlHttp.readyState == 4 and xmlHttp.status == 500 if snap @@ -70,7 +70,7 @@ put = (url, json, xstate, callback, nocreds = false) -> # GET URL xmlHttp.open("PUT", "api/#{url}", true); xmlHttp.send(JSON.stringify(json || {})); - + xmlHttp.onreadystatechange = (state) -> if xmlHttp.readyState == 4 and xmlHttp.status == 500 if snap @@ -105,7 +105,7 @@ patch = (url, json, xstate, callback, nocreds = false) -> # GET URL xmlHttp.open("PATCH", "api/#{url}", true); xmlHttp.send(JSON.stringify(json || {})); - + xmlHttp.onreadystatechange = (state) -> if xmlHttp.readyState == 4 and xmlHttp.status == 500 if snap @@ -139,7 +139,7 @@ xdelete = (url, json, xstate, callback, nocreds = false) -> # GET URL xmlHttp.open("DELETE", "api/#{url}", true); xmlHttp.send(JSON.stringify(json || {})); - + xmlHttp.onreadystatechange = (state) -> if xmlHttp.readyState == 4 and xmlHttp.status == 500 if snap @@ -180,12 +180,12 @@ post = (url, json, xstate, callback, snap) -> if val == 'false' json[key] = false fdata = JSON.stringify(json) - + # POST URL xmlHttp.open("POST", "api/#{url}", true); xmlHttp.setRequestHeader("Content-type", "application/json"); xmlHttp.send(fdata); - + xmlHttp.onreadystatechange = (state) -> if xmlHttp.readyState == 4 and xmlHttp.status == 500 if snap @@ -477,14 +477,14 @@ isArray = ( value ) -> typeof value.length is 'number' and typeof value.splice is 'function' and not ( value.propertyIsEnumerable 'length' ) - + ### isHash: function to detect if an object is a hash ### isHash = (value) -> value and typeof value is 'object' and not isArray(value) - + class HTML constructor: (type, params, children) -> @@ -493,7 +493,7 @@ class HTML @element = 
type.cloneNode() else @element = document.createElement(type) - + ### If params have been passed, set them ### if isHash(params) for key, val of params @@ -509,7 +509,7 @@ class HTML if not @element[key] throw "No such attribute, #{key}!" @element[key][subkey] = subval - + ### If any children have been passed, add them to the element ### if children ### If string, convert to textNode using txt() ### @@ -545,4 +545,4 @@ HTMLElement.prototype.inject = (child) -> if typeof child is 'string' child = txt(child) this.appendChild(child) - return child \ No newline at end of file + return child diff --git a/ui/js/coffee/pageloader.coffee b/ui/js/coffee/pageloader.coffee index b1d9f1ac..80af654e 100644 --- a/ui/js/coffee/pageloader.coffee +++ b/ui/js/coffee/pageloader.coffee @@ -26,7 +26,7 @@ setupPage = (json, state) -> div.style.textAlign = 'center' div.innerHTML = "

    An error occurred:

    " + json.error + "

    " return - + # View active? if userAccount.view and userAccount.view.length > 0 vrow = new Row() @@ -46,10 +46,10 @@ setupPage = (json, state) -> set(a, 'href', '?page=views') app(p, a) vrow.inject(p) - - document.title = json.title + " - Apache Kibble" + + document.title = json.title + " - Apache Kibble" # Go through each row - + for r in json.rows row = new Row() @@ -72,7 +72,7 @@ setupPage = (json, state) -> for k, v of child.wargs widget.wargs[k] = v if child.type not in ['views', 'sourcelist'] - widget.args.eargs.quick = 'true' + widget.args.eargs.quick = 'true' switch child.type when 'datepicker' then datepicker(widget) @@ -163,7 +163,7 @@ loadPageWidgets = (page, apiVersion) -> if m if globArgs.view $(this).attr('href', "#{m[1]}&view=#{globArgs.view}#{m[2]}") - + ) # Fetch account info fetch('session', null, renderAccountInfo) @@ -180,14 +180,14 @@ renderAccountInfo = (json, state) -> userAccount = json img = document.getElementById('user_image') img.setAttribute("src", "https://secure.gravatar.com/avatar/" + json.gravatar + ".png") - + name = document.getElementById('user_name') name.innerHTML = "" name.appendChild(document.createTextNode(json.displayName)) - + ulevel = get('user_level') ulevel.inject(if json.userlevel == 'admin' then 'Administrator' else if json.defaultOrganisation in json.ownerships then 'Organisation Owner' else 'User') - + nm = get('messages_number') nm.innerHTML = json.messages if json.messages > 0 @@ -206,13 +206,13 @@ renderAccountInfo = (json, state) -> msp = mk('span') app(msp, txt(email.senderName)) app(ma, msp) - + msp = mk('span') set(msp, 'class', 'message') app(msp, txt(email.subject)) app(ma, msp) app(mli, ma) app(nl, mli) - + # Fetch widget list fetch('widgets/' + pageID, { gargs: globArgs }, setupPage) diff --git a/ui/js/coffee/phonebook.coffee b/ui/js/coffee/phonebook.coffee index c43264d5..16922c5c 100644 --- a/ui/js/coffee/phonebook.coffee +++ b/ui/js/coffee/phonebook.coffee @@ -7,7 +7,7 @@ phonebook = (json, state) 
-> obj.innerText = "Found #{json.people.length} contributors.." obj.inject(new HTML('br')) state.widget.inject(obj, true) - + json.people.sort( (a,b) => if a.name < b.name return -1 @@ -15,7 +15,7 @@ phonebook = (json, state) -> return 1 return 0 ) - + for i, item of json.people if i > 250 break @@ -33,6 +33,3 @@ phonebook = (json, state) -> idiv.inject(left) idiv.inject(right) obj.inject(idiv) - - - \ No newline at end of file diff --git a/ui/js/coffee/sources.coffee b/ui/js/coffee/sources.coffee index c0012375..ebea14dd 100644 --- a/ui/js/coffee/sources.coffee +++ b/ui/js/coffee/sources.coffee @@ -70,9 +70,9 @@ deletesource = (hash) -> tr = get(hash) tr.parentNode.removeChild(tr) xdelete('sources', { id: hash }, null, null) - + sourceTypes = { - + } getSourceType = (main, t) -> if not sourceTypes[t] @@ -92,7 +92,7 @@ getSourceType = (main, t) -> app(tr, td) app(thead, tr) app(tbl, thead) - + tbody = new HTML('tbody') app(tbl, tbody) obj.inject(tbl) @@ -105,7 +105,7 @@ getSourceType = (main, t) -> return sourceTypes[t] sourcelist = (json, state) -> - + slist = mk('div') vlist = new HTML('div') if json.sources @@ -137,13 +137,13 @@ sourcelist = (json, state) -> d = mk('tr') set(d, 'id', source.sourceID) set(d, 'scope', 'row') - - + + t = mk('td') t.style.color = "#369" app(t, txt(source.sourceURL)) app(d, t) - + # Progress lastUpdate = 0 lastFailure = null @@ -157,9 +157,9 @@ sourcelist = (json, state) -> evolution: 'fa fa-signal' mail: 'fa fa-envelope' issues: 'fa fa-feed' - + t = new HTML('td', { style: { minWidth: "260px !important"}}) - + borked = false steps = ['sync', 'census', 'count', 'evolution'] if source.type in ['mail', 'ponymail', 'pipermail', 'hyperkitty'] @@ -202,7 +202,7 @@ sourcelist = (json, state) -> set(t, 'data-steps-failure', 'false') t.style.minWidth = "260px" app(d, t) - + lu = "Unknown" if lastUpdate > 0 lu = "" @@ -212,16 +212,16 @@ sourcelist = (json, state) -> if h > 0 lu = h + " hour" + (if h == 1 then '' else 's') + ", " lu += m + " 
minute" + (if m == 1 then '' else 's') + " ago." - + t = mk('td') t.style.textAlign = 'right' t.style.color = "#963" t.style.width = "200px !important" app(t, txt(lu)) app(d, t) - - - + + + status = mk('td') status.style.width = "600px !important" if lastFailure @@ -240,7 +240,7 @@ sourcelist = (json, state) -> else app(status, txt("No errors detected.")) app(d, status) - + act = mk('td') dbtn = mk('button') set(dbtn, 'class', 'btn btn-danger') @@ -248,10 +248,10 @@ sourcelist = (json, state) -> dbtn.style.padding = "2px" app(dbtn, txt("Delete")) app(act, dbtn) - + app(d, act) tbody.inject(d) - + for t, el of sourceTypes div = new HTML('div', {class: "sourceTypeIcon", onclick: "showType('#{t}');"}) el.btn = div @@ -262,12 +262,12 @@ sourcelist = (json, state) -> #app(slist, tbl) state.widget.inject(slist, true) state.widget.inject(vlist) - + retval = mk('div') set(retval, 'id', 'retval') state.widget.inject(retval) showType(true) # Show first available type - + showType = (t) -> for st, el of sourceTypes if st == t or t == true @@ -304,7 +304,7 @@ sourceadd = (json, state) -> div.inject(lbl) obj.inject(new HTML('p', {}, el.description or "")) obj.inject(keyValueForm('textarea', 'source', 'Source URL/ID:', "For example: " + el.example + ". 
You can add multiple sources, one per line.")) - + if el.optauth obj.inject((if el.authrequired then "Required" else "Optional") + " authentication options:") for abit in el.optauth @@ -314,12 +314,12 @@ sourceadd = (json, state) -> state.widget.inject(div, true) for k, v of aSourceTypes state.widget.inject(v) - + sourceAdded = (json, state) -> window.setTimeout(() -> location.reload() , 1000) - + addSources = (type, form) -> jsa = [] lineNo = 0 diff --git a/ui/js/coffee/widget.coffee b/ui/js/coffee/widget.coffee index 8e67d552..c578ee8e 100644 --- a/ui/js/coffee/widget.coffee +++ b/ui/js/coffee/widget.coffee @@ -29,10 +29,10 @@ toFullscreen = (id) -> FSA = get('FS_' + id) FSA.innerHTML = "Pop back" FSA.setAttribute("onclick", "toNormal('" + id + "');") - + CW = get('CW_' + id) CW.setAttribute("onclick", "toNormal('" + id + "');") - + w = findWidget(id) w.parent = obj.parentNode w.sibling = null @@ -44,7 +44,7 @@ toFullscreen = (id) -> break else if node == obj dobrk = true - + w.sibling = nxt ic = get('innercontents') app(ic, obj) @@ -64,14 +64,14 @@ toFullscreen = (id) -> toNormal = (id) -> obj = get(id) w = findWidget(id) - + FSA = get('FS_' + id) FSA.innerHTML = "Fullscreen" FSA.setAttribute("onclick", "toFullscreen('" + id + "');") - + CW = get('CW_' + id) CW.setAttribute("onclick", "findWidget('"+id+"').kill();") - + if w.sibling w.parent.insertBefore(obj, w.sibling) else @@ -98,7 +98,7 @@ updateWidgets = (type, target, eargs) -> console.log("pushed state " + wloc) window.onpopstate = (event) -> loadPageWidgets() - + for widget in widgetCache if type == widget.args.type widget.args.target = target and target or widget.args.target @@ -146,32 +146,32 @@ class pubWidget if clear @div.innerHTML = "" @div.appendChild(el) - + class Widget constructor: (@blocks, @args, pub) -> @id = Math.floor(Math.random()*1000000).toString(16) - + # Parent object div @div = document.createElement('div') @div.setAttribute("id", @id) @div.setAttribute("class", "x_panel 
snoot_widget") @div.style.float = 'left' @json = {} - - if (@blocks <= 2) + + if (@blocks <= 2) @div.setAttribute("class", "snoot_widget col-md-2 col-sm-4 col-xs-12") - else if (@blocks <= 3) + else if (@blocks <= 3) @div.setAttribute("class", "snoot_widget col-md-3 col-sm-6 col-xs-12") - else if (@blocks <= 4) + else if (@blocks <= 4) @div.setAttribute("class", "snoot_widget col-md-4 col-sm-8 col-xs-12") - else if (@blocks <= 6) + else if (@blocks <= 6) @div.setAttribute("class", "snoot_widget col-md-6 col-sm-12 col-xs-12") - else if (@blocks <= 9) + else if (@blocks <= 9) @div.setAttribute("class", "snoot_widget col-md-9 col-sm-12 col-xs-12") else @div.setAttribute("class", "snoot_widget col-md-12 col-sm-12 col-xs-12") - - + + if not pub # Title t = document.createElement('div') @@ -180,11 +180,11 @@ class Widget tt.style.fontSize = "17pt" tt.appendChild(document.createTextNode(@args.name)) t.appendChild(tt) - - # Menu + + # Menu ul = document.createElement('ul') ul.setAttribute("class", "nav navbar-right panel_toolbox") - + # Menu: collapse widget li = document.createElement('li') @collapse = document.createElement('a') @@ -194,7 +194,7 @@ class Widget @collapse.appendChild(i) li.appendChild(@collapse) ul.appendChild(li) - + @collapse.addEventListener "click", () -> id = this.parentNode.parentNode.parentNode.parentNode.getAttribute("id") panel = $('#'+id) @@ -208,10 +208,10 @@ class Widget else content.slideToggle(200) panel.css('height', 'auto') - + icon.toggleClass('fa-chevron-up fa-chevron-down'); - - + + # Menu: remove widget li = document.createElement('li') a = document.createElement('a') @@ -223,22 +223,22 @@ class Widget a.setAttribute("id", "CW_" + @id) li.appendChild(a) ul.appendChild(li) - + t.appendChild(ul) - + @div.appendChild(t) - + cldiv = document.createElement('div') cldiv.setAttribute("classs", "clearfix") @div.appendChild(cldiv) - + @cdiv = document.createElement('div') @cdiv.style.width = "100%" @cdiv.setAttribute("id", "contents_" + @id) 
@cdiv.setAttribute("class", "x_content") @div.appendChild(@cdiv) widgetCache.push(this) - + cog: (size = 100) -> idiv = document.createElement('div') idiv.setAttribute("class", "icon") @@ -249,20 +249,20 @@ class Widget idiv.appendChild(document.createTextNode('Loading, hang on tight..!')) @cdiv.innerHTML = "" @cdiv.appendChild(idiv) - + kill: () -> @div.parentNode.removeChild(@div) - + inject: (object, clear) -> if clear @cdiv.innerHTML = "" @cdiv.style.textAlign = 'left' @cdiv.appendChild(object) - + snap: (state) -> state.widget.cdiv.innerHTML = "
    Oh snap, something went wrong!" state.widget.cdiv.style.textAlign = 'center' - + load: (callback) -> # Insert spinning cog this.cog() @@ -298,11 +298,10 @@ class Row @cdiv.setAttribute("id", "contents_" + @id) @div.appendChild(@cdiv) document.getElementById('innercontents').appendChild(@div) - + inject: (object, clear) -> @cdiv.innerHTML = "" if clear if object instanceof Widget @cdiv.appendChild(object.div) else @cdiv.appendChild(object) - diff --git a/ui/js/coffee/widget_admin.coffee b/ui/js/coffee/widget_admin.coffee index 2ffa0d00..72e79747 100644 --- a/ui/js/coffee/widget_admin.coffee +++ b/ui/js/coffee/widget_admin.coffee @@ -1,5 +1,5 @@ orgadmin = (json, state) -> - + if globArgs.org and json.admin[globArgs.org] pdiv = document.createElement('div') id = globArgs.org @@ -7,12 +7,12 @@ orgadmin = (json, state) -> h2 = mk('h2') app(h2, txt("Editing: " + title)) app(pdiv, h2) - + obj = mk('form') h4 = mk('h4') app(h4, txt("Invite a new user to this org:")) app(obj, h4) - + div = mk('div') app(div, txt("Username (email): ")) inp = mk('input') @@ -21,7 +21,7 @@ orgadmin = (json, state) -> inp.style.width = "200px" app(div, inp) app(obj, div) - + div = mk('div') app(div, txt("Make administrator: ")) inp = mk('input') @@ -30,21 +30,21 @@ orgadmin = (json, state) -> set(inp, 'value', 'true') app(div, inp) app(obj, div) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'onclick', 'addorguser(this.form)') set(btn, 'value', "Add user") app(obj, btn) - + app(pdiv, obj) - - + + obj = mk('form') h4 = mk('h4') app(h4, txt("Remove a user from the org:")) app(obj, h4) - + div = mk('div') app(div, txt("Username (email): ")) inp = mk('input') @@ -53,7 +53,7 @@ orgadmin = (json, state) -> inp.style.width = "200px" app(div, inp) app(obj, div) - + div = mk('div') app(div, txt("Just remove admin privs (if any): ")) inp = mk('input') @@ -62,20 +62,20 @@ orgadmin = (json, state) -> set(inp, 'value', 'true') app(div, inp) app(obj, div) - + btn = mk('input') set(btn, 
'type', 'button') set(btn, 'onclick', 'remorguser(this.form)') set(btn, 'value', "Remove user") app(obj, btn) - + app(pdiv, obj) - + state.widget.inject(pdiv, true) else state.widget.inject(txt("You are not an admin of this organisation!")) - + addorguser = (form) -> js = { @@ -91,7 +91,7 @@ addorguser = (form) -> v = if form[i].checked then 'true' else 'false' if k in ['who', 'admin'] js[k] = v - + postJSON("admin-org", js, null, (a) -> alert("User added!") ) remorguser = (form) -> @@ -108,6 +108,5 @@ remorguser = (form) -> v = if form[i].checked then 'true' else 'false' if k in ['who', 'admin'] js[k] = v - + postJSON("admin-org", js, null, (a) -> alert("User removed!") ) - \ No newline at end of file diff --git a/ui/js/coffee/widget_affiliations.coffee b/ui/js/coffee/widget_affiliations.coffee index e0747e63..f91385f0 100644 --- a/ui/js/coffee/widget_affiliations.coffee +++ b/ui/js/coffee/widget_affiliations.coffee @@ -12,7 +12,7 @@ affiliation = (json, state) -> ngroups-- app(h3, txt("Found " + ngroups + " organisations/companies:")) app(obj, h3) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'class', 'btn btn-info') @@ -26,7 +26,7 @@ affiliation = (json, state) -> w.callback = affiliationWizard w.reload() ) - + p = mk('p') app(p, txt("You may use the ")) app(p, btn) @@ -132,7 +132,7 @@ affiliationWizard = (json, state) -> app(gdiv, sp) app(gdiv, mk('br')) app(obj,gdiv) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'class', 'btn btn-info') @@ -167,7 +167,7 @@ affiliate = (hash) -> if tag postJSON('affiliations', { tag: tags }, null, null) app(tr, txt("(Tagged as: " + tag + ") ")) - + altemail = (hash) -> tag = window.prompt("Please enter the alt email with which you wish to associate this source, or type nothing to clear alts.") if tag == "" @@ -178,4 +178,3 @@ altemail = (hash) -> if tag postJSON('affiliations', { altemail: tags }, null, null) app(tr, txt("(Affiliated as: " + tag + ") ")) - \ No newline at end of file diff --git 
a/ui/js/coffee/widget_bio.coffee b/ui/js/coffee/widget_bio.coffee index 4f4d3eb0..e8b55b47 100644 --- a/ui/js/coffee/widget_bio.coffee +++ b/ui/js/coffee/widget_bio.coffee @@ -13,7 +13,7 @@ bio = (json, state) -> firstemail = "Never" if json.bio.firstEmail firstemail = new Date(json.bio.firstEmail*1000).toDateString() - + bioOuter = new HTML('div', { class: 'media-event'} ) bioOuter.inject(new HTML('a', { class: 'pull-left bio-image'}, new HTML('img', { style: "width: 128px; height: 128px;", src: 'https://secure.gravatar.com/avatar/' + json.bio.gravatar + '.png?d=identicon&size=128'}) @@ -66,7 +66,7 @@ bio = (json, state) -> a = mk('a') set(a, 'href', 'javascript:void(affiliate("' + json.bio.id + '"));') app(a, txt("Set a tag")) - + egroups = [] if json.bio.alts and json.bio.alts.length for tag in json.bio.alts @@ -78,14 +78,14 @@ bio = (json, state) -> a2.style.marginLeft = "8px" set(a2, 'href', 'javascript:void(altemail("' + json.bio.id + '"));') app(a2, txt("Add alt email")) - + sp = mk('span') set(sp, 'id', 'tags_' + json.bio.id) app(obj, namecard) app(obj, a) app(obj, a2) app(obj, sp) - + else obj.innerHTML = "Person not found :/" state.widget.inject(obj, true) diff --git a/ui/js/coffee/widget_comstat.coffee b/ui/js/coffee/widget_comstat.coffee index a23cc42c..62381d80 100644 --- a/ui/js/coffee/widget_comstat.coffee +++ b/ui/js/coffee/widget_comstat.coffee @@ -4,9 +4,9 @@ comShow = (t) -> if (row.getAttribute("id")||"foo").match("comstat_#{t}_") row.style.display = "table-row" document.getElementById("comstat_#{t}_more").style.display = "none" - + comstat = (json, state) -> - + if json and json.stats row = new Row() p = new HTML('p', {}, @@ -21,12 +21,12 @@ comstat = (json, state) -> onchange: 'updateWidgets("comstat", null, { committersOnly: this.checked ? 
"true" : null });' }) lb = new HTML('label', { for: 'comonly' }, "Show only new committers, discard new authors.") - + row.inject(p) row.inject(chk) row.inject(lb) state.widget.inject(row.div, true) - + if json.stats.code.seen > 0 row = new Row() js = { alphaSort: true, counts: { @@ -44,7 +44,7 @@ comstat = (json, state) -> if json.stats.code.newcomers.length and json.stats.code.newcomers.length >= 0 nl = json.stats.code.newcomers.length stbl = new Widget(6, { name: "New code contributors (#{nl})" }) - + tbl = mk('table', {class: "table table-striped"}) tr = mk('tr', {}, [ mk('th', {}, "Avatar"), @@ -80,7 +80,7 @@ comstat = (json, state) -> app(tbl, tb) stbl.inject(tbl) row.inject(stbl) - + if json.stats.code.timeseries and json.stats.code.timeseries.length > 0 widget = new Widget(4, {name: "New code contributors over time:", representation: 'bars'}) widget.parent = state.widget @@ -89,9 +89,9 @@ comstat = (json, state) -> widget.json = js widget.callback = linechart linechart(js, { widget: widget}) - + state.widget.inject(row.div) - + if json.stats.issues.seen > 0 row = new Row() js = { alphaSort: true, counts: { @@ -109,7 +109,7 @@ comstat = (json, state) -> if json.stats.issues.newcomers.length and json.stats.issues.newcomers.length >= 0 nl = json.stats.issues.newcomers.length stbl = new Widget(6, { name: "New issue contributors (#{nl})" }) - + tbl = mk('table', {class: "table table-striped"}) tr = mk('tr', {}, [ mk('th', {}, "Avatar"), @@ -127,7 +127,7 @@ comstat = (json, state) -> key = json.bios[person].issue[1].key || url wh = new Date(json.bios[person].issue[0] * 1000.0).toDateString() person = json.bios[person].bio - + if i == 6 m = json.stats.issues.newcomers.length - i tr = mk('tr', {scope: 'row', id: 'comstat_issue_more'}, [ @@ -146,7 +146,7 @@ comstat = (json, state) -> app(tbl, tb) stbl.inject(tbl) row.inject(stbl) - + if json.stats.issues.timeseries and json.stats.issues.timeseries.length > 0 widget = new Widget(6, {name: "New issue contributors over 
time:", representation: 'bars'}) widget.parent = state.widget @@ -155,15 +155,15 @@ comstat = (json, state) -> widget.json = js widget.callback = linechart linechart(js, { widget: widget}) - - + + state.widget.inject(row.div) if json.stats.converts if json.stats.converts.issue_to_code.length and json.stats.converts.issue_to_code.length > 0 row = new Row() - + stbl = new Widget(6, { name: "Previous issue contributors who are now contributing code:" }) - + tbl = mk('table', {class: "table table-striped"}) tr = mk('tr', {}, [ mk('th', {}, "Avatar"), @@ -186,14 +186,14 @@ comstat = (json, state) -> app(tbl, tb) stbl.inject(tbl) row.inject(stbl) - + state.widget.inject(row.div) - + if json.stats.converts.email_to_code.length and json.stats.converts.email_to_code.length > 0 row = new Row() - + stbl = new Widget(6, { name: "Previous email authors who are now contributing code:" }) - + tbl = mk('table', {class: "table table-striped"}) tr = mk('tr', {}, [ mk('th', {}, "Avatar"), @@ -216,10 +216,10 @@ comstat = (json, state) -> app(tbl, tb) stbl.inject(tbl) row.inject(stbl) - + state.widget.inject(row.div) else notice = new HTML('h2', {}, "Community growth stats only works with user-defined views!") p = new HTML('p', {}, "To see community growth stats, please create a view of the code, email, bugs you wish to view stats for, or select an existng view in the list above") state.widget.inject(notice, true) - state.widget.inject(p) \ No newline at end of file + state.widget.inject(p) diff --git a/ui/js/coffee/widget_donut.coffee b/ui/js/coffee/widget_donut.coffee index 542e8405..a5359ce5 100644 --- a/ui/js/coffee/widget_donut.coffee +++ b/ui/js/coffee/widget_donut.coffee @@ -1,6 +1,6 @@ # Donut widget donut = (json, state) -> - + dt = [] dtl = [] l = 0 @@ -21,7 +21,7 @@ donut = (json, state) -> for item in dt dtl.push(dt.name) theme.color = genColors(a+1, 0.55, 0.475, true) #quickColors(a) - + if (state.widget.args.representation == 'commentcount') code = 0 comment = 0 @@ -31,7 
+31,7 @@ donut = (json, state) -> code += data.code comment += data.comment blank += data.blank||0 - + tot = code + comment dtl = ['Code', 'Comments'] dt = [ @@ -40,16 +40,16 @@ donut = (json, state) -> ] if blank > 0 dt.push({name: "Blanks", value: blank}) - + theme.color = genColors(3, 0.6, 0.5, true) - - + + if (state.widget.args.representation == 'sloccount' or (state.widget.args.representation != 'commentcount' and json.languages)) langs = json.languages for lang, data of langs tot += data.code top.push(lang) - + top.sort((a,b) => langs[b].code - langs[a].code) for lang in top l++ @@ -61,21 +61,19 @@ donut = (json, state) -> value: langs[lang].code }) dtl.push(lang) - - if (tot != ttot) + + if (tot != ttot) dtl.push('Other languages') dt.push( { name: 'Other languages', value: (tot-ttot) }) - + theme.color = genColors(17, 0.6, 0.5, true) - + data = {} for el in dt data[el.name] = el.value div = new HTML('div') state.widget.inject(div, true) chartBox = new Chart(div, 'donut', data, 25) - - \ No newline at end of file diff --git a/ui/js/coffee/widget_factors.coffee b/ui/js/coffee/widget_factors.coffee index ef642e66..789209ae 100644 --- a/ui/js/coffee/widget_factors.coffee +++ b/ui/js/coffee/widget_factors.coffee @@ -30,13 +30,13 @@ factors = (json, state) -> " #{pct}% change since last period" ]) h.inject(h2) - else + else h2 = new HTML('span', { style: { marginLeft: "8px", fontSize: "14px", color: 'green'}},[ new HTML('i', {class: "fa fa-chevron-circle-up"}), " +#{pct}% change since last period" ]) h.inject(h2) - + t = txt(factor.title) obj.inject(new HTML('div', {}, [h,t])) - state.widget.inject(obj, true) \ No newline at end of file + state.widget.inject(obj, true) diff --git a/ui/js/coffee/widget_jsondump.coffee b/ui/js/coffee/widget_jsondump.coffee index 8715b73e..677e9d48 100644 --- a/ui/js/coffee/widget_jsondump.coffee +++ b/ui/js/coffee/widget_jsondump.coffee @@ -2,4 +2,3 @@ jsondump = (json, state) -> pre = new HTML('pre', { style: { whiteSpace: 
'pre-wrap'}}) pre.inject(JSON.stringify(json, null, 2)) state.widget.inject(pre, true) - diff --git a/ui/js/coffee/widget_map.coffee b/ui/js/coffee/widget_map.coffee index db11d680..82695c56 100644 --- a/ui/js/coffee/widget_map.coffee +++ b/ui/js/coffee/widget_map.coffee @@ -1,6 +1,6 @@ # Donut widget worldmap = (json, state) -> - + dt = [] dtl = [] l = 0 @@ -15,7 +15,7 @@ worldmap = (json, state) -> ctotal += details.count if details.count > cmax cmax = details.count - + lmain = document.createElement('div') radius = ['30%', '50%'] if not state.widget.div.style.height @@ -29,7 +29,7 @@ worldmap = (json, state) -> lmain.style.width = "100%" state.widget.inject(lmain, true) echartMap = echarts.init(lmain, theme); - + echartMap.setOption({ title: { text: "Worldwide distribution by country" @@ -62,9 +62,9 @@ worldmap = (json, state) -> trigger: 'item', formatter: (params) -> return params.seriesName + '
    ' + params.name + ' : ' + (params.value||0).pretty(); - + }, - + series: [{ name: state.widget.name, type: 'map', @@ -79,4 +79,3 @@ worldmap = (json, state) -> }] }); theme.textStyle.fontSize = 12 - \ No newline at end of file diff --git a/ui/js/coffee/widget_messages.coffee b/ui/js/coffee/widget_messages.coffee index ec97b883..fa3b1dab 100644 --- a/ui/js/coffee/widget_messages.coffee +++ b/ui/js/coffee/widget_messages.coffee @@ -1,5 +1,5 @@ messages = (json, state) -> - + if isArray json obj = document.createElement('form') @@ -15,10 +15,10 @@ messages = (json, state) -> app(tr, td) app(thead, tr) app(tbl, thead) - + tbody = mk('tbody') app(tbl, tbody) - + for message in json tr = mk('tr') if message.read == false @@ -30,14 +30,14 @@ messages = (json, state) -> app(a, txt(new Date(message.epoch*1000).toString())) app(td, a) app(tr, td) - + td = mk('td') a = mk('a') set(a, 'href', '?page=messages&message=' + message.id) app(a, txt(message.senderName)) app(td, a) app(tr, td) - + td = mk('td') a = mk('a') set(a, 'href', '?page=messages&message=' + message.id) @@ -45,18 +45,18 @@ messages = (json, state) -> app(td, a) app(tr, td) app(tbody, tr) - + app(obj, tbl) - + items = recipient: 'Recipient ID' subject: "Message subject" body: "Message" - + h2 = mk('h2') app(h2, txt("Send a message:")) app(obj, h2) - + for item in ['recipient', 'subject', 'body'] div = mk('div') app(div, txt(items[item] + ": ")) @@ -71,13 +71,13 @@ messages = (json, state) -> set(inp, 'name', item) app(div, inp) app(obj, div) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'onclick', 'sendEmail(this.form)') set(btn, 'value', "Send message") app(obj, btn) - + #obj.innerHTML += JSON.stringify(json) state.widget.inject(obj, true) else @@ -87,42 +87,42 @@ messages = (json, state) -> app(obj, b) app(obj, txt(json.senderName + ' (' + json.sender + ')')) app(obj, mk('br')) - + b = mk('b') app(b, txt("Date: ")) app(obj, b) app(obj, txt(new Date(json.epoch*1000).toString())) app(obj, 
mk('br')) - + b = mk('b') app(b, txt("Subject: ")) app(obj, b) app(obj, txt(json.subject)) app(obj, mk('br')) app(obj, mk('br')) - + pre = mk('pre') app(pre, txt(json.body)) app(obj, pre) - + app(obj, mk('hr')) - + form = mk('form') items = recipient: 'Recipient ID' subject: "Message subject" body: "Message" - + h2 = mk('h2') app(h2, txt("Send a reply:")) app(form, h2) - + reply = { recipient: json.sender subject: 'RE: ' + json.subject body: '' } - + for item in ['recipient', 'subject', 'body'] div = mk('div') app(div, txt(items[item] + ": ")) @@ -138,13 +138,13 @@ messages = (json, state) -> set(inp, 'name', item) app(div, inp) app(form, div) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'onclick', 'sendEmail(this.form)') set(btn, 'value', "Send message") app(form, btn) - + app(obj, form) state.widget.inject(obj, true) @@ -158,4 +158,3 @@ sendEmail = (form) -> if k in ['recipient', 'subject', 'body'] js[k] = v postJSON("messages", js, null, (a) -> alert("Mail sent!") ) - \ No newline at end of file diff --git a/ui/js/coffee/widget_mvp.coffee b/ui/js/coffee/widget_mvp.coffee index 3cfd5785..a89e4864 100644 --- a/ui/js/coffee/widget_mvp.coffee +++ b/ui/js/coffee/widget_mvp.coffee @@ -12,15 +12,15 @@ mvp = (json, state) -> n = null globArgs.size = n updateWidgets('mvp', null, { size: n }) - + , false) state.widget.inject( new HTML('b', {}, "List size: "), true ) state.widget.inject(nlist) - - + + nlist = new HTML('select', { name: 'sort', id: 'sort'}) for i in ['commits', 'issues', 'emails'] el = new HTML('option', { value: i, text: i}) @@ -34,13 +34,13 @@ mvp = (json, state) -> n = null globArgs.sort = n updateWidgets('mvp', null, { sort: n }) - + , false) state.widget.inject( new HTML('b', {}, " Sort by: "), ) state.widget.inject(nlist) - + tbl = mk('table', {class: "table table-striped"}) tr = mk('tr', {}, [ mk('th', {}, "Rank"), @@ -67,5 +67,3 @@ mvp = (json, state) -> app(tbl, tb) state.widget.inject(tbl) #updateWidgets('trends', null, { email: email 
}) - - \ No newline at end of file diff --git a/ui/js/coffee/widget_paragraph.coffee b/ui/js/coffee/widget_paragraph.coffee index e32970ed..211e4b7d 100644 --- a/ui/js/coffee/widget_paragraph.coffee +++ b/ui/js/coffee/widget_paragraph.coffee @@ -12,6 +12,3 @@ paragraph = (json, state) -> app(lmain, para) else app(lmain, mk('p', {style:"font-size: 1.2rem;"}, json.text)) - - - \ No newline at end of file diff --git a/ui/js/coffee/widget_preferences.coffee b/ui/js/coffee/widget_preferences.coffee index d1bd94f9..08aebefb 100644 --- a/ui/js/coffee/widget_preferences.coffee +++ b/ui/js/coffee/widget_preferences.coffee @@ -9,7 +9,7 @@ preferences = (json, state) -> token: "API token" desc = tag: "If set, only sources with this tag will be shown in your views." - + for item in ['screenname', 'fullname', 'email', 'tag', 'token'] div = mk('div') app(div, txt(items[item] + ": ")) @@ -43,16 +43,16 @@ preferences = (json, state) -> app(list, opt) app(div,list) app(obj, div) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'onclick', 'saveprefs(this.form)') set(btn, 'value', "Save preferences") app(obj, btn) - + #obj.innerHTML += JSON.stringify(json) state.widget.inject(obj, true) - + # Org admin? 
if json.admin aobj = mk('div') @@ -81,4 +81,3 @@ saveprefs = (form) -> if k in ['screenname', 'fullname', 'email', 'tag', 'organisation'] js[k] = v postJSON("preferences", js, null, (a) -> alert("Preferences saved!") ) - \ No newline at end of file diff --git a/ui/js/coffee/widget_publisher.coffee b/ui/js/coffee/widget_publisher.coffee index 100269d4..c0b3fc1e 100644 --- a/ui/js/coffee/widget_publisher.coffee +++ b/ui/js/coffee/widget_publisher.coffee @@ -3,7 +3,7 @@ publisherWidget = null publisherPublic = (json, state) -> publisher(json, state, true) - + publisher = (json, state, nolink) -> div = document.createElement('div') state.public = true @@ -33,12 +33,12 @@ publisher = (json, state, nolink) -> if not location.href.match(/snoot\.io/) link = mk('a', { href: "https://www.snoot.io/", style: "font-size: 10px; margin-left: 60px; font-family: sans-serif;"}, "Data courtesy of Snoot.io") state.widget.inject(link) - + publishWidget = () -> postJSON("publish", { publish: JSON.parse(viewJS) }, null, postPublishLink) - + postPublishLink = (json, state) -> if json.id pdiv = get('publishercode') @@ -52,4 +52,3 @@ postPublishLink = (json, state) -> app(pdiv, txt("Script code for publishing:\n\n
    \n\n#{added}")) else alert("Something broke :(") - \ No newline at end of file diff --git a/ui/js/coffee/widget_punchcard.coffee b/ui/js/coffee/widget_punchcard.coffee index 9071db98..b92e77af 100644 --- a/ui/js/coffee/widget_punchcard.coffee +++ b/ui/js/coffee/widget_punchcard.coffee @@ -17,10 +17,6 @@ punchcard = (json, state) -> div = document.createElement('div') if json.text div.inject(new HTML('p', {}, json.text)) - + state.widget.inject(div, true) pc = new Chart(div, 'punchcard', json, {punchcard: true}) - - - - \ No newline at end of file diff --git a/ui/js/coffee/widget_radar.coffee b/ui/js/coffee/widget_radar.coffee index 63aac0b8..14410be6 100644 --- a/ui/js/coffee/widget_radar.coffee +++ b/ui/js/coffee/widget_radar.coffee @@ -3,13 +3,13 @@ radarIndicators = [] radar = (json, state) -> - + lmain = new HTML('div') state.widget.inject(lmain, true) - + radarChart = new Chart(lmain, 'radar', json.radar) - - + + # Harmonizer id = Math.floor(Math.random() * 987654321).toString(16) chk = document.createElement('input') @@ -23,7 +23,7 @@ radar = (json, state) -> if this.checked harmonize = 'true' globArgs['harmonize'] = 'true' - + updateWidgets('radar', null, { harmonize: harmonize }) ) state.widget.inject(mk('br')) @@ -35,7 +35,7 @@ radar = (json, state) -> label.style.paddingLeft = '5px' label.appendChild(document.createTextNode('Harmonize edges')) state.widget.inject(label) - + # Relativizer id = Math.floor(Math.random() * 987654321).toString(16) chk = document.createElement('input') @@ -49,7 +49,7 @@ radar = (json, state) -> if this.checked relativize = 'true' globArgs['relativize'] = 'true' - + updateWidgets('radar', null, { relativize: relativize }) ) state.widget.inject(mk('br')) @@ -60,4 +60,4 @@ radar = (json, state) -> chk.setAttribute("title", "Check this box to force all areas to be relative to their own projects (and not the compared projects). 
This may help to display foucs areas.") label.style.paddingLeft = '5px' label.appendChild(document.createTextNode('Make all projects relative to themselves')) - state.widget.inject(label) \ No newline at end of file + state.widget.inject(label) diff --git a/ui/js/coffee/widget_relation.coffee b/ui/js/coffee/widget_relation.coffee index 474c93d5..8bddba7d 100644 --- a/ui/js/coffee/widget_relation.coffee +++ b/ui/js/coffee/widget_relation.coffee @@ -2,23 +2,23 @@ relationship = (json, state) -> div = document.createElement('div') state.widget.inject(div, true) chart = new Chart(div, 'relationship', json, {}) - - + + id = Math.floor(Math.random() * 987654321).toString(16) invchk = new HTML('input', { class: "uniform", style: { marginRight: "10px"}, id: "author_#{id}", type: 'checkbox', checked: globArgs.author, name: 'author', value: 'true' }) - + invchk.addEventListener("change", () -> author = null if this.checked author = 'true' globArgs['author'] = 'true' - + updateWidgets('relationship', null, { author: author }) ) invlbl = new HTML('label', { for: "author_#{id}"}, "Inverse map (sender <-> recipient)") state.widget.inject(invchk) state.widget.inject(invlbl) - + state.widget.inject(new HTML('br')) state.widget.inject(new HTML('span', {}, "Minimum signal strength: ")) sigsel = new HTML('select', {id: "signal_#{id}"}) @@ -30,7 +30,7 @@ relationship = (json, state) -> if this.value links = this.value globArgs['links'] = links - + updateWidgets('relationship', null, { links: links }) ) - state.widget.inject(sigsel) \ No newline at end of file + state.widget.inject(sigsel) diff --git a/ui/js/coffee/widget_report.coffee b/ui/js/coffee/widget_report.coffee index d056b6eb..56a7f9c4 100644 --- a/ui/js/coffee/widget_report.coffee +++ b/ui/js/coffee/widget_report.coffee @@ -12,8 +12,8 @@ rcollate = (list) -> report = (json, state) -> div = document.createElement('div') state.widget.inject(div, true) - - + + # Get + write the age of the project, if possible if json.projectAge 
== 0 app(div, mk('h3', {}, "We were unable to determine the age of this project, sorry!")) @@ -22,18 +22,18 @@ report = (json, state) -> ageInYears = parseInt(json.projectAge / (86400*365.25)) age = mk('h3', {}, "Estimated age of project: #{ageInMonths} months (#{ageInYears} years)") app(div, age) - - + + # Commit rate trends if ageInYears >= 1 title = mk('h2', {}, "Long range trends:") - + app(div, title) - + # Commits stitle = mk('h3', {}, "Commits:") carr = [] - + # 5 year commit trend if ageInYears >= 5 pct = json.commits['5'].angle @@ -51,7 +51,7 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in commits in the long term (5+ years)" carr.push(rtext) - + # 2 year commit trend if ageInYears >= 2 pct = json.commits['2'].angle @@ -69,7 +69,7 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in commits in the medium term (2 years)" carr.push(rtext) - + # 1 year commit trend if ageInYears >= 1 pct = json.commits['1'].angle @@ -87,15 +87,15 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in commits in the short term (past year)" carr.push(rtext) - + p = mk('p', {}, "This project has experienced " + rcollate(carr) + ".") app(div, stitle) app(div, p) - + # Contributors stitle = mk('h3', {}, "Contributors:") carr = [] - + # 5 year commit trend if ageInYears >= 5 pct = json.authors['5'].authors.angle @@ -113,7 +113,7 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in contributors in the long term (5+ years)" carr.push(rtext) - + # 2 year commit trend if ageInYears >= 2 pct = json.authors['2'].authors.angle @@ -131,7 +131,7 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in contributors in the medium term (2 years)" carr.push(rtext) - + # 1 year commit trend if ageInYears >= 1 pct = json.authors['1'].authors.angle @@ -149,10 +149,9 @@ report = (json, state) -> if pct > 50 rtext = "a strong increase in contributors in the short term (past year)" carr.push(rtext) - + active = 
parseInt(json.authors['1'].authors.average) carr.push("currently has #{active} active contributors") p = mk('p', {}, "The project has had " + rcollate(carr) + ".") app(div, stitle) app(div, p) - \ No newline at end of file diff --git a/ui/js/coffee/widget_top5.coffee b/ui/js/coffee/widget_top5.coffee index 7cddea0a..e3bdb372 100644 --- a/ui/js/coffee/widget_top5.coffee +++ b/ui/js/coffee/widget_top5.coffee @@ -70,20 +70,20 @@ top5 = (json, state) -> pos = 5 while pos < json.topN.items.length nid = id + "_show_" + pos - + obj.inject(new HTML('a', { style: { cursor: 'pointer'}, onclick: "this.style.display = 'none'; get('#{nid}').style.display = 'block';"}, "Show more...")) obj = new HTML('div', { id: nid, style: { display: 'none'}}) make5(obj, json, pos) state.widget.inject(obj) pos += 5 - - + + showMore = (id) -> obj = document.getElementById(id) if obj obj.style.display = "block" - + filterPerson = (email) -> if email == "" @@ -96,4 +96,3 @@ filterPerson = (email) -> updateWidgets('relationship', null, { email: email }) updateWidgets('viewpicker', null, { email: email }) globArgs.email = email - \ No newline at end of file diff --git a/ui/js/coffee/widget_treemap.coffee b/ui/js/coffee/widget_treemap.coffee index 00a3074b..5b9cb96b 100644 --- a/ui/js/coffee/widget_treemap.coffee +++ b/ui/js/coffee/widget_treemap.coffee @@ -3,7 +3,7 @@ treemap = (json, state) -> cats = new Array() dates = new Array() catdata = {} - + filled = { areaStyle: {type: 'default' } } if json.widgetType if json.widgetType.chartType @@ -16,25 +16,25 @@ treemap = (json, state) -> #type = state.widget.args.representation if not json.widget.title or json.widget.title.length == 0 json.widget.title = 'Languages' - + if not state.widget.div.style.height div.style.minHeight = "900px" else div.style.minHeight = "100%" if state.widget.fullscreen div.style.minHeight = (window.innerHeight - 100) + "px" - + state.widget.inject(div, true) - - - + + + range = "" rect = div.getBoundingClientRect() 
theme.color = genColors(json.treemap.length+1, 0.6, 0.5, true) colors = genColors(json.treemap.length+1, 0.6, 0.5, true) theme.textStyle.fontSize = Math.max(12, window.innerHeight/100) echartLine = echarts.init(div, theme); - + ld = [] for lang, i in json.treemap ld.push(lang) @@ -45,7 +45,7 @@ treemap = (json, state) -> color: colors[i] } } - + option = { title: { @@ -58,7 +58,7 @@ treemap = (json, state) -> #selectedMode: 'single', data: ld }], - + tooltip: { show: true, feature: { @@ -71,18 +71,18 @@ treemap = (json, state) -> value = info.value; treePathInfo = info.treePathInfo; treePath = []; - + for i in [1...treePathInfo.length] treePath.push(treePathInfo[i].name) - - + + return [ '
    ' + treePath.join('/') + '
    ', 'Lines of Code: ' + value.pretty(), ].join(''); - + }, - + series: [ { name:json.widget.title, @@ -122,4 +122,3 @@ treemap = (json, state) -> ] } echartLine.setOption(option = option); - \ No newline at end of file diff --git a/ui/js/coffee/widget_trend.coffee b/ui/js/coffee/widget_trend.coffee index e8d661cb..090d7b3d 100644 --- a/ui/js/coffee/widget_trend.coffee +++ b/ui/js/coffee/widget_trend.coffee @@ -19,23 +19,23 @@ trendBox = (icon, count, title, desc) -> i.setAttribute("class", "fa " + (icons[icon] || 'fa-comments-o')) idiv.appendChild(i) cdiv.appendChild(idiv) - + # Count codiv = document.createElement('div') codiv.setAttribute("class", "count") codiv.appendChild(document.createTextNode(count)) cdiv.appendChild(codiv) - + # Title h3 = document.createElement('h4') h3.appendChild(document.createTextNode(title)) cdiv.appendChild(h3) - + # Description p = document.createElement('p') p.appendChild(document.createTextNode(desc)) cdiv.appendChild(p) - + div.appendChild(cdiv) return div @@ -57,6 +57,3 @@ trend = (json, state) -> tb = trendBox(icon, data.after.pretty(), data.title, linediff) state.widget.inject(tb, wipe) wipe = false - - - \ No newline at end of file diff --git a/ui/js/coffee/widget_views.coffee b/ui/js/coffee/widget_views.coffee index 8893a37d..28744955 100644 --- a/ui/js/coffee/widget_views.coffee +++ b/ui/js/coffee/widget_views.coffee @@ -65,9 +65,9 @@ filterView = (val) -> me.style.background = "#4B8" me.style.color = "#FFF" me.style.display = 'block' - + manageviews = (json, state) -> - + obj = mk('div') p = mk('p') app(p, txt("Views allow you to quickly set up a group of sources to view as a sub-organisation, much like tags, but faster.")) @@ -75,15 +75,15 @@ manageviews = (json, state) -> h3 = mk('h3') noviews = json.views.length || 0 app(h3, txt("You currently have " + noviews + " view" + (if noviews == 1 then '' else 's') + " in your database ")) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'class', 'btn btn-success') 
set(btn, 'value', 'Create a new view') set(btn, 'onclick', 'get("newdiv").style.display = "block"; this.style.display = "none";') - app(h3, btn) + app(h3, btn) app(obj, h3) - + newdiv = mk('div') set(newdiv, 'id', 'newdiv') newdiv.style.display = "none" @@ -94,7 +94,7 @@ manageviews = (json, state) -> app(newdiv, txt("Name your new view: ")) app(newdiv, inp) app(newdiv, mk('br')) - + if userAccount.userlevel == 'admin' or userAccount.defaultOrganisation in userAccount.ownerships inp = mk('input') set(inp, 'type', 'checkbox') @@ -102,7 +102,7 @@ manageviews = (json, state) -> app(newdiv, txt("Make view public (global): ")) app(newdiv, inp) app(newdiv, mk('br')) - + inp = mk('input') set(inp, 'type', 'text') set(inp, 'id', 'viewfilter') @@ -111,7 +111,7 @@ manageviews = (json, state) -> app(newdiv, inp) app(newdiv, mk('i', {}, "You can use the filter-select to quickly mark sources based on a regex. Type in 'foo' to select all sources matching 'foo' etc.")) app(newdiv, mk('br')) - + app(newdiv, txt("Select the sources you wish to add to this view below:")) app(newdiv, mk('br')) btn = mk('input') @@ -150,7 +150,7 @@ manageviews = (json, state) -> set(btn, 'value', 'Save view') set(btn, 'onclick', 'saveview();') app(newdiv, btn) - + app(obj, newdiv) for view in json.views popdiv = mk('div') @@ -163,8 +163,8 @@ manageviews = (json, state) -> popdiv.style.background = "#323234" h4.style.display = "inline-block" app(popdiv, h4) - - + + btn = mk('input') set(btn, 'type', 'button') set(btn, 'class', 'btn btn-warning') @@ -182,7 +182,7 @@ manageviews = (json, state) -> btn.style.marginLeft = "20px" btn.style.padding = "2px" app(popdiv, btn) - + btn = mk('input') set(btn, 'type', 'button') set(btn, 'class', 'btn btn-success') @@ -191,7 +191,7 @@ manageviews = (json, state) -> btn.style.marginLeft = "20px" btn.style.padding = "2px" app(popdiv, btn) - + h4.style.color = "#FFA" h4.style.cursor = 'pointer' set(h4, 'onclick', "get('" + view.id + "').style.display = (get('" + 
view.id + "').style.display == 'block') ? 'none' : 'block'") @@ -245,6 +245,4 @@ manageviews = (json, state) -> app(newdiv, btn) app(obj, popdiv) app(obj, newdiv) - state.widget.inject(obj, true) - - \ No newline at end of file + state.widget.inject(obj, true) diff --git a/ui/js/core.js b/ui/js/core.js index f0918731..ff814f00 100644 --- a/ui/js/core.js +++ b/ui/js/core.js @@ -202,4 +202,4 @@ Metis.panelBodyCollapse(); Metis.boxHiding(); }); -})(jQuery); \ No newline at end of file +})(jQuery); diff --git a/ui/js/d3.min.js b/ui/js/d3.min.js index 2135fcb4..57d22b5b 100644 --- a/ui/js/d3.min.js +++ b/ui/js/d3.min.js @@ -1,2 +1,2 @@ // https://d3js.org Version 4.10.2. Copyright 2017 Mike Bostock. -(function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n(t.d3=t.d3||{})})(this,function(t){"use strict";function n(t){return function(n,e){return ss(t(n),e)}}function e(t,n){return[t,n]}function r(t,n,e){var r=(n-t)/Math.max(0,e),i=Math.floor(Math.log(r)/Math.LN10),o=r/Math.pow(10,i);return i>=0?(o>=Ts?10:o>=ks?5:o>=Ns?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(o>=Ts?10:o>=ks?5:o>=Ns?2:1)}function i(t,n,e){var r=Math.abs(n-t)/Math.max(0,e),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),o=r/i;return o>=Ts?i*=10:o>=ks?i*=5:o>=Ns&&(i*=2),n=0&&(e=t.slice(r+1),t=t.slice(0,r)),t&&!n.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:e}})}function v(t,n){for(var e,r=0,i=t.length;r=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}})}function T(t){return function(){var n=this.__on;if(n){for(var e,r=0,i=-1,o=n.length;rn?1:t>=n?0:NaN}function R(t){return function(){this.removeAttribute(t)}}function L(t){return function(){this.removeAttributeNS(t.space,t.local)}}function q(t,n){return function(){this.setAttribute(t,n)}}function U(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function D(t,n){return function(){var 
e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function O(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function F(t){return function(){this.style.removeProperty(t)}}function I(t,n,e){return function(){this.style.setProperty(t,n,e)}}function Y(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function B(t,n){return t.style.getPropertyValue(n)||uf(t).getComputedStyle(t,null).getPropertyValue(n)}function j(t){return function(){delete this[t]}}function H(t,n){return function(){this[t]=n}}function X(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function $(t){return t.trim().split(/^|\s+/)}function V(t){return t.classList||new W(t)}function W(t){this._node=t,this._names=$(t.getAttribute("class")||"")}function Z(t,n){for(var e=V(t),r=-1,i=n.length;++r>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1)):(n=gf.exec(t))?kt(parseInt(n[1],16)):(n=mf.exec(t))?new At(n[1],n[2],n[3],1):(n=xf.exec(t))?new At(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=bf.exec(t))?Nt(n[1],n[2],n[3],n[4]):(n=wf.exec(t))?Nt(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=Mf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,1):(n=Tf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,n[4]):kf.hasOwnProperty(t)?kt(kf[t]):"transparent"===t?new At(NaN,NaN,NaN,0):null}function kt(t){return new At(t>>16&255,t>>8&255,255&t,1)}function Nt(t,n,e,r){return r<=0&&(t=n=e=NaN),new At(t,n,e,r)}function St(t){return t instanceof Mt||(t=Tt(t)),t?(t=t.rgb(),new At(t.r,t.g,t.b,t.opacity)):new At}function Et(t,n,e,r){return 1===arguments.length?St(t):new At(t,n,e,null==r?1:r)}function At(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function Ct(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new Rt(t,n,e,r)}function zt(t){if(t instanceof Rt)return new Rt(t.h,t.s,t.l,t.opacity);if(t 
instanceof Mt||(t=Tt(t)),!t)return new Rt;if(t instanceof Rt)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),u=NaN,a=o-i,c=(o+i)/2;return a?(u=n===o?(e-r)/a+6*(e0&&c<1?0:u,new Rt(u,a,c,t.opacity)}function Pt(t,n,e,r){return 1===arguments.length?zt(t):new Rt(t,n,e,null==r?1:r)}function Rt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Lt(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}function qt(t){if(t instanceof Dt)return new Dt(t.l,t.a,t.b,t.opacity);if(t instanceof Ht){var n=t.h*Nf;return new Dt(t.l,Math.cos(n)*t.c,Math.sin(n)*t.c,t.opacity)}t instanceof At||(t=St(t));var e=Yt(t.r),r=Yt(t.g),i=Yt(t.b),o=Ot((.4124564*e+.3575761*r+.1804375*i)/Ef),u=Ot((.2126729*e+.7151522*r+.072175*i)/Af);return new Dt(116*u-16,500*(o-u),200*(u-Ot((.0193339*e+.119192*r+.9503041*i)/Cf)),t.opacity)}function Ut(t,n,e,r){return 1===arguments.length?qt(t):new Dt(t,n,e,null==r?1:r)}function Dt(t,n,e,r){this.l=+t,this.a=+n,this.b=+e,this.opacity=+r}function Ot(t){return t>Lf?Math.pow(t,1/3):t/Rf+zf}function Ft(t){return t>Pf?t*t*t:Rf*(t-zf)}function It(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Yt(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Bt(t){if(t instanceof Ht)return new Ht(t.h,t.c,t.l,t.opacity);t instanceof Dt||(t=qt(t));var n=Math.atan2(t.b,t.a)*Sf;return new Ht(n<0?n+360:n,Math.sqrt(t.a*t.a+t.b*t.b),t.l,t.opacity)}function jt(t,n,e,r){return 1===arguments.length?Bt(t):new Ht(t,n,e,null==r?1:r)}function Ht(t,n,e,r){this.h=+t,this.c=+n,this.l=+e,this.opacity=+r}function Xt(t){if(t instanceof Vt)return new Vt(t.h,t.s,t.l,t.opacity);t instanceof At||(t=St(t));var n=t.r/255,e=t.g/255,r=t.b/255,i=(Bf*r+If*n-Yf*e)/(Bf+If-Yf),o=r-i,u=(Ff*(e-i)-Df*o)/Of,a=Math.sqrt(u*u+o*o)/(Ff*i*(1-i)),c=a?Math.atan2(u,o)*Sf-120:NaN;return new Vt(c<0?c+360:c,a,i,t.opacity)}function $t(t,n,e,r){return 1===arguments.length?Xt(t):new Vt(t,n,e,null==r?1:r)}function 
Vt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Wt(t,n,e,r,i){var o=t*t,u=o*t;return((1-3*t+3*o-u)*n+(4-6*o+3*u)*e+(1+3*t+3*o-3*u)*r+u*i)/6}function Zt(t,n){return function(e){return t+e*n}}function Gt(t,n,e){return t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}function Jt(t,n){var e=n-t;return e?Zt(t,e>180||e<-180?e-360*Math.round(e/360):e):Jf(isNaN(t)?n:t)}function Qt(t){return 1==(t=+t)?Kt:function(n,e){return e-n?Gt(n,e,t):Jf(isNaN(n)?e:n)}}function Kt(t,n){var e=n-t;return e?Zt(t,e):Jf(isNaN(t)?n:t)}function tn(t){return function(n){var e,r,i=n.length,o=new Array(i),u=new Array(i),a=new Array(i);for(e=0;e180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:rl(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}function a(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:rl(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}function c(t,n,e,r,o,u){if(t!==e||n!==r){var a=o.push(i(o)+"scale(",null,",",null,")");u.push({i:a-4,x:rl(t,e)},{i:a-2,x:rl(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}return function(n,e){var r=[],i=[];return n=t(n),e=t(e),o(n.translateX,n.translateY,e.translateX,e.translateY,r,i),u(n.rotate,e.rotate,r,i),a(n.skewX,e.skewX,r,i),c(n.scaleX,n.scaleY,e.scaleX,e.scaleY,r,i),n=e=null,function(t){for(var n,e=-1,o=i.length;++e=0&&n._call.call(null,t),n=n._next;--Ml}function _n(){El=(Sl=Cl.now())+Al,Ml=Tl=0;try{vn()}finally{Ml=0,gn(),El=0}}function yn(){var t=Cl.now(),n=t-Sl;n>Nl&&(Al-=n,Sl=t)}function gn(){for(var t,n,e=Vf,r=1/0;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:Vf=n);Wf=t,mn(r)}function mn(t){Ml||(Tl&&(Tl=clearTimeout(Tl)),t-El>24?(t<1/0&&(Tl=setTimeout(_n,t-Cl.now()-Al)),kl&&(kl=clearInterval(kl))):(kl||(Sl=Cl.now(),kl=setInterval(yn,Nl)),Ml=1,zl(_n)))}function xn(t,n){var e=t.__transition;if(!e||!(e=e[n])||e.state>ql)throw new Error("too late");return e}function bn(t,n){var e=t.__transition;if(!e||!(e=e[n])||e.state>Dl)throw 
new Error("too late");return e}function wn(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("too late");return e}function Mn(t,n,e){function r(c){var s,f,l,h;if(e.state!==Ul)return o();for(s in a)if((h=a[s]).name===e.name){if(h.state===Ol)return Pl(r);h.state===Fl?(h.state=Yl,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete a[s]):+s=0&&(t=t.slice(0,n)),!t||"start"===t})}function Yn(t,n,e){var r,i,o=In(n)?xn:bn;return function(){var u=o(this,t),a=u.on;a!==r&&(i=(r=a).copy()).on(n,e),u.on=i}}function Bn(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}function jn(t,n){var e,r,i;return function(){var o=B(this,t),u=(this.style.removeProperty(t),B(this,t));return o===u?null:o===e&&u===r?i:i=n(e=o,r=u)}}function Hn(t){return function(){this.style.removeProperty(t)}}function Xn(t,n,e){var r,i;return function(){var o=B(this,t);return o===e?null:o===r?i:i=n(r=o,e)}}function $n(t,n,e){var r,i,o;return function(){var u=B(this,t),a=e(this);return null==a&&(this.style.removeProperty(t),a=B(this,t)),u===a?null:u===r&&a===i?o:o=n(r=u,i=a)}}function Vn(t,n,e){function r(){var r=this,i=n.apply(r,arguments);return i&&function(n){r.style.setProperty(t,i(n),e)}}return r._value=n,r}function Wn(t){return function(){this.textContent=t}}function Zn(t){return function(){var n=t(this);this.textContent=null==n?"":n}}function Gn(t,n,e,r){this._groups=t,this._parents=n,this._name=e,this._id=r}function Jn(t){return dt().transition(t)}function Qn(){return++$l}function Kn(t){return((t*=2)<=1?t*t:--t*(2-t)+1)/2}function te(t){return((t*=2)<=1?t*t*t:(t-=2)*t*t+2)/2}function ne(t){return(1-Math.cos(Jl*t))/2}function ee(t){return((t*=2)<=1?Math.pow(2,10*t-10):2-Math.pow(2,10-10*t))/2}function re(t){return((t*=2)<=1?1-Math.sqrt(1-t*t):Math.sqrt(1-(t-=2)*t)+1)/2}function ie(t){return(t=+t)Math.abs(t[1]-U[1])?b=!0:x=!0),U=t,m=!0,xh(),o()}function o(){var t;switch(y=U[0]-q[0],g=U[1]-q[1],T){case wh:case 
bh:k&&(y=Math.max(C-a,Math.min(P-p,y)),s=a+y,d=p+y),N&&(g=Math.max(z-l,Math.min(R-v,g)),h=l+g,_=v+g);break;case Mh:k<0?(y=Math.max(C-a,Math.min(P-a,y)),s=a+y,d=p):k>0&&(y=Math.max(C-p,Math.min(P-p,y)),s=a,d=p+y),N<0?(g=Math.max(z-l,Math.min(R-l,g)),h=l+g,_=v):N>0&&(g=Math.max(z-v,Math.min(R-v,g)),h=l,_=v+g);break;case Th:k&&(s=Math.max(C,Math.min(P,a-y*k)),d=Math.max(C,Math.min(P,p+y*k))),N&&(h=Math.max(z,Math.min(R,l-g*N)),_=Math.max(z,Math.min(R,v+g*N)))}d0&&(a=s-y),N<0?v=_-g:N>0&&(l=h-g),T=wh,F.attr("cursor",Eh.selection),o());break;default:return}xh()},!0).on("keyup.brush",function(){switch(t.event.keyCode){case 16:L&&(x=b=L=!1,o());break;case 18:T===Th&&(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh,o());break;case 32:T===wh&&(t.event.altKey?(k&&(p=d-y*k,a=s+y*k),N&&(v=_-g*N,l=h+g*N),T=Th):(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh),F.attr("cursor",Eh[M]),o());break;default:return}xh()},!0).on("mousemove.brush",e,!0).on("mouseup.brush",u,!0);lf(t.event.view)}ue(),jl(w),r.call(w),D.start()}}function a(){var t=this.__brush||{selection:null};return t.extent=s.apply(this,arguments),t.dim=n,t}var c,s=se,f=ce,l=h(e,"start","brush","end"),p=6;return e.move=function(t,e){t.selection?t.on("start.brush",function(){i(this,arguments).beforestart().start()}).on("interrupt.brush end.brush",function(){i(this,arguments).end()}).tween("brush",function(){function t(t){u.selection=1===t&&le(s)?null:f(t),r.call(o),a.brush()}var o=this,u=o.__brush,a=i(o,arguments),c=u.selection,s=n.input("function"==typeof e?e.apply(this,arguments):e,u.extent),f=cl(c,s);return c&&s?t:t(1)}):t.each(function(){var t=this,o=arguments,u=t.__brush,a=n.input("function"==typeof e?e.apply(t,o):e,u.extent),c=i(t,o).beforestart();jl(t),u.selection=null==a||le(a)?null:a,r.call(t),c.start().brush().end()})},o.prototype={beforestart:function(){return 1==++this.active&&(this.state.emitter=this,this.starting=!0),this},start:function(){return 
this.starting&&(this.starting=!1,this.emit("start")),this},brush:function(){return this.emit("brush"),this},end:function(){return 0==--this.active&&(delete this.state.emitter,this.emit("end")),this},emit:function(t){N(new mh(e,t,n.output(this.state.selection)),l.apply,l,[t,this.that,this.args])}},e.extent=function(t){return arguments.length?(s="function"==typeof t?t:gh([[+t[0][0],+t[0][1]],[+t[1][0],+t[1][1]]]),e):s},e.filter=function(t){return arguments.length?(f="function"==typeof t?t:gh(!!t),e):f},e.handleSize=function(t){return arguments.length?(p=+t,e):p},e.on=function(){var t=l.on.apply(l,arguments);return t===l?e:t},e}function pe(t){return function(n,e){return t(n.source.value+n.target.value,e.source.value+e.target.value)}}function de(){this._x0=this._y0=this._x1=this._y1=null,this._=""}function ve(){return new de}function _e(t){return t.source}function ye(t){return t.target}function ge(t){return t.radius}function me(t){return t.startAngle}function xe(t){return t.endAngle}function be(){}function we(t,n){var e=new be;if(t instanceof be)t.each(function(t,n){e.set(n,t)});else if(Array.isArray(t)){var r,i=-1,o=t.length;if(null==n)for(;++i=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u,i=p,!(p=p[l=f<<1|s]))return i[l]=d,t;if(a=+t._x.call(null,p.data),c=+t._y.call(null,p.data),n===a&&e===c)return d.next=p,i?i[l]=d:t._root=d,t;do{i=i?i[l]=new Array(4):t._root=new Array(4),(s=n>=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u}while((l=f<<1|s)==(h=(c>=u)<<1|a>=o));return i[h]=p,i[l]=d,t}function Re(t){return t[0]}function Le(t){return t[1]}function qe(t,n,e){var r=new Ue(null==n?Re:n,null==e?Le:e,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function Ue(t,n,e,r,i,o){this._x=t,this._y=n,this._x0=e,this._y0=r,this._x1=i,this._y1=o,this._root=void 0}function De(t){for(var n={data:t.data},e=n;t=t.next;)e=e.next={data:t.data};return n}function Oe(t){return t.x+t.vx}function Fe(t){return t.y+t.vy}function Ie(t){return t.index}function Ye(t,n){var e=t.get(n);if(!e)throw 
new Error("missing: "+n);return e}function Be(t){return t.x}function je(t){return t.y}function He(t){return new Xe(t)}function Xe(t){if(!(n=xp.exec(t)))throw new Error("invalid format: "+t);var n,e=n[1]||" ",r=n[2]||">",i=n[3]||"-",o=n[4]||"",u=!!n[5],a=n[6]&&+n[6],c=!!n[7],s=n[8]&&+n[8].slice(1),f=n[9]||"";"n"===f?(c=!0,f="g"):mp[f]||(f=""),(u||"0"===e&&"="===r)&&(u=!0,e="0",r="="),this.fill=e,this.align=r,this.sign=i,this.symbol=o,this.zero=u,this.width=a,this.comma=c,this.precision=s,this.type=f}function $e(n){return bp=Tp(n),t.format=bp.format,t.formatPrefix=bp.formatPrefix,bp}function Ve(){this.reset()}function We(t,n,e){var r=t.s=n+e,i=r-n,o=r-i;t.t=n-o+(e-i)}function Ze(t){return t>1?0:t<-1?cd:Math.acos(t)}function Ge(t){return t>1?sd:t<-1?-sd:Math.asin(t)}function Je(t){return(t=wd(t/2))*t}function Qe(){}function Ke(t,n){t&&Sd.hasOwnProperty(t.type)&&Sd[t.type](t,n)}function tr(t,n,e){var r,i=-1,o=t.length-e;for(n.lineStart();++i=0?1:-1,i=r*e,o=yd(n),u=wd(n),a=Rp*u,c=Pp*o+a*yd(i),s=a*r*wd(i);Ad.add(_d(s,c)),zp=t,Pp=o,Rp=u}function ur(t){return[_d(t[1],t[0]),Ge(t[2])]}function ar(t){var n=t[0],e=t[1],r=yd(e);return[r*yd(n),r*wd(n),wd(e)]}function cr(t,n){return t[0]*n[0]+t[1]*n[1]+t[2]*n[2]}function sr(t,n){return[t[1]*n[2]-t[2]*n[1],t[2]*n[0]-t[0]*n[2],t[0]*n[1]-t[1]*n[0]]}function fr(t,n){t[0]+=n[0],t[1]+=n[1],t[2]+=n[2]}function lr(t,n){return[t[0]*n,t[1]*n,t[2]*n]}function hr(t){var n=Td(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=n,t[1]/=n,t[2]/=n}function pr(t,n){Bp.push(jp=[Lp=t,Up=t]),nDp&&(Dp=n)}function dr(t,n){var e=ar([t*pd,n*pd]);if(Yp){var r=sr(Yp,e),i=sr([r[1],-r[0],0],r);hr(i),i=ur(i);var o,u=t-Op,a=u>0?1:-1,c=i[0]*hd*a,s=dd(u)>180;s^(a*OpDp&&(Dp=o):(c=(c+360)%360-180,s^(a*OpDp&&(Dp=n))),s?txr(Lp,Up)&&(Up=t):xr(t,Up)>xr(Lp,Up)&&(Lp=t):Up>=Lp?(tUp&&(Up=t)):t>Op?xr(Lp,t)>xr(Lp,Up)&&(Up=t):xr(t,Up)>xr(Lp,Up)&&(Lp=t)}else Bp.push(jp=[Lp=t,Up=t]);nDp&&(Dp=n),Yp=e,Op=t}function vr(){Rd.point=dr}function _r(){jp[0]=Lp,jp[1]=Up,Rd.point=pr,Yp=null}function 
yr(t,n){if(Yp){var e=t-Op;Pd.add(dd(e)>180?e+(e>0?360:-360):e)}else Fp=t,Ip=n;zd.point(t,n),dr(t,n)}function gr(){zd.lineStart()}function mr(){yr(Fp,Ip),zd.lineEnd(),dd(Pd)>ad&&(Lp=-(Up=180)),jp[0]=Lp,jp[1]=Up,Yp=null}function xr(t,n){return(n-=t)<0?n+360:n}function br(t,n){return t[0]-n[0]}function wr(t,n){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:ncd?t-ld:t<-cd?t+ld:t,n]}function Lr(t,n,e){return(t%=ld)?n||e?Ud(Ur(t),Dr(n,e)):Ur(t):n||e?Dr(n,e):Rr}function qr(t){return function(n,e){return n+=t,[n>cd?n-ld:n<-cd?n+ld:n,e]}}function Ur(t){var n=qr(t);return n.invert=qr(-t),n}function Dr(t,n){function e(t,n){var e=yd(n),a=yd(t)*e,c=wd(t)*e,s=wd(n),f=s*r+a*i;return[_d(c*o-f*u,a*r-s*i),Ge(f*o+c*u)]}var r=yd(t),i=wd(t),o=yd(n),u=wd(n);return e.invert=function(t,n){var e=yd(n),a=yd(t)*e,c=wd(t)*e,s=wd(n),f=s*o-c*u;return[_d(c*o+s*u,a*r+f*i),Ge(f*r-a*i)]},e}function Or(t,n,e,r,i,o){if(e){var u=yd(n),a=wd(n),c=r*e;null==i?(i=n+r*ld,o=n-c/2):(i=Fr(u,i),o=Fr(u,o),(r>0?io)&&(i+=r*ld));for(var s,f=i;r>0?f>o:f0)do{s.point(0===f||3===f?t:e,f>1?r:n)}while((f=(f+a+4)%4)!==l);else s.point(o[0],o[1])}function u(r,i){return dd(r[0]-t)0?0:3:dd(r[0]-e)0?2:1:dd(r[1]-n)0?1:0:i>0?3:2}function a(t,n){return c(t.x,n.x)}function c(t,n){var e=u(t,1),r=u(n,1);return e!==r?e-r:0===e?n[1]-t[1]:1===e?t[0]-n[0]:2===e?t[1]-n[1]:n[0]-t[0]}return function(u){function c(t,n){i(t,n)&&w.point(t,n)}function s(){for(var n=0,e=0,i=h.length;er&&(l-o)*(r-u)>(p-u)*(t-o)&&++n:p<=r&&(l-o)*(r-u)<(p-u)*(t-o)&&--n;return n}function f(o,u){var a=i(o,u);if(h&&p.push([o,u]),x)d=o,v=u,_=a,x=!1,a&&(w.lineStart(),w.point(o,u));else if(a&&m)w.point(o,u);else{var c=[y=Math.max(tv,Math.min(Kd,y)),g=Math.max(tv,Math.min(Kd,g))],s=[o=Math.max(tv,Math.min(Kd,o)),u=Math.max(tv,Math.min(Kd,u))];Gd(c,s,t,n,e,r)?(m||(w.lineStart(),w.point(c[0],c[1])),w.point(s[0],s[1]),a||w.lineEnd(),b=!1):a&&(w.lineStart(),w.point(o,u),b=!1)}y=o,g=u,m=a}var 
l,h,p,d,v,_,y,g,m,x,b,w=u,M=Zd(),T={point:c,lineStart:function(){T.point=f,h&&h.push(p=[]),x=!0,m=!1,y=g=NaN},lineEnd:function(){l&&(f(d,v),_&&m&&M.rejoin(),l.push(M.result())),T.point=c,m&&w.lineEnd()},polygonStart:function(){w=M,l=[],h=[],b=!0},polygonEnd:function(){var t=s(),n=b&&t,e=(l=Cs(l)).length;(n||e)&&(u.polygonStart(),n&&(u.lineStart(),o(null,null,1,u),u.lineEnd()),e&&Qd(l,a,t,o,u),u.polygonEnd()),w=u,l=h=p=null}};return T}}function jr(){iv.point=iv.lineEnd=Qe}function Hr(t,n){Dd=t*=pd,Od=wd(n*=pd),Fd=yd(n),iv.point=Xr}function Xr(t,n){t*=pd;var e=wd(n*=pd),r=yd(n),i=dd(t-Dd),o=yd(i),u=r*wd(i),a=Fd*e-Od*r*o,c=Od*e+Fd*r*o;rv.add(_d(Td(u*u+a*a),c)),Dd=t,Od=e,Fd=r}function $r(t,n){return!(!t||!fv.hasOwnProperty(t.type))&&fv[t.type](t,n)}function Vr(t,n){return 0===cv(t,n)}function Wr(t,n){var e=cv(t[0],t[1]);return cv(t[0],n)+cv(n,t[1])<=e+ad}function Zr(t,n){return!!ev(t.map(Gr),Jr(n))}function Gr(t){return(t=t.map(Jr)).pop(),t}function Jr(t){return[t[0]*pd,t[1]*pd]}function Qr(t,n,e){var r=Ms(t,n-ad,e).concat(n);return function(t){return r.map(function(n){return[t,n]})}}function Kr(t,n,e){var r=Ms(t,n-ad,e).concat(n);return function(t){return r.map(function(n){return[n,t]})}}function ti(){function t(){return{type:"MultiLineString",coordinates:n()}}function n(){return Ms(gd(o/_)*_,i,_).map(h).concat(Ms(gd(s/y)*y,c,y).map(p)).concat(Ms(gd(r/d)*d,e,d).filter(function(t){return dd(t%_)>ad}).map(f)).concat(Ms(gd(a/v)*v,u,v).filter(function(t){return dd(t%y)>ad}).map(l))}var e,r,i,o,u,a,c,s,f,l,h,p,d=10,v=d,_=90,y=360,g=2.5;return t.lines=function(){return n().map(function(t){return{type:"LineString",coordinates:t}})},t.outline=function(){return{type:"Polygon",coordinates:[h(o).concat(p(c).slice(1),h(i).reverse().slice(1),p(s).reverse().slice(1))]}},t.extent=function(n){return arguments.length?t.extentMajor(n).extentMinor(n):t.extentMinor()},t.extentMajor=function(n){return 
arguments.length?(o=+n[0][0],i=+n[1][0],s=+n[0][1],c=+n[1][1],o>i&&(n=o,o=i,i=n),s>c&&(n=s,s=c,c=n),t.precision(g)):[[o,s],[i,c]]},t.extentMinor=function(n){return arguments.length?(r=+n[0][0],e=+n[1][0],a=+n[0][1],u=+n[1][1],r>e&&(n=r,r=e,e=n),a>u&&(n=a,a=u,u=n),t.precision(g)):[[r,a],[e,u]]},t.step=function(n){return arguments.length?t.stepMajor(n).stepMinor(n):t.stepMinor()},t.stepMajor=function(n){return arguments.length?(_=+n[0],y=+n[1],t):[_,y]},t.stepMinor=function(n){return arguments.length?(d=+n[0],v=+n[1],t):[d,v]},t.precision=function(n){return arguments.length?(g=+n,f=Qr(a,u,90),l=Kr(r,e,g),h=Qr(s,c,90),p=Kr(o,i,g),t):g},t.extentMajor([[-180,-90+ad],[180,90-ad]]).extentMinor([[-180,-80-ad],[180,80+ad]])}function ni(){dv.point=ei}function ei(t,n){dv.point=ri,Id=Bd=t,Yd=jd=n}function ri(t,n){pv.add(jd*t-Bd*n),Bd=t,jd=n}function ii(){ri(Id,Yd)}function oi(t,n){xv+=t,bv+=n,++wv}function ui(){Av.point=ai}function ai(t,n){Av.point=ci,oi($d=t,Vd=n)}function ci(t,n){var e=t-$d,r=n-Vd,i=Td(e*e+r*r);Mv+=i*($d+t)/2,Tv+=i*(Vd+n)/2,kv+=i,oi($d=t,Vd=n)}function si(){Av.point=oi}function fi(){Av.point=hi}function li(){pi(Hd,Xd)}function hi(t,n){Av.point=pi,oi(Hd=$d=t,Xd=Vd=n)}function pi(t,n){var e=t-$d,r=n-Vd,i=Td(e*e+r*r);Mv+=i*($d+t)/2,Tv+=i*(Vd+n)/2,kv+=i,Nv+=(i=Vd*t-$d*n)*($d+t),Sv+=i*(Vd+n),Ev+=3*i,oi($d=t,Vd=n)}function di(t){this._context=t}function vi(t,n){Uv.point=_i,zv=Rv=t,Pv=Lv=n}function _i(t,n){Rv-=t,Lv-=n,qv.add(Td(Rv*Rv+Lv*Lv)),Rv=t,Lv=n}function yi(){this._string=[]}function gi(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function mi(t){return t.length>1}function xi(t,n){return((t=t.x)[0]<0?t[1]-sd-ad:sd-t[1])-((n=n.x)[0]<0?n[1]-sd-ad:sd-n[1])}function bi(t,n,e,r){var i,o,u=wd(t-e);return dd(u)>ad?vd((wd(n)*(o=yd(r))*wd(e)-wd(r)*(i=yd(n))*wd(t))/(i*o*u)):(n+r)/2}function wi(t){return function(n){var e=new Mi;for(var r in t)e[r]=t[r];return e.stream=n,e}}function Mi(){}function Ti(t,n,e){var 
r=n[1][0]-n[0][0],i=n[1][1]-n[0][1],o=t.clipExtent&&t.clipExtent();t.scale(150).translate([0,0]),null!=o&&t.clipExtent(null),Ed(e,t.stream(mv));var u=mv.result(),a=Math.min(r/(u[1][0]-u[0][0]),i/(u[1][1]-u[0][1])),c=+n[0][0]+(r-a*(u[1][0]+u[0][0]))/2,s=+n[0][1]+(i-a*(u[1][1]+u[0][1]))/2;return null!=o&&t.clipExtent(o),t.scale(150*a).translate([c,s])}function ki(t,n,e){return Ti(t,[[0,0],n],e)}function Ni(t){return wi({point:function(n,e){n=t(n,e),this.stream.point(n[0],n[1])}})}function Si(t,n){function e(r,i,o,u,a,c,s,f,l,h,p,d,v,_){var y=s-r,g=f-i,m=y*y+g*g;if(m>4*n&&v--){var x=u+h,b=a+p,w=c+d,M=Td(x*x+b*b+w*w),T=Ge(w/=M),k=dd(dd(w)-1)n||dd((y*A+g*C)/m-.5)>.3||u*h+a*p+c*d2?t[2]%360*pd:0,i()):[b*hd,w*hd,M*hd]},n.precision=function(t){return arguments.length?(A=Bv(r,E=t*t),o()):Td(E)},n.fitExtent=function(t,e){return Ti(n,t,e)},n.fitSize=function(t,e){return ki(n,t,e)},function(){return u=t.apply(this,arguments),n.invert=u.invert&&e,i()}}function Ci(t){var n=0,e=cd/3,r=Ai(t),i=r(n,e);return i.parallels=function(t){return arguments.length?r(n=t[0]*pd,e=t[1]*pd):[n*hd,e*hd]},i}function zi(t){function n(t,n){return[t*e,wd(n)/e]}var e=yd(t);return n.invert=function(t,n){return[t/e,Ge(n*e)]},n}function Pi(t,n){function e(t,n){var e=Td(o-2*i*wd(n))/i;return[e*wd(t*=i),u-e*yd(t)]}var r=wd(t),i=(r+wd(n))/2;if(dd(i)0?n<-sd+ad&&(n=-sd+ad):n>sd-ad&&(n=sd-ad);var e=o/bd(Oi(n),i);return[e*wd(i*t),o-e*yd(i*t)]}var r=yd(t),i=t===n?wd(t):xd(r/yd(n))/xd(Oi(n)/Oi(t)),o=r*bd(Oi(t),i)/i;return i?(e.invert=function(t,n){var e=o-n,r=Md(i)*Td(t*t+e*e);return[_d(t,dd(e))/i*Md(e),2*vd(bd(o/r,1/i))-sd]},e):Ui}function Ii(t,n){return[t,n]}function Yi(t,n){function e(t,n){var e=o-n,r=i*t;return[e*wd(r),o-e*yd(r)]}var r=yd(t),i=t===n?wd(t):(r-yd(n))/(n-t),o=r/i+t;return dd(i)=0;)n+=e[r].value;else n=1;t.value=n}function no(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;for(t=e.pop(),n=r.pop();t===n;)i=t,t=e.pop(),n=r.pop();return i}function eo(t,n){var e,r,i,o,u,a=new 
uo(t),c=+t.value&&(a.value=t.value),s=[a];for(null==n&&(n=ro);e=s.pop();)if(c&&(e.value=+e.data.value),(i=n(e.data))&&(u=i.length))for(e.children=new Array(u),o=u-1;o>=0;--o)s.push(r=e.children[o]=new uo(i[o])),r.parent=e,r.depth=e.depth+1;return a.eachBefore(oo)}function ro(t){return t.children}function io(t){t.data=t.data.data}function oo(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function uo(t){this.data=t,this.depth=this.height=0,this.parent=null}function ao(t){for(var n,e,r=t.length;r;)e=Math.random()*r--|0,n=t[r],t[r]=t[e],t[e]=n;return t}function co(t,n){var e,r;if(lo(n,t))return[n];for(e=0;e0&&e*e>r*r+i*i}function lo(t,n){for(var e=0;ee*e+r*r}function mo(t){var n=t._,e=t.next._,r=n.r+e.r,i=(n.x*e.r+e.x*n.r)/r,o=(n.y*e.r+e.y*n.r)/r;return i*i+o*o}function xo(t){this._=t,this.next=null,this.previous=null}function bo(t){if(!(i=t.length))return 0;var n,e,r,i,o,u,a,c,s,f,l;if(n=t[0],n.x=0,n.y=0,!(i>1))return n.r;if(e=t[1],n.x=-e.r,e.x=n.r,e.y=0,!(i>2))return n.r+e.r;yo(e,n,r=t[2]),n=new xo(n),e=new xo(e),r=new xo(r),n.next=r.previous=e,e.next=n.previous=r,r.next=e.previous=n;t:for(a=3;a=0;)(n=i[o]).z+=e,n.m+=e,e+=n.s+(r+=n.c)}function Uo(t,n,e){return t.a.parent===n.parent?t.a:e}function Do(t,n){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=n}function Oo(t){for(var n,e,r,i,o,u=new Do(t,0),a=[u];n=a.pop();)if(r=n._.children)for(n.children=new Array(o=r.length),i=o-1;i>=0;--i)a.push(e=n.children[i]=new Do(r[i],i)),e.parent=n;return(u.parent=new Do(null,0)).children=[u],u}function Fo(t,n,e,r,i,o){for(var u,a,c,s,f,l,h,p,d,v,_,y=[],g=n.children,m=0,x=0,b=g.length,w=n.value;mh&&(h=a),_=f*f*v,(p=Math.max(h/_,_/l))>d){f-=a;break}d=p}y.push(u={value:f,dice:c1&&u_(t[e[r-2]],t[e[r-1]],t[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function Bo(t){this._size=t,this._call=this._error=null,this._tasks=[],this._data=[],this._waiting=this._active=this._ended=this._start=0}function 
jo(t){if(!t._start)try{Ho(t)}catch(n){if(t._tasks[t._ended+t._active-1])$o(t,n);else if(!t._data)throw n}}function Ho(t){for(;t._start=t._waiting&&t._active=0;)if((e=t._tasks[r])&&(t._tasks[r]=null,e.abort))try{e.abort()}catch(n){}t._active=NaN,Vo(t)}function Vo(t){if(!t._active&&t._call){var n=t._data;t._data=void 0,t._call(t._error,n)}}function Wo(t){if(null==t)t=1/0;else if(!((t=+t)>=1))throw new Error("invalid concurrency");return new Bo(t)}function Zo(t){return function(n,e){t(null==n?e:null)}}function Go(t){var n=t.responseType;return n&&"text"!==n?t.response:t.responseText}function Jo(t,n){return function(e){return t(e.responseText,n)}}function Qo(t){function n(n){var o=n+"",u=e.get(o);if(!u){if(i!==E_)return i;e.set(o,u=r.push(n))}return t[(u-1)%t.length]}var e=we(),r=[],i=E_;return t=null==t?[]:S_.call(t),n.domain=function(t){if(!arguments.length)return r.slice();r=[],e=we();for(var i,o,u=-1,a=t.length;++u=e?1:r(t)}}}function ru(t){return function(n,e){var r=t(n=+n,e=+e);return function(t){return t<=0?n:t>=1?e:r(t)}}}function iu(t,n,e,r){var i=t[0],o=t[1],u=n[0],a=n[1];return o2?ou:iu,o=u=null,r}function r(n){return(o||(o=i(a,c,f?eu(t):t,s)))(+n)}var i,o,u,a=z_,c=z_,s=cl,f=!1;return r.invert=function(t){return(u||(u=i(c,a,nu,f?ru(n):n)))(+t)},r.domain=function(t){return arguments.length?(a=N_.call(t,C_),e()):a.slice()},r.range=function(t){return arguments.length?(c=S_.call(t),e()):c.slice()},r.rangeRound=function(t){return c=S_.call(t),s=sl,e()},r.clamp=function(t){return arguments.length?(f=!!t,e()):f},r.interpolate=function(t){return arguments.length?(s=t,e()):s},e()}function cu(t){var n=t.domain;return t.ticks=function(t){var e=n();return Ss(e[0],e[e.length-1],null==t?10:t)},t.tickFormat=function(t,e){return P_(n(),t,e)},t.nice=function(e){null==e&&(e=10);var i,o=n(),u=0,a=o.length-1,c=o[u],s=o[a];return 
s0?i=r(c=Math.floor(c/i)*i,s=Math.ceil(s/i)*i,e):i<0&&(i=r(c=Math.ceil(c*i)/i,s=Math.floor(s*i)/i,e)),i>0?(o[u]=Math.floor(c/i)*i,o[a]=Math.ceil(s/i)*i,n(o)):i<0&&(o[u]=Math.ceil(c*i)/i,o[a]=Math.floor(s*i)/i,n(o)),t},t}function su(){var t=au(nu,rl);return t.copy=function(){return uu(t,su())},cu(t)}function fu(){function t(t){return+t}var n=[0,1];return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=N_.call(e,C_),t):n.slice()},t.copy=function(){return fu().domain(n)},cu(t)}function lu(t,n){return(n=Math.log(n/t))?function(e){return Math.log(e/t)/n}:A_(n)}function hu(t,n){return t<0?function(e){return-Math.pow(-n,e)*Math.pow(-t,1-e)}:function(e){return Math.pow(n,e)*Math.pow(t,1-e)}}function pu(t){return isFinite(t)?+("1e"+t):t<0?0:t}function du(t){return 10===t?pu:t===Math.E?Math.exp:function(n){return Math.pow(t,n)}}function vu(t){return t===Math.E?Math.log:10===t&&Math.log10||2===t&&Math.log2||(t=Math.log(t),function(n){return Math.log(n)/t})}function _u(t){return function(n){return-t(-n)}}function yu(){function n(){return o=vu(i),u=du(i),r()[0]<0&&(o=_u(o),u=_u(u)),e}var e=au(lu,hu).domain([1,10]),r=e.domain,i=10,o=vu(10),u=du(10);return e.base=function(t){return arguments.length?(i=+t,n()):i},e.domain=function(t){return arguments.length?(r(t),n()):r()},e.ticks=function(t){var n,e=r(),a=e[0],c=e[e.length-1];(n=c0){for(;hc)break;v.push(l)}}else for(;h=1;--f)if(!((l=s*f)c)break;v.push(l)}}else v=Ss(h,p,Math.min(p-h,d)).map(u);return n?v.reverse():v},e.tickFormat=function(n,r){if(null==r&&(r=10===i?".0e":","),"function"!=typeof r&&(r=t.format(r)),n===1/0)return r;null==n&&(n=10);var a=Math.max(1,i*n/e.ticks().length);return function(t){var n=t/u(Math.round(o(t)));return n*i0?i[n-1]:e[0],n=i?[o[i-1],r]:[o[n-1],o[n]]},t.copy=function(){return bu().domain([e,r]).range(u)},cu(t)}function wu(){function t(t){if(t<=t)return e[hs(n,t,0,r)]}var n=[.5],e=[0,1],r=1;return t.domain=function(i){return 
arguments.length?(n=S_.call(i),r=Math.min(n.length,e.length-1),t):n.slice()},t.range=function(i){return arguments.length?(e=S_.call(i),r=Math.min(n.length,e.length-1),t):e.slice()},t.invertExtent=function(t){var r=e.indexOf(t);return[n[r-1],n[r]]},t.copy=function(){return wu().domain(n).range(e)},t}function Mu(t,n,e,r){function i(n){return t(n=new Date(+n)),n}return i.floor=i,i.ceil=function(e){return t(e=new Date(e-1)),n(e,1),t(e),e},i.round=function(t){var n=i(t),e=i.ceil(t);return t-n0))return u;do{u.push(new Date(+e))}while(n(e,o),t(e),e=n)for(;t(n),!e(n);)n.setTime(n-1)},function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;n(t,-1),!e(t););else for(;--r>=0;)for(;n(t,1),!e(t););})},e&&(i.count=function(n,r){return L_.setTime(+n),q_.setTime(+r),t(L_),t(q_),Math.floor(e(L_,q_))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(n){return r(n)%t==0}:function(n){return i.count(0,n)%t==0}):i:null}),i}function Tu(t){return Mu(function(n){n.setDate(n.getDate()-(n.getDay()+7-t)%7),n.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+7*n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*O_)/F_})}function ku(t){return Mu(function(n){n.setUTCDate(n.getUTCDate()-(n.getUTCDay()+7-t)%7),n.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+7*n)},function(t,n){return(n-t)/F_})}function Nu(t){if(0<=t.y&&t.y<100){var n=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return n.setFullYear(t.y),n}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function Su(t){if(0<=t.y&&t.y<100){var n=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return n.setUTCFullYear(t.y),n}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function Eu(t){return{y:t,m:0,d:1,H:0,M:0,S:0,L:0}}function Au(t){function n(t,n){return function(e){var r,i,o,u=[],a=-1,c=0,s=t.length;for(e instanceof Date||(e=new Date(+e));++a=c)return-1;if(37===(i=n.charCodeAt(u++))){if(i=n.charAt(u++),!(o=T[i in Dy?n.charAt(u++):i])||(r=o(t,e,r))<0)return-1}else 
if(i!=e.charCodeAt(r++))return-1}return r}var i=t.dateTime,o=t.date,u=t.time,a=t.periods,c=t.days,s=t.shortDays,f=t.months,l=t.shortMonths,h=Pu(a),p=Ru(a),d=Pu(c),v=Ru(c),_=Pu(s),y=Ru(s),g=Pu(f),m=Ru(f),x=Pu(l),b=Ru(l),w={a:function(t){return s[t.getDay()]},A:function(t){return c[t.getDay()]},b:function(t){return l[t.getMonth()]},B:function(t){return f[t.getMonth()]},c:null,d:Wu,e:Wu,H:Zu,I:Gu,j:Ju,L:Qu,m:Ku,M:ta,p:function(t){return a[+(t.getHours()>=12)]},S:na,U:ea,w:ra,W:ia,x:null,X:null,y:oa,Y:ua,Z:aa,"%":wa},M={a:function(t){return s[t.getUTCDay()]},A:function(t){return c[t.getUTCDay()]},b:function(t){return l[t.getUTCMonth()]},B:function(t){return f[t.getUTCMonth()]},c:null,d:ca,e:ca,H:sa,I:fa,j:la,L:ha,m:pa,M:da,p:function(t){return a[+(t.getUTCHours()>=12)]},S:va,U:_a,w:ya,W:ga,x:null,X:null,y:ma,Y:xa,Z:ba,"%":wa},T={a:function(t,n,e){var r=_.exec(n.slice(e));return r?(t.w=y[r[0].toLowerCase()],e+r[0].length):-1},A:function(t,n,e){var r=d.exec(n.slice(e));return r?(t.w=v[r[0].toLowerCase()],e+r[0].length):-1},b:function(t,n,e){var r=x.exec(n.slice(e));return r?(t.m=b[r[0].toLowerCase()],e+r[0].length):-1},B:function(t,n,e){var r=g.exec(n.slice(e));return r?(t.m=m[r[0].toLowerCase()],e+r[0].length):-1},c:function(t,n,e){return r(t,i,n,e)},d:Yu,e:Yu,H:ju,I:ju,j:Bu,L:$u,m:Iu,M:Hu,p:function(t,n,e){var r=h.exec(n.slice(e));return r?(t.p=p[r[0].toLowerCase()],e+r[0].length):-1},S:Xu,U:qu,w:Lu,W:Uu,x:function(t,n,e){return r(t,o,n,e)},X:function(t,n,e){return r(t,u,n,e)},y:Ou,Y:Du,Z:Fu,"%":Vu};return w.x=n(o,w),w.X=n(u,w),w.c=n(i,w),M.x=n(o,M),M.X=n(u,M),M.c=n(i,M),{format:function(t){var e=n(t+="",w);return e.toString=function(){return t},e},parse:function(t){var n=e(t+="",Nu);return n.toString=function(){return t},n},utcFormat:function(t){var e=n(t+="",M);return e.toString=function(){return t},e},utcParse:function(t){var n=e(t,Su);return n.toString=function(){return t},n}}}function Cu(t,n,e){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return 
r+(o68?1900:2e3),e+r[0].length):-1}function Fu(t,n,e){var r=/^(Z)|([+-]\d\d)(?:\:?(\d\d))?/.exec(n.slice(e,e+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),e+r[0].length):-1}function Iu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.m=r[0]-1,e+r[0].length):-1}function Yu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.d=+r[0],e+r[0].length):-1}function Bu(t,n,e){var r=Oy.exec(n.slice(e,e+3));return r?(t.m=0,t.d=+r[0],e+r[0].length):-1}function ju(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.H=+r[0],e+r[0].length):-1}function Hu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.M=+r[0],e+r[0].length):-1}function Xu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.S=+r[0],e+r[0].length):-1}function $u(t,n,e){var r=Oy.exec(n.slice(e,e+3));return r?(t.L=+r[0],e+r[0].length):-1}function Vu(t,n,e){var r=Fy.exec(n.slice(e,e+1));return r?e+r[0].length:-1}function Wu(t,n){return Cu(t.getDate(),n,2)}function Zu(t,n){return Cu(t.getHours(),n,2)}function Gu(t,n){return Cu(t.getHours()%12||12,n,2)}function Ju(t,n){return Cu(1+$_.count(fy(t),t),n,3)}function Qu(t,n){return Cu(t.getMilliseconds(),n,3)}function Ku(t,n){return Cu(t.getMonth()+1,n,2)}function ta(t,n){return Cu(t.getMinutes(),n,2)}function na(t,n){return Cu(t.getSeconds(),n,2)}function ea(t,n){return Cu(W_.count(fy(t),t),n,2)}function ra(t){return t.getDay()}function ia(t,n){return Cu(Z_.count(fy(t),t),n,2)}function oa(t,n){return Cu(t.getFullYear()%100,n,2)}function ua(t,n){return Cu(t.getFullYear()%1e4,n,4)}function aa(t){var n=t.getTimezoneOffset();return(n>0?"-":(n*=-1,"+"))+Cu(n/60|0,"0",2)+Cu(n%60,"0",2)}function ca(t,n){return Cu(t.getUTCDate(),n,2)}function sa(t,n){return Cu(t.getUTCHours(),n,2)}function fa(t,n){return Cu(t.getUTCHours()%12||12,n,2)}function la(t,n){return Cu(1+_y.count(Ly(t),t),n,3)}function ha(t,n){return Cu(t.getUTCMilliseconds(),n,3)}function pa(t,n){return Cu(t.getUTCMonth()+1,n,2)}function da(t,n){return Cu(t.getUTCMinutes(),n,2)}function va(t,n){return 
Cu(t.getUTCSeconds(),n,2)}function _a(t,n){return Cu(gy.count(Ly(t),t),n,2)}function ya(t){return t.getUTCDay()}function ga(t,n){return Cu(my.count(Ly(t),t),n,2)}function ma(t,n){return Cu(t.getUTCFullYear()%100,n,2)}function xa(t,n){return Cu(t.getUTCFullYear()%1e4,n,4)}function ba(){return"+0000"}function wa(){return"%"}function Ma(n){return qy=Au(n),t.timeFormat=qy.format,t.timeParse=qy.parse,t.utcFormat=qy.utcFormat,t.utcParse=qy.utcParse,qy}function Ta(t){return new Date(t)}function ka(t){return t instanceof Date?+t:+new Date(+t)}function Na(t,n,e,r,o,u,a,c,s){function f(i){return(a(i)1?0:t<-1?gg:Math.acos(t)}function Ca(t){return t>=1?mg:t<=-1?-mg:Math.asin(t)}function za(t){return t.innerRadius}function Pa(t){return t.outerRadius}function Ra(t){return t.startAngle}function La(t){return t.endAngle}function qa(t){return t&&t.padAngle}function Ua(t,n,e,r,i,o,u,a){var c=e-t,s=r-n,f=u-i,l=a-o,h=(f*(n-o)-l*(t-i))/(l*c-f*s);return[t+h*c,n+h*s]}function Da(t,n,e,r,i,o,u){var a=t-e,c=n-r,s=(u?o:-o)/_g(a*a+c*c),f=s*c,l=-s*a,h=t+f,p=n+l,d=e+f,v=r+l,_=(h+d)/2,y=(p+v)/2,g=d-h,m=v-p,x=g*g+m*m,b=i-o,w=h*v-d*p,M=(m<0?-1:1)*_g(pg(0,b*b*x-w*w)),T=(w*m-g*M)/x,k=(-w*g-m*M)/x,N=(w*m+g*M)/x,S=(-w*g+m*M)/x,E=T-_,A=k-y,C=N-_,z=S-y;return E*E+A*A>C*C+z*z&&(T=N,k=S),{cx:T,cy:k,x01:-f,y01:-l,x11:T*(i/b-1),y11:k*(i/b-1)}}function Oa(t){this._context=t}function Fa(t){return t[0]}function Ia(t){return t[1]}function Ya(t){this._curve=t}function Ba(t){function n(n){return new Ya(t(n))}return n._curve=t,n}function ja(t){var n=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t}function Ha(t){return t.source}function Xa(t){return t.target}function $a(t){function n(){var n,a=Cg.call(arguments),c=e.apply(this,a),s=r.apply(this,a);if(u||(u=n=ve()),t(u,+i.apply(this,(a[0]=c,a)),+o.apply(this,a),+i.apply(this,(a[0]=s,a)),+o.apply(this,a)),n)return u=null,n+""||null}var e=Ha,r=Xa,i=Fa,o=Ia,u=null;return 
n.source=function(t){return arguments.length?(e=t,n):e},n.target=function(t){return arguments.length?(r=t,n):r},n.x=function(t){return arguments.length?(i="function"==typeof t?t:sg(+t),n):i},n.y=function(t){return arguments.length?(o="function"==typeof t?t:sg(+t),n):o},n.context=function(t){return arguments.length?(u=null==t?null:t,n):u},n}function Va(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n=(n+r)/2,e,n,i,r,i)}function Wa(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n,e=(e+i)/2,r,e,r,i)}function Za(t,n,e,r,i){var o=Ag(n,e),u=Ag(n,e=(e+i)/2),a=Ag(r,e),c=Ag(r,i);t.moveTo(o[0],o[1]),t.bezierCurveTo(u[0],u[1],a[0],a[1],c[0],c[1])}function Ga(t,n,e){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+n)/6,(t._y0+4*t._y1+e)/6)}function Ja(t){this._context=t}function Qa(t){this._context=t}function Ka(t){this._context=t}function tc(t,n){this._basis=new Ja(t),this._beta=n}function nc(t,n,e){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-n),t._y2+t._k*(t._y1-e),t._x2,t._y2)}function ec(t,n){this._context=t,this._k=(1-n)/6}function rc(t,n){this._context=t,this._k=(1-n)/6}function ic(t,n){this._context=t,this._k=(1-n)/6}function oc(t,n,e){var r=t._x1,i=t._y1,o=t._x2,u=t._y2;if(t._l01_a>yg){var a=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,c=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*a-t._x0*t._l12_2a+t._x2*t._l01_2a)/c,i=(i*a-t._y0*t._l12_2a+t._y2*t._l01_2a)/c}if(t._l23_a>yg){var s=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,f=3*t._l23_a*(t._l23_a+t._l12_a);o=(o*s+t._x1*t._l23_2a-n*t._l12_2a)/f,u=(u*s+t._y1*t._l23_2a-e*t._l12_2a)/f}t._context.bezierCurveTo(r,i,o,u,t._x2,t._y2)}function uc(t,n){this._context=t,this._alpha=n}function ac(t,n){this._context=t,this._alpha=n}function cc(t,n){this._context=t,this._alpha=n}function sc(t){this._context=t}function fc(t){return t<0?-1:1}function lc(t,n,e){var 
r=t._x1-t._x0,i=n-t._x1,o=(t._y1-t._y0)/(r||i<0&&-0),u=(e-t._y1)/(i||r<0&&-0),a=(o*i+u*r)/(r+i);return(fc(o)+fc(u))*Math.min(Math.abs(o),Math.abs(u),.5*Math.abs(a))||0}function hc(t,n){var e=t._x1-t._x0;return e?(3*(t._y1-t._y0)/e-n)/2:n}function pc(t,n,e){var r=t._x0,i=t._y0,o=t._x1,u=t._y1,a=(o-r)/3;t._context.bezierCurveTo(r+a,i+a*n,o-a,u-a*e,o,u)}function dc(t){this._context=t}function vc(t){this._context=new _c(t)}function _c(t){this._context=t}function yc(t){this._context=t}function gc(t){var n,e,r=t.length-1,i=new Array(r),o=new Array(r),u=new Array(r);for(i[0]=0,o[0]=2,u[0]=t[0]+2*t[1],n=1;n=0;--n)i[n]=(u[n]-i[n+1])/o[n];for(o[r-1]=(t[r]+i[r-1])/2,n=0;n0)){if(o/=h,h<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=r-c,h||!(o<0)){if(o/=h,h<0){if(o>l)return;o>f&&(f=o)}else if(h>0){if(o0)){if(o/=p,p<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=i-s,p||!(o<0)){if(o/=p,p<0){if(o>l)return;o>f&&(f=o)}else if(p>0){if(o0||l<1)||(f>0&&(t[0]=[c+f*h,s+f*p]),l<1&&(t[1]=[c+l*h,s+l*p]),!0)}}}}}function Rc(t,n,e,r,i){var o=t[1];if(o)return!0;var u,a,c=t[0],s=t.left,f=t.right,l=s[0],h=s[1],p=f[0],d=f[1],v=(l+p)/2,_=(h+d)/2;if(d===h){if(v=r)return;if(l>p){if(c){if(c[1]>=i)return}else c=[v,e];o=[v,i]}else{if(c){if(c[1]1)if(l>p){if(c){if(c[1]>=i)return}else c=[(e-a)/u,e];o=[(i-a)/u,i]}else{if(c){if(c[1]=r)return}else c=[n,u*n+a];o=[r,u*r+a]}else{if(c){if(c[0]dm||Math.abs(i[0][1]-i[1][1])>dm)||delete lm[o]}function qc(t){return sm[t.index]={site:t,halfedges:[]}}function Uc(t,n){var e=t.site,r=n.left,i=n.right;return e===i&&(i=r,r=e),i?Math.atan2(i[1]-r[1],i[0]-r[0]):(e===r?(r=n[1],i=n[0]):(r=n[0],i=n[1]),Math.atan2(r[0]-i[0],i[1]-r[1]))}function Dc(t,n){return n[+(n.left!==t.site)]}function Oc(t,n){return n[+(n.left===t.site)]}function Fc(){for(var t,n,e,r,i=0,o=sm.length;idm||Math.abs(v-h)>dm)&&(c.splice(a,0,lm.push(Cc(u,p,Math.abs(d-t)dm?[t,Math.abs(l-t)dm?[Math.abs(h-r)dm?[e,Math.abs(l-e)dm?[Math.abs(h-n)=-vm)){var p=c*c+s*s,d=f*f+l*l,v=(l*p-s*d)/h,_=(c*d-f*p)/h,y=hm.pop()||new 
Yc;y.arc=t,y.site=i,y.x=v+u,y.y=(y.cy=_+a)+Math.sqrt(v*v+_*_),t.circle=y;for(var g=null,m=fm._;m;)if(y.ydm)a=a.L;else{if(!((i=o-Gc(a,u))>dm)){r>-dm?(n=a.P,e=a):i>-dm?(n=a,e=a.N):n=e=a;break}if(!a.R){n=a;break}a=a.R}qc(t);var c=Xc(t);if(cm.insert(n,c),n||e){if(n===e)return jc(n),e=Xc(n.site),cm.insert(c,e),c.edge=e.edge=Ac(n.site,c.site),Bc(n),void Bc(e);if(e){jc(n),jc(e);var s=n.site,f=s[0],l=s[1],h=t[0]-f,p=t[1]-l,d=e.site,v=d[0]-f,_=d[1]-l,y=2*(h*_-p*v),g=h*h+p*p,m=v*v+_*_,x=[(_*g-p*m)/y+f,(h*m-v*g)/y+l];zc(e.edge,s,d,x),c.edge=Ac(s,t,null,x),e.edge=Ac(t,d,null,x),Bc(n),Bc(e)}else c.edge=Ac(n.site,c.site)}}function Zc(t,n){var e=t.site,r=e[0],i=e[1],o=i-n;if(!o)return r;var u=t.P;if(!u)return-1/0;var a=(e=u.site)[0],c=e[1],s=c-n;if(!s)return a;var f=a-r,l=1/o-1/s,h=f/s;return l?(-h+Math.sqrt(h*h-2*l*(f*f/(-2*s)-c+s/2+i-o/2)))/l+r:(r+a)/2}function Gc(t,n){var e=t.N;if(e)return Zc(e,n);var r=t.site;return r[1]===n?r[0]:1/0}function Jc(t,n,e){return(t[0]-e[0])*(n[1]-t[1])-(t[0]-n[0])*(e[1]-t[1])}function Qc(t,n){return n[1]-t[1]||n[0]-t[0]}function Kc(t,n){var e,r,i,o=t.sort(Qc).pop();for(lm=[],sm=new Array(t.length),cm=new Tc,fm=new Tc;;)if(i=am,o&&(!i||o[1]n?1:t>=n?0:NaN},fs=function(t){return 1===t.length&&(t=n(t)),{left:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)<0?r=o+1:i=o}return r},right:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)>0?i=o:r=o+1}return r}}},ls=fs(ss),hs=ls.right,ps=ls.left,ds=function(t){return null===t?NaN:+t},vs=function(t,n){var e,r,i=t.length,o=0,u=-1,a=0,c=0;if(null==n)for(;++u1)return c/(o-1)},_s=function(t,n){var e=vs(t,n);return e?Math.sqrt(e):e},ys=function(t,n){var e,r,i,o=t.length,u=-1;if(null==n){for(;++u=e)for(r=i=e;++ue&&(r=e),i=e)for(r=i=e;++ue&&(r=e),i0)for(t=Math.ceil(t/u),n=Math.floor(n/u),o=new Array(i=Math.ceil(n-t+1));++c=1)return+e(t[r-1],r-1,t);var r,i=(r-1)*n,o=Math.floor(i),u=+e(t[o],o,t);return u+(+e(t[o+1],o+1,t)-u)*(i-o)}},Cs=function(t){for(var 
n,e,r,i=t.length,o=-1,u=0;++o=0;)for(n=(r=t[i]).length;--n>=0;)e[--u]=r[n];return e},zs=function(t,n){var e,r,i=t.length,o=-1;if(null==n){for(;++o=e)for(r=e;++oe&&(r=e)}else for(;++o=e)for(r=e;++oe&&(r=e);return r},Ps=function(t){if(!(i=t.length))return[];for(var n=-1,e=zs(t,o),r=new Array(e);++n0)for(var e,r,i=new Array(e),o=0;o=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Bs.hasOwnProperty(n)?{space:Bs[n],local:t}:t},Hs=function(t){var n=js(t);return(n.local?g:y)(n)},Xs=0;x.prototype=m.prototype={constructor:x,get:function(t){for(var n=this._;!(n in t);)if(!(t=t.parentNode))return;return t[n]},set:function(t,n){return t[this._]=n},remove:function(t){return this._ in t&&delete t[this._]},toString:function(){return this._}};var $s=function(t){return function(){return this.matches(t)}};if("undefined"!=typeof document){var Vs=document.documentElement;if(!Vs.matches){var Ws=Vs.webkitMatchesSelector||Vs.msMatchesSelector||Vs.mozMatchesSelector||Vs.oMatchesSelector;$s=function(t){return function(){return Ws.call(this,t)}}}}var Zs=$s,Gs={};t.event=null,"undefined"!=typeof document&&("onmouseenter"in document.documentElement||(Gs={mouseenter:"mouseover",mouseleave:"mouseout"}));var Js=function(){for(var n,e=t.event;n=e.sourceEvent;)e=n;return e},Qs=function(t,n){var e=t.ownerSVGElement||t;if(e.createSVGPoint){var r=e.createSVGPoint();return r.x=n.clientX,r.y=n.clientY,r=r.matrixTransform(t.getScreenCTM().inverse()),[r.x,r.y]}var i=t.getBoundingClientRect();return[n.clientX-i.left-t.clientLeft,n.clientY-i.top-t.clientTop]},Ks=function(t){var n=Js();return n.changedTouches&&(n=n.changedTouches[0]),Qs(t,n)},tf=function(t){return null==t?S:function(){return this.querySelector(t)}},nf=function(t){return null==t?E:function(){return this.querySelectorAll(t)}},ef=function(t){return new Array(t.length)};A.prototype={constructor:A,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,n){return 
this._parent.insertBefore(t,n)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var rf=function(t){return function(){return t}},of="$",uf=function(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView};W.prototype={add:function(t){this._names.indexOf(t)<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var n=this._names.indexOf(t);n>=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var af=[null];pt.prototype=dt.prototype={constructor:pt,select:function(t){"function"!=typeof t&&(t=tf(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i=x&&(x=m+1);!(g=_[x])&&++x=0;)(r=i[o])&&(u&&u!==r.nextSibling&&u.parentNode.insertBefore(r,u),u=r);return this},sort:function(t){t||(t=P);for(var n=this._groups,e=n.length,r=new Array(e),i=0;i1?this.each((null==n?F:"function"==typeof n?Y:I)(t,n,null==e?"":e)):B(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?j:"function"==typeof n?X:H)(t,n)):this.node()[t]},classed:function(t,n){var e=$(t+"");if(arguments.length<2){for(var r=V(this.node()),i=-1,o=e.length;++i=240?t-240:t+120,i,r),Lt(t,i,r),Lt(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1}}));var Nf=Math.PI/180,Sf=180/Math.PI,Ef=.95047,Af=1,Cf=1.08883,zf=4/29,Pf=6/29,Rf=3*Pf*Pf,Lf=Pf*Pf*Pf;pf(Dt,Ut,wt(Mt,{brighter:function(t){return new Dt(this.l+18*(null==t?1:t),this.a,this.b,this.opacity)},darker:function(t){return new Dt(this.l-18*(null==t?1:t),this.a,this.b,this.opacity)},rgb:function(){var t=(this.l+16)/116,n=isNaN(this.a)?t:t+this.a/500,e=isNaN(this.b)?t:t-this.b/200;return t=Af*Ft(t),n=Ef*Ft(n),e=Cf*Ft(e),new 
At(It(3.2404542*n-1.5371385*t-.4985314*e),It(-.969266*n+1.8760108*t+.041556*e),It(.0556434*n-.2040259*t+1.0572252*e),this.opacity)}})),pf(Ht,jt,wt(Mt,{brighter:function(t){return new Ht(this.h,this.c,this.l+18*(null==t?1:t),this.opacity)},darker:function(t){return new Ht(this.h,this.c,this.l-18*(null==t?1:t),this.opacity)},rgb:function(){return qt(this).rgb()}}));var qf=-.14861,Uf=1.78277,Df=-.29227,Of=-.90649,Ff=1.97294,If=Ff*Of,Yf=Ff*Uf,Bf=Uf*Df-Of*qf;pf(Vt,$t,wt(Mt,{brighter:function(t){return t=null==t?1/.7:Math.pow(1/.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?.7:Math.pow(.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=isNaN(this.h)?0:(this.h+120)*Nf,n=+this.l,e=isNaN(this.s)?0:this.s*n*(1-n),r=Math.cos(t),i=Math.sin(t);return new At(255*(n+e*(qf*r+Uf*i)),255*(n+e*(Df*r+Of*i)),255*(n+e*(Ff*r)),this.opacity)}}));var jf,Hf,Xf,$f,Vf,Wf,Zf=function(t){var n=t.length-1;return function(e){var r=e<=0?e=0:e>=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],u=r>0?t[r-1]:2*i-o,a=ro&&(i=n.slice(o,i),a[u]?a[u]+=i:a[++u]=i),(e=e[0])===(r=r[0])?a[u]?a[u]+=r:a[++u]=r:(a[++u]=null,c.push({i:u,x:rl(e,r)})),o=ul.lastIndex;return oDl&&e.state1e-6)if(Math.abs(f*a-c*s)>1e-6&&i){var h=e-o,p=r-u,d=a*a+c*c,v=h*h+p*p,_=Math.sqrt(d),y=Math.sqrt(l),g=i*Math.tan((Yh-Math.acos((d+l-v)/(2*_*y)))/2),m=g/y,x=g/_;Math.abs(m-1)>1e-6&&(this._+="L"+(t+m*s)+","+(n+m*f)),this._+="A"+i+","+i+",0,0,"+ +(f*h>s*p)+","+(this._x1=t+x*a)+","+(this._y1=n+x*c)}else this._+="L"+(this._x1=t)+","+(this._y1=n);else;},arc:function(t,n,e,r,i,o){t=+t,n=+n;var u=(e=+e)*Math.cos(r),a=e*Math.sin(r),c=t+u,s=n+a,f=1^o,l=o?r-i:i-r;if(e<0)throw new Error("negative radius: "+e);null===this._x1?this._+="M"+c+","+s:(Math.abs(this._x1-c)>1e-6||Math.abs(this._y1-s)>1e-6)&&(this._+="L"+c+","+s),e&&(l<0&&(l=l%Bh+Bh),l>jh?this._+="A"+e+","+e+",0,1,"+f+","+(t-u)+","+(n-a)+"A"+e+","+e+",0,1,"+f+","+(this._x1=c)+","+(this._y1=s):l>1e-6&&(this._+="A"+e+","+e+",0,"+ 
+(l>=Yh)+","+f+","+(this._x1=t+e*Math.cos(i))+","+(this._y1=n+e*Math.sin(i))))},rect:function(t,n,e,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+n)+"h"+ +e+"v"+ +r+"h"+-e+"Z"},toString:function(){return this._}};be.prototype=we.prototype={constructor:be,has:function(t){return"$"+t in this},get:function(t){return this["$"+t]},set:function(t,n){return this["$"+t]=n,this},remove:function(t){var n="$"+t;return n in this&&delete this[n]},clear:function(){for(var t in this)"$"===t[0]&&delete this[t]},keys:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(n.slice(1));return t},values:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(this[n]);return t},entries:function(){var t=[];for(var n in this)"$"===n[0]&&t.push({key:n.slice(1),value:this[n]});return t},size:function(){var t=0;for(var n in this)"$"===n[0]&&++t;return t},empty:function(){for(var t in this)if("$"===t[0])return!1;return!0},each:function(t){for(var n in this)"$"===n[0]&&t(this[n],n.slice(1),this)}};var Hh=we.prototype;Se.prototype=Ee.prototype={constructor:Se,has:Hh.has,add:function(t){return t+="",this["$"+t]=t,this},remove:Hh.remove,clear:Hh.clear,values:Hh.keys,size:Hh.size,empty:Hh.empty,each:Hh.each};var Xh={},$h={},Vh=34,Wh=10,Zh=13,Gh=function(t){function n(t,n){function e(){if(s)return $h;if(f)return f=!1,Xh;var n,e,r=a;if(t.charCodeAt(r)===Vh){for(;a++=u?s=!0:(e=t.charCodeAt(a++))===Wh?f=!0:e===Zh&&(f=!0,t.charCodeAt(a)===Wh&&++a),t.slice(r+1,n-1).replace(/""/g,'"')}for(;af&&(f=r),il&&(l=i));for(ft||t>i||r>n||n>o))return this;var u,a,c=i-e,s=this._root;switch(a=(n<(r+o)/2)<<1|t<(e+i)/2){case 0:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,o=r+c,t>i||n>o);break;case 1:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,o=r+c,e>t||n>o);break;case 2:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,r=o-c,t>i||r>n);break;case 3:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,r=o-c,e>t||r>n)}this._root&&this._root.length&&(this._root=s)}return 
this._x0=e,this._y0=r,this._x1=i,this._y1=o,this},fp.data=function(){var t=[];return this.visit(function(n){if(!n.length)do{t.push(n.data)}while(n=n.next)}),t},fp.extent=function(t){return arguments.length?this.cover(+t[0][0],+t[0][1]).cover(+t[1][0],+t[1][1]):isNaN(this._x0)?void 0:[[this._x0,this._y0],[this._x1,this._y1]]},fp.find=function(t,n,e){var r,i,o,u,a,c,s,f=this._x0,l=this._y0,h=this._x1,p=this._y1,d=[],v=this._root;for(v&&d.push(new sp(v,f,l,h,p)),null==e?e=1/0:(f=t-e,l=n-e,h=t+e,p=n+e,e*=e);c=d.pop();)if(!(!(v=c.node)||(i=c.x0)>h||(o=c.y0)>p||(u=c.x1)=y)<<1|t>=_)&&(c=d[d.length-1],d[d.length-1]=d[d.length-1-s],d[d.length-1-s]=c)}else{var g=t-+this._x.call(null,v.data),m=n-+this._y.call(null,v.data),x=g*g+m*m;if(x=(a=(d+_)/2))?d=a:_=a,(f=u>=(c=(v+y)/2))?v=c:y=c,n=p,!(p=p[l=f<<1|s]))return this;if(!p.length)break;(n[l+1&3]||n[l+2&3]||n[l+3&3])&&(e=n,h=l)}for(;p.data!==t;)if(r=p,!(p=p.next))return this;return(i=p.next)&&delete p.next,r?(i?r.next=i:delete r.next,this):n?(i?n[l]=i:delete n[l],(p=n[0]||n[1]||n[2]||n[3])&&p===(n[3]||n[2]||n[1]||n[0])&&!p.length&&(e?e[h]=p:this._root=p),this):(this._root=i,this)},fp.removeAll=function(t){for(var n=0,e=t.length;n1?r[0]+r.slice(2):r,+t.slice(e+1)]},vp=function(t){return(t=dp(Math.abs(t)))?t[1]:NaN},_p=function(t,n){return function(e,r){for(var i=e.length,o=[],u=0,a=t[0],c=0;i>0&&a>0&&(c+a+1>r&&(a=Math.max(1,r-c)),o.push(e.substring(i-=a,i+a)),!((c+=a+1)>r));)a=t[u=(u+1)%t.length];return o.reverse().join(n)}},yp=function(t){return function(n){return n.replace(/[0-9]/g,function(n){return t[+n]})}},gp=function(t,n){var e=dp(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")},mp={"":function(t,n){t:for(var e,r=(t=t.toPrecision(n)).length,i=1,o=-1;i0&&(o=0)}return o>0?t.slice(0,o)+t.slice(e+1):t},"%":function(t,n){return(100*t).toFixed(n)},b:function(t){return 
Math.round(t).toString(2)},c:function(t){return t+""},d:function(t){return Math.round(t).toString(10)},e:function(t,n){return t.toExponential(n)},f:function(t,n){return t.toFixed(n)},g:function(t,n){return t.toPrecision(n)},o:function(t){return Math.round(t).toString(8)},p:function(t,n){return gp(100*t,n)},r:gp,s:function(t,n){var e=dp(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(lp=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,u=r.length;return o===u?r:o>u?r+new Array(o-u+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+dp(t,Math.max(0,n+o-1))[0]},X:function(t){return Math.round(t).toString(16).toUpperCase()},x:function(t){return Math.round(t).toString(16)}},xp=/^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;He.prototype=Xe.prototype,Xe.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(null==this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(null==this.precision?"":"."+Math.max(0,0|this.precision))+this.type};var bp,wp=function(t){return t},Mp=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"],Tp=function(t){function n(t){function n(t){var n,r,u,f=_,x=y;if("c"===v)x=g(t)+x,t="";else{var b=(t=+t)<0;if(t=g(Math.abs(t),d),b&&0==+t&&(b=!1),f=(b?"("===s?s:"-":"-"===s||"("===s?"":s)+f,x=x+("s"===v?Mp[8+lp/3]:"")+(b&&"("===s?")":""),m)for(n=-1,r=t.length;++n(u=t.charCodeAt(n))||u>57){x=(46===u?i+t.slice(n+1):t.slice(n))+x,t=t.slice(0,n);break}}p&&!l&&(t=e(t,1/0));var w=f.length+t.length+x.length,M=w>1)+f+t+x+M.slice(w);break;default:t=M+f+t+x}return o(t)}var a=(t=He(t)).fill,c=t.align,s=t.sign,f=t.symbol,l=t.zero,h=t.width,p=t.comma,d=t.precision,v=t.type,_="$"===f?r[0]:"#"===f&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",y="$"===f?r[1]:/[%p]/.test(v)?u:"",g=mp[v],m=!v||/[defgprs%]/.test(v);return d=null==d?v?6:12:/[gprs]/.test(v)?Math.max(1,Math.min(21,d)):Math.max(0,Math.min(20,d)),n.toString=function(){return t+""},n}var 
e=t.grouping&&t.thousands?_p(t.grouping,t.thousands):wp,r=t.currency,i=t.decimal,o=t.numerals?yp(t.numerals):wp,u=t.percent||"%";return{format:n,formatPrefix:function(t,e){var r=n((t=He(t),t.type="f",t)),i=3*Math.max(-8,Math.min(8,Math.floor(vp(e)/3))),o=Math.pow(10,-i),u=Mp[8+i/3];return function(t){return r(o*t)+u}}}};$e({decimal:".",thousands:",",grouping:[3],currency:["$",""]});var kp=function(t){return Math.max(0,-vp(Math.abs(t)))},Np=function(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(vp(n)/3)))-vp(Math.abs(t)))},Sp=function(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,vp(n)-vp(t))+1},Ep=function(){return new Ve};Ve.prototype={constructor:Ve,reset:function(){this.s=this.t=0},add:function(t){We(ud,t,this.t),We(this,ud.s,this.s),this.s?this.t+=ud.t:this.s=ud.t},valueOf:function(){return this.s}};var Ap,Cp,zp,Pp,Rp,Lp,qp,Up,Dp,Op,Fp,Ip,Yp,Bp,jp,Hp,Xp,$p,Vp,Wp,Zp,Gp,Jp,Qp,Kp,td,nd,ed,rd,id,od,ud=new Ve,ad=1e-6,cd=Math.PI,sd=cd/2,fd=cd/4,ld=2*cd,hd=180/cd,pd=cd/180,dd=Math.abs,vd=Math.atan,_d=Math.atan2,yd=Math.cos,gd=Math.ceil,md=Math.exp,xd=Math.log,bd=Math.pow,wd=Math.sin,Md=Math.sign||function(t){return t>0?1:t<0?-1:0},Td=Math.sqrt,kd=Math.tan,Nd={Feature:function(t,n){Ke(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++rad?Dp=90:Pd<-ad&&(qp=-90),jp[0]=Lp,jp[1]=Up}},Ld={sphere:Qe,point:Mr,lineStart:kr,lineEnd:Er,polygonStart:function(){Ld.lineStart=Ar,Ld.lineEnd=Cr},polygonEnd:function(){Ld.lineStart=kr,Ld.lineEnd=Er}},qd=function(t){return function(){return t}},Ud=function(t,n){function e(e,r){return e=t(e,r),n(e[0],e[1])}return t.invert&&n.invert&&(e.invert=function(e,r){return(e=n.invert(e,r))&&t.invert(e[0],e[1])}),e};Rr.invert=Rr;var Dd,Od,Fd,Id,Yd,Bd,jd,Hd,Xd,$d,Vd,Wd=function(t){function n(n){return n=t(n[0]*pd,n[1]*pd),n[0]*=hd,n[1]*=hd,n}return t=Lr(t[0]*pd,t[1]*pd,t.length>2?t[2]*pd:0),n.invert=function(n){return n=t.invert(n[0]*pd,n[1]*pd),n[0]*=hd,n[1]*=hd,n},n},Zd=function(){var 
t,n=[];return{point:function(n,e){t.push([n,e])},lineStart:function(){n.push(t=[])},lineEnd:Qe,rejoin:function(){n.length>1&&n.push(n.pop().concat(n.shift()))},result:function(){var e=n;return n=[],t=null,e}}},Gd=function(t,n,e,r,i,o){var u,a=t[0],c=t[1],s=0,f=1,l=n[0]-a,h=n[1]-c;if(u=e-a,l||!(u>0)){if(u/=l,l<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=i-a,l||!(u<0)){if(u/=l,l<0){if(u>f)return;u>s&&(s=u)}else if(l>0){if(u0)){if(u/=h,h<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=o-c,h||!(u<0)){if(u/=h,h<0){if(u>f)return;u>s&&(s=u)}else if(h>0){if(u0&&(t[0]=a+s*l,t[1]=c+s*h),f<1&&(n[0]=a+f*l,n[1]=c+f*h),!0}}}}},Jd=function(t,n){return dd(t[0]-n[0])=0;--o)i.point((f=s[o])[0],f[1]);else r(h.x,h.p.x,-1,i);h=h.p}s=(h=h.o).z,p=!p}while(!h.v);i.lineEnd()}}},Kd=1e9,tv=-Kd,nv=Ep(),ev=function(t,n){var e=n[0],r=n[1],i=[wd(e),-yd(e),0],o=0,u=0;nv.reset();for(var a=0,c=t.length;a=0?1:-1,T=M*w,k=T>cd,N=d*x;if(nv.add(_d(N*M*wd(T),v*b+N*yd(T))),o+=k?w+M*ld:w,k^h>=e^g>=e){var S=sr(ar(l),ar(y));hr(S);var E=sr(i,S);hr(E);var A=(k^w>=0?-1:1)*Ge(E[2]);(r>A||r===A&&(S[0]||S[1]))&&(u+=k^w>=0?1:-1)}}return(o<-ad||oyv&&(yv=t),n<_v&&(_v=n),n>gv&&(gv=n)},lineStart:Qe,lineEnd:Qe,polygonStart:Qe,polygonEnd:Qe,result:function(){var t=[[vv,_v],[yv,gv]];return yv=gv=-(_v=vv=1/0),t}},xv=0,bv=0,wv=0,Mv=0,Tv=0,kv=0,Nv=0,Sv=0,Ev=0,Av={point:oi,lineStart:ui,lineEnd:si,polygonStart:function(){Av.lineStart=fi,Av.lineEnd=li},polygonEnd:function(){Av.point=oi,Av.lineStart=ui,Av.lineEnd=si},result:function(){var t=Ev?[Nv/Ev,Sv/Ev]:kv?[Mv/kv,Tv/kv]:wv?[xv/wv,bv/wv]:[NaN,NaN];return xv=bv=wv=Mv=Tv=kv=Nv=Sv=Ev=0,t}};di.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._context.moveTo(t,n),this._point=1;break;case 
1:this._context.lineTo(t,n);break;default:this._context.moveTo(t+this._radius,n),this._context.arc(t,n,this._radius,0,ld)}},result:Qe};var Cv,zv,Pv,Rv,Lv,qv=Ep(),Uv={point:Qe,lineStart:function(){Uv.point=vi},lineEnd:function(){Cv&&_i(zv,Pv),Uv.point=Qe},polygonStart:function(){Cv=!0},polygonEnd:function(){Cv=null},result:function(){var t=+qv;return qv.reset(),t}};yi.prototype={_radius:4.5,_circle:gi(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._string.push("Z"),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._string.push("M",t,",",n),this._point=1;break;case 1:this._string.push("L",t,",",n);break;default:null==this._circle&&(this._circle=gi(this._radius)),this._string.push("M",t,",",n,this._circle)}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}return null}};var Dv=function(t,n,e,r){return function(i,o){function u(n,e){var r=i(n,e);t(n=r[0],e=r[1])&&o.point(n,e)}function a(t,n){var e=i(t,n);_.point(e[0],e[1])}function c(){b.point=a,_.lineStart()}function s(){b.point=u,_.lineEnd()}function f(t,n){v.push([t,n]);var e=i(t,n);m.point(e[0],e[1])}function l(){m.lineStart(),v=[]}function h(){f(v[0][0],v[0][1]),m.lineEnd();var t,n,e,r,i=m.clean(),u=g.result(),a=u.length;if(v.pop(),p.push(v),v=null,a)if(1&i){if(e=u[0],(n=e.length-1)>0){for(x||(o.polygonStart(),x=!0),o.lineStart(),t=0;t1&&2&i&&u.push(u.pop().concat(u.shift())),d.push(u.filter(mi))}var p,d,v,_=n(o),y=i.invert(r[0],r[1]),g=Zd(),m=n(g),x=!1,b={point:u,lineStart:c,lineEnd:s,polygonStart:function(){b.point=f,b.lineStart=l,b.lineEnd=h,d=[],p=[]},polygonEnd:function(){b.point=u,b.lineStart=c,b.lineEnd=s,d=Cs(d);var 
t=ev(p,y);d.length?(x||(o.polygonStart(),x=!0),Qd(d,xi,t,e,o)):t&&(x||(o.polygonStart(),x=!0),o.lineStart(),e(null,null,1,o),o.lineEnd()),x&&(o.polygonEnd(),x=!1),d=p=null},sphere:function(){o.polygonStart(),o.lineStart(),e(null,null,1,o),o.lineEnd(),o.polygonEnd()}};return b}},Ov=Dv(function(){return!0},function(t){var n,e=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),n=1},point:function(o,u){var a=o>0?cd:-cd,c=dd(o-e);dd(c-cd)0?sd:-sd),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(a,r),t.point(o,r),n=0):i!==a&&c>=cd&&(dd(e-i)ad){var o=t[0]o}function r(t,n,e){var r=[1,0,0],i=sr(ar(t),ar(n)),u=cr(i,i),a=i[0],c=u-a*a;if(!c)return!e&&t;var s=o*u/c,f=-o*a/c,l=sr(r,i),h=lr(r,s);fr(h,lr(i,f));var p=l,d=cr(h,p),v=cr(p,p),_=d*d-v*(cr(h,h)-1);if(!(_<0)){var y=Td(_),g=lr(p,(-d-y)/v);if(fr(g,h),g=ur(g),!e)return g;var m,x=t[0],b=n[0],w=t[1],M=n[1];b0^g[1]<(dd(g[0]-x)cd^(x<=g[0]&&g[0]<=b)){var S=lr(p,(-d+y)/v);return fr(S,h),[g,ur(S)]}}}function i(n,e){var r=u?t:cd-t,i=0;return n<-r?i|=1:n>r&&(i|=2),e<-r?i|=4:e>r&&(i|=8),i}var o=yd(t),u=o>0,a=dd(o)>ad;return Dv(e,function(t){var n,o,c,s,f;return{lineStart:function(){s=c=!1,f=1},point:function(l,h){var p,d=[l,h],v=e(l,h),_=u?v?0:i(l,h):v?i(l+(l<0?cd:-cd),h):0;if(!n&&(s=c=v)&&t.lineStart(),v!==c&&(!(p=r(n,d))||Jd(n,p)||Jd(d,p))&&(d[0]+=ad,d[1]+=ad,v=e(d[0],d[1])),v!==c)f=0,v?(t.lineStart(),p=r(d,n),t.point(p[0],p[1])):(p=r(n,d),t.point(p[0],p[1]),t.lineEnd()),n=p;else if(a&&n&&u^v){var y;_&o||!(y=r(d,n,!0))||(f=0,u?(t.lineStart(),t.point(y[0][0],y[0][1]),t.point(y[1][0],y[1][1]),t.lineEnd()):(t.point(y[1][0],y[1][1]),t.lineEnd(),t.lineStart(),t.point(y[0][0],y[0][1])))}!v||n&&Jd(n,d)||t.point(d[0],d[1]),n=d,c=v,o=_},lineEnd:function(){c&&t.lineEnd(),n=null},clean:function(){return 
f|(s&&c)<<1}}},function(e,r,i,o){Or(o,t,n,i,e,r)},u?[0,-t]:[-cd,t-cd])};Mi.prototype={constructor:Mi,point:function(t,n){this.stream.point(t,n)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var Iv=16,Yv=yd(30*pd),Bv=function(t,n){return+n?Si(t,n):Ni(t)},jv=wi({point:function(t,n){this.stream.point(t*pd,n*pd)}}),Hv=function(){return Ci(Pi).scale(155.424).center([0,33.6442])},Xv=function(){return Hv().parallels([29.5,45.5]).scale(1070).translate([480,250]).rotate([96,0]).center([-.6,38.7])},$v=Li(function(t){return Td(2/(1+t))});$v.invert=qi(function(t){return 2*Ge(t/2)});var Vv=Li(function(t){return(t=Ze(t))&&t/wd(t)});Vv.invert=qi(function(t){return t});Ui.invert=function(t,n){return[t,2*vd(md(n))-sd]};Ii.invert=Ii;Bi.invert=qi(vd);Hi.invert=qi(Ge);Xi.invert=qi(function(t){return 2*vd(t)});$i.invert=function(t,n){return[-n,2*vd(md(t))-sd]};uo.prototype=eo.prototype={constructor:uo,count:function(){return this.eachAfter(to)},each:function(t){var n,e,r,i,o=this,u=[o];do{for(n=u.reverse(),u=[];o=n.pop();)if(t(o),e=o.children)for(r=0,i=e.length;r=0;--e)i.push(n[e]);return this},sum:function(t){return this.eachAfter(function(n){for(var e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e})},sort:function(t){return this.eachBefore(function(n){n.children&&n.children.sort(t)})},path:function(t){for(var n=this,e=no(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){var t=[];return this.each(function(n){t.push(n)}),t},leaves:function(){var t=[];return this.eachBefore(function(n){n.children||t.push(n)}),t},links:function(){var t=this,n=[];return 
t.each(function(e){e!==t&&n.push({source:e.parent,target:e})}),n},copy:function(){return eo(this).eachBefore(io)}};var Wv=Array.prototype.slice,Zv=function(t){for(var n,e,r=0,i=(t=ao(Wv.call(t))).length,o=[];r1?n:1)},e}(r_),o_=function t(n){function e(t,e,r,i,o){if((u=t._squarify)&&u.ratio===n)for(var u,a,c,s,f,l=-1,h=u.length,p=t.value;++l1?n:1)},e}(r_),u_=function(t,n,e){return(n[0]-t[0])*(e[1]-t[1])-(n[1]-t[1])*(e[0]-t[0])},a_=[].slice,c_={};Bo.prototype=Wo.prototype={constructor:Bo,defer:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("defer after await");if(null!=this._error)return this;var n=a_.call(arguments,1);return n.push(t),++this._waiting,this._tasks.push(n),jo(this),this},abort:function(){return null==this._error&&$o(this,new Error("abort")),this},await:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=function(n,e){t.apply(null,[n].concat(e))},Vo(this),this},awaitAll:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=t,Vo(this),this}};var s_=function(){return Math.random()},f_=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,1===arguments.length?(e=t,t=0):e-=t,function(){return n()*e+t}}return e.source=t,e}(s_),l_=function t(n){function e(t,e){var r,i;return t=null==t?0:+t,e=null==e?1:+e,function(){var o;if(null!=r)o=r,r=null;else do{r=2*n()-1,o=2*n()-1,i=r*r+o*o}while(!i||i>1);return t+e*o*Math.sqrt(-2*Math.log(i)/i)}}return e.source=t,e}(s_),h_=function t(n){function e(){var t=l_.source(n).apply(this,arguments);return function(){return Math.exp(t())}}return e.source=t,e}(s_),p_=function t(n){function e(t){return function(){for(var e=0,r=0;r=200&&e<300||304===e){if(o)try{n=o.call(r,s)}catch(t){return void a.call("error",r,t)}else n=s;a.call("load",r,n)}else a.call("error",r,t)}var 
r,i,o,u,a=h("beforesend","progress","load","error"),c=we(),s=new XMLHttpRequest,f=null,l=null,p=0;if("undefined"==typeof XDomainRequest||"withCredentials"in s||!/^(http(s)?:)?\/\//.test(t)||(s=new XDomainRequest),"onload"in s?s.onload=s.onerror=s.ontimeout=e:s.onreadystatechange=function(t){s.readyState>3&&e(t)},s.onprogress=function(t){a.call("progress",r,t)},r={header:function(t,n){return t=(t+"").toLowerCase(),arguments.length<2?c.get(t):(null==n?c.remove(t):c.set(t,n+""),r)},mimeType:function(t){return arguments.length?(i=null==t?null:t+"",r):i},responseType:function(t){return arguments.length?(u=t,r):u},timeout:function(t){return arguments.length?(p=+t,r):p},user:function(t){return arguments.length<1?f:(f=null==t?null:t+"",r)},password:function(t){return arguments.length<1?l:(l=null==t?null:t+"",r)},response:function(t){return o=t,r},get:function(t,n){return r.send("GET",t,n)},post:function(t,n){return r.send("POST",t,n)},send:function(n,e,o){return s.open(n,t,!0,f,l),null==i||c.has("accept")||c.set("accept",i+",*/*"),s.setRequestHeader&&c.each(function(t,n){s.setRequestHeader(n,t)}),null!=i&&s.overrideMimeType&&s.overrideMimeType(i),null!=u&&(s.responseType=u),p>0&&(s.timeout=p),null==o&&"function"==typeof e&&(o=e,e=null),null!=o&&1===o.length&&(o=Zo(o)),null!=o&&r.on("error",o).on("load",function(t){o(null,t)}),a.call("beforesend",r,s),s.send(null==e?null:e),r},abort:function(){return s.abort(),r},on:function(){var t=a.on.apply(a,arguments);return t===a?r:t}},null!=n){if("function"!=typeof n)throw new Error("invalid callback: "+n);return r.get(n)}return r},y_=function(t,n){return function(e,r){var i=__(e).mimeType(t).response(n);if(null!=r){if("function"!=typeof r)throw new Error("invalid callback: "+r);return i.get(r)}return i}},g_=y_("text/html",function(t){return document.createRange().createContextualFragment(t.responseText)}),m_=y_("application/json",function(t){return JSON.parse(t.responseText)}),x_=y_("text/plain",function(t){return 
t.responseText}),b_=y_("application/xml",function(t){var n=t.responseXML;if(!n)throw new Error("parse error");return n}),w_=function(t,n){return function(e,r,i){arguments.length<3&&(i=r,r=null);var o=__(e).mimeType(t);return o.row=function(t){return arguments.length?o.response(Jo(n,r=t)):r},o.row(r),i?o.get(i):o}},M_=w_("text/csv",Qh),T_=w_("text/tab-separated-values",rp),k_=Array.prototype,N_=k_.map,S_=k_.slice,E_={name:"implicit"},A_=function(t){return function(){return t}},C_=function(t){return+t},z_=[0,1],P_=function(n,e,r){var o,u=n[0],a=n[n.length-1],c=i(u,a,null==e?10:e);switch((r=He(null==r?",f":r)).type){case"s":var s=Math.max(Math.abs(u),Math.abs(a));return null!=r.precision||isNaN(o=Np(c,s))||(r.precision=o),t.formatPrefix(r,s);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(o=Sp(c,Math.max(Math.abs(u),Math.abs(a))))||(r.precision=o-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(o=kp(c))||(r.precision=o-2*("%"===r.type))}return t.format(r)},R_=function(t,n){var e,r=0,i=(t=t.slice()).length-1,o=t[r],u=t[i];return u0?t>1?Mu(function(n){n.setTime(Math.floor(n/t)*t)},function(n,e){n.setTime(+n+e*t)},function(n,e){return(e-n)/t}):U_:null};var D_=U_.range,O_=6e4,F_=6048e5,I_=Mu(function(t){t.setTime(1e3*Math.floor(t/1e3))},function(t,n){t.setTime(+t+1e3*n)},function(t,n){return(n-t)/1e3},function(t){return t.getUTCSeconds()}),Y_=I_.range,B_=Mu(function(t){t.setTime(Math.floor(t/O_)*O_)},function(t,n){t.setTime(+t+n*O_)},function(t,n){return(n-t)/O_},function(t){return t.getMinutes()}),j_=B_.range,H_=Mu(function(t){var n=t.getTimezoneOffset()*O_%36e5;n<0&&(n+=36e5),t.setTime(36e5*Math.floor((+t-n)/36e5)+n)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getHours()}),X_=H_.range,$_=Mu(function(t){t.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*O_)/864e5},function(t){return 
t.getDate()-1}),V_=$_.range,W_=Tu(0),Z_=Tu(1),G_=Tu(2),J_=Tu(3),Q_=Tu(4),K_=Tu(5),ty=Tu(6),ny=W_.range,ey=Z_.range,ry=G_.range,iy=J_.range,oy=Q_.range,uy=K_.range,ay=ty.range,cy=Mu(function(t){t.setDate(1),t.setHours(0,0,0,0)},function(t,n){t.setMonth(t.getMonth()+n)},function(t,n){return n.getMonth()-t.getMonth()+12*(n.getFullYear()-t.getFullYear())},function(t){return t.getMonth()}),sy=cy.range,fy=Mu(function(t){t.setMonth(0,1),t.setHours(0,0,0,0)},function(t,n){t.setFullYear(t.getFullYear()+n)},function(t,n){return n.getFullYear()-t.getFullYear()},function(t){return t.getFullYear()});fy.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setFullYear(Math.floor(n.getFullYear()/t)*t),n.setMonth(0,1),n.setHours(0,0,0,0)},function(n,e){n.setFullYear(n.getFullYear()+e*t)}):null};var ly=fy.range,hy=Mu(function(t){t.setUTCSeconds(0,0)},function(t,n){t.setTime(+t+n*O_)},function(t,n){return(n-t)/O_},function(t){return t.getUTCMinutes()}),py=hy.range,dy=Mu(function(t){t.setUTCMinutes(0,0,0)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getUTCHours()}),vy=dy.range,_y=Mu(function(t){t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+n)},function(t,n){return(n-t)/864e5},function(t){return t.getUTCDate()-1}),yy=_y.range,gy=ku(0),my=ku(1),xy=ku(2),by=ku(3),wy=ku(4),My=ku(5),Ty=ku(6),ky=gy.range,Ny=my.range,Sy=xy.range,Ey=by.range,Ay=wy.range,Cy=My.range,zy=Ty.range,Py=Mu(function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCMonth(t.getUTCMonth()+n)},function(t,n){return n.getUTCMonth()-t.getUTCMonth()+12*(n.getUTCFullYear()-t.getUTCFullYear())},function(t){return t.getUTCMonth()}),Ry=Py.range,Ly=Mu(function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCFullYear(t.getUTCFullYear()+n)},function(t,n){return n.getUTCFullYear()-t.getUTCFullYear()},function(t){return t.getUTCFullYear()});Ly.every=function(t){return 
isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setUTCFullYear(Math.floor(n.getUTCFullYear()/t)*t),n.setUTCMonth(0,1),n.setUTCHours(0,0,0,0)},function(n,e){n.setUTCFullYear(n.getUTCFullYear()+e*t)}):null};var qy,Uy=Ly.range,Dy={"-":"",_:" ",0:"0"},Oy=/^\s*\d+/,Fy=/^%/,Iy=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;Ma({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var Yy=Date.prototype.toISOString?function(t){return t.toISOString()}:t.utcFormat("%Y-%m-%dT%H:%M:%S.%LZ"),By=+new Date("2000-01-01T00:00:00.000Z")?function(t){var n=new Date(t);return isNaN(n)?null:n}:t.utcParse("%Y-%m-%dT%H:%M:%S.%LZ"),jy=1e3,Hy=60*jy,Xy=60*Hy,$y=24*Xy,Vy=7*$y,Wy=30*$y,Zy=365*$y,Gy=function(t){return 
t.match(/.{6}/g).map(function(t){return"#"+t})},Jy=Gy("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"),Qy=Gy("393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6"),Ky=Gy("3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9"),tg=Gy("1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5"),ng=wl($t(300,.5,0),$t(-240,.5,1)),eg=wl($t(-100,.75,.35),$t(80,1.5,.8)),rg=wl($t(260,.75,.35),$t(80,1.5,.8)),ig=$t(),og=Sa(Gy("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5d
e2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")),ug=Sa(Gy("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),ag=Sa(Gy("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3
609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),cg=Sa(Gy("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c
33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921")),sg=function(t){return function(){return t}},fg=Math.abs,lg=Math.atan2,hg=Math.cos,pg=Math.max,dg=Math.min,vg=Math.sin,_g=Math.sqrt,yg=1e-12,gg=Math.PI,mg=gg/2,xg=2*gg;Oa.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._context.lineTo(t,n)}}};var bg=function(t){return new Oa(t)},wg=function(){function t(t){var a,c,s,f=t.length,l=!1;for(null==i&&(u=o(s=ve())),a=0;a<=f;++a)!(a=f;--l)s.point(_[l],y[l]);s.lineEnd(),s.areaEnd()}v&&(_[n]=+e(h,n,t),y[n]=+i(h,n,t),s.point(r?+r(h,n,t):_[n],o?+o(h,n,t):y[n]))}if(p)return s=null,p+""||null}function n(){return wg().defined(u).curve(c).context(a)}var e=Fa,r=null,i=sg(0),o=Ia,u=sg(!0),a=null,c=bg,s=null;return t.x=function(n){return arguments.length?(e="function"==typeof n?n:sg(+n),r=null,t):e},t.x0=function(n){return arguments.length?(e="function"==typeof 
n?n:sg(+n),t):e},t.x1=function(n){return arguments.length?(r=null==n?null:"function"==typeof n?n:sg(+n),t):r},t.y=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),o=null,t):i},t.y0=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),t):i},t.y1=function(n){return arguments.length?(o=null==n?null:"function"==typeof n?n:sg(+n),t):o},t.lineX0=t.lineY0=function(){return n().x(e).y(i)},t.lineY1=function(){return n().x(e).y(o)},t.lineX1=function(){return n().x(r).y(i)},t.defined=function(n){return arguments.length?(u="function"==typeof n?n:sg(!!n),t):u},t.curve=function(n){return arguments.length?(c=n,null!=a&&(s=c(a)),t):c},t.context=function(n){return arguments.length?(null==n?a=s=null:s=c(a=n),t):a},t},Tg=function(t,n){return nt?1:n>=t?0:NaN},kg=function(t){return t},Ng=Ba(bg);Ya.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,n){this._curve.point(n*Math.sin(t),n*-Math.cos(t))}};var Sg=function(){return ja(wg().curve(Ng))},Eg=function(){var t=Mg().curve(Ng),n=t.curve,e=t.lineX0,r=t.lineX1,i=t.lineY0,o=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return ja(e())},delete t.lineX0,t.lineEndAngle=function(){return ja(r())},delete t.lineX1,t.lineInnerRadius=function(){return ja(i())},delete t.lineY0,t.lineOuterRadius=function(){return ja(o())},delete t.lineY1,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t},Ag=function(t,n){return[(n=+n)*Math.cos(t-=Math.PI/2),n*Math.sin(t)]},Cg=Array.prototype.slice,zg={draw:function(t,n){var e=Math.sqrt(n/gg);t.moveTo(e,0),t.arc(0,0,e,0,xg)}},Pg={draw:function(t,n){var 
e=Math.sqrt(n/5)/2;t.moveTo(-3*e,-e),t.lineTo(-e,-e),t.lineTo(-e,-3*e),t.lineTo(e,-3*e),t.lineTo(e,-e),t.lineTo(3*e,-e),t.lineTo(3*e,e),t.lineTo(e,e),t.lineTo(e,3*e),t.lineTo(-e,3*e),t.lineTo(-e,e),t.lineTo(-3*e,e),t.closePath()}},Rg=Math.sqrt(1/3),Lg=2*Rg,qg={draw:function(t,n){var e=Math.sqrt(n/Lg),r=e*Rg;t.moveTo(0,-e),t.lineTo(r,0),t.lineTo(0,e),t.lineTo(-r,0),t.closePath()}},Ug=Math.sin(gg/10)/Math.sin(7*gg/10),Dg=Math.sin(xg/10)*Ug,Og=-Math.cos(xg/10)*Ug,Fg={draw:function(t,n){var e=Math.sqrt(.8908130915292852*n),r=Dg*e,i=Og*e;t.moveTo(0,-e),t.lineTo(r,i);for(var o=1;o<5;++o){var u=xg*o/5,a=Math.cos(u),c=Math.sin(u);t.lineTo(c*e,-a*e),t.lineTo(a*r-c*i,c*r+a*i)}t.closePath()}},Ig={draw:function(t,n){var e=Math.sqrt(n),r=-e/2;t.rect(r,r,e,e)}},Yg=Math.sqrt(3),Bg={draw:function(t,n){var e=-Math.sqrt(n/(3*Yg));t.moveTo(0,2*e),t.lineTo(-Yg*e,-e),t.lineTo(Yg*e,-e),t.closePath()}},jg=-.5,Hg=Math.sqrt(3)/2,Xg=1/Math.sqrt(12),$g=3*(Xg/2+1),Vg={draw:function(t,n){var e=Math.sqrt(n/$g),r=e/2,i=e*Xg,o=r,u=e*Xg+e,a=-o,c=u;t.moveTo(r,i),t.lineTo(o,u),t.lineTo(a,c),t.lineTo(jg*r-Hg*i,Hg*r+jg*i),t.lineTo(jg*o-Hg*u,Hg*o+jg*u),t.lineTo(jg*a-Hg*c,Hg*a+jg*c),t.lineTo(jg*r+Hg*i,jg*i-Hg*r),t.lineTo(jg*o+Hg*u,jg*u-Hg*o),t.lineTo(jg*a+Hg*c,jg*c-Hg*a),t.closePath()}},Wg=[zg,Pg,qg,Ig,Fg,Bg,Vg],Zg=function(){};Ja.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:Ga(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Qa.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x2=t,this._y2=n;break;case 1:this._point=2,this._x3=t,this._y3=n;break;case 2:this._point=3,this._x4=t,this._y4=n,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+n)/6);break;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Ka.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var e=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+n)/6;this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break;case 3:this._point=4;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};tc.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,n=this._y,e=t.length-1;if(e>0)for(var 
r,i=t[0],o=n[0],u=t[e]-i,a=n[e]-o,c=-1;++c<=e;)r=c/e,this._basis.point(this._beta*t[c]+(1-this._beta)*(i+r*u),this._beta*n[c]+(1-this._beta)*(o+r*a));this._x=this._y=null,this._basis.lineEnd()},point:function(t,n){this._x.push(+t),this._y.push(+n)}};var Gg=function t(n){function e(t){return 1===n?new Ja(t):new tc(t,n)}return e.beta=function(n){return t(+n)},e}(.85);ec.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:nc(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2,this._x1=t,this._y1=n;break;case 2:this._point=3;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Jg=function t(n){function e(t){return new ec(t,n)}return e.tension=function(n){return t(+n)},e}(0);rc.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 
2:this._point=3,this._x5=t,this._y5=n;break;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Qg=function t(n){function e(t){return new rc(t,n)}return e.tension=function(n){return t(+n)},e}(0);ic.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Kg=function t(n){function e(t){return new ic(t,n)}return e.tension=function(n){return t(+n)},e}(0);uc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var tm=function t(n){function e(t){return n?new uc(t,n):new ec(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);ac.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var nm=function t(n){function e(t){return n?new ac(t,n):new rc(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);cc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var 
e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var em=function t(n){function e(t){return n?new cc(t,n):new ic(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);sc.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._point=0},lineEnd:function(){this._point&&this._context.closePath()},point:function(t,n){t=+t,n=+n,this._point?this._context.lineTo(t,n):(this._point=1,this._context.moveTo(t,n))}};dc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=this._t0=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x1,this._y1);break;case 3:pc(this,this._t0,hc(this,this._t0))}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){var e=NaN;if(t=+t,n=+n,t!==this._x1||n!==this._y1){switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,pc(this,hc(this,e=lc(this,t,n)),e);break;default:pc(this,this._t0,e=lc(this,t,n))}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n,this._t0=e}}},(vc.prototype=Object.create(dc.prototype)).point=function(t,n){dc.prototype.point.call(this,n,t)},_c.prototype={moveTo:function(t,n){this._context.moveTo(n,t)},closePath:function(){this._context.closePath()},lineTo:function(t,n){this._context.lineTo(n,t)},bezierCurveTo:function(t,n,e,r,i,o){this._context.bezierCurveTo(n,t,r,e,o,i)}},yc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=[],this._y=[]},lineEnd:function(){var t=this._x,n=this._y,e=t.length;if(e)if(this._line?this._context.lineTo(t[0],n[0]):this._context.moveTo(t[0],n[0]),2===e)this._context.lineTo(t[1],n[1]);else for(var r=gc(t),i=gc(n),o=0,u=1;u=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,n),this._context.lineTo(t,n);else{var e=this._x*(1-this._t)+t*this._t;this._context.lineTo(e,this._y),this._context.lineTo(e,n)}}this._x=t,this._y=n}};var rm=function(t,n){if((i=t.length)>1)for(var e,r,i,o=1,u=t[n[0]],a=u.length;o=0;)e[n]=n;return e},om=function(t){var n=t.map(bc);return im(t).sort(function(t,e){return n[t]-n[e]})},um=function(t){return function(){return t}};Tc.prototype={constructor:Tc,insert:function(t,n){var e,r,i;if(t){if(n.P=t,n.N=t.N,t.N&&(t.N.P=n),t.N=n,t.R){for(t=t.R;t.L;)t=t.L;t.L=n}else t.R=n;e=t}else 
this._?(t=Ec(this._),n.P=null,n.N=t,t.P=t.L=n,e=t):(n.P=n.N=null,this._=n,e=null);for(n.L=n.R=null,n.U=e,n.C=!0,t=n;e&&e.C;)e===(r=e.U).L?(i=r.R)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.R&&(Nc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Sc(this,r)):(i=r.L)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.L&&(Sc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Nc(this,r)),e=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var n,e,r,i=t.U,o=t.L,u=t.R;if(e=o?u?Ec(u):o:u,i?i.L===t?i.L=e:i.R=e:this._=e,o&&u?(r=e.C,e.C=t.C,e.L=o,o.U=e,e!==u?(i=e.U,e.U=t.U,t=e.R,i.L=t,e.R=u,u.U=e):(e.U=i,i=e,t=e.R)):(r=t.C,t=e),t&&(t.U=i),!r)if(t&&t.C)t.C=!1;else{do{if(t===this._)break;if(t===i.L){if((n=i.R).C&&(n.C=!1,i.C=!0,Nc(this,i),n=i.R),n.L&&n.L.C||n.R&&n.R.C){n.R&&n.R.C||(n.L.C=!1,n.C=!0,Sc(this,n),n=i.R),n.C=i.C,i.C=n.R.C=!1,Nc(this,i),t=this._;break}}else if((n=i.L).C&&(n.C=!1,i.C=!0,Sc(this,i),n=i.L),n.L&&n.L.C||n.R&&n.R.C){n.L&&n.L.C||(n.R.C=!1,n.C=!0,Nc(this,n),n=i.L),n.C=i.C,i.C=n.L.C=!1,Sc(this,i),t=this._;break}n.C=!0,t=i,i=i.U}while(!t.C);t&&(t.C=!1)}}};var am,cm,sm,fm,lm,hm=[],pm=[],dm=1e-6,vm=1e-12;Kc.prototype={constructor:Kc,polygons:function(){var t=this.edges;return this.cells.map(function(n){var e=n.halfedges.map(function(e){return Dc(n,t[e])});return e.data=n.site.data,e})},triangles:function(){var t=[],n=this.edges;return this.cells.forEach(function(e,r){if(o=(i=e.halfedges).length)for(var i,o,u,a=e.site,c=-1,s=n[i[o-1]],f=s.left===a?s.right:s.left;++c=a)return null;var c=t-i.site[0],s=n-i.site[1],f=c*c+s*s;do{i=o.cells[r=u],u=null,i.halfedges.forEach(function(e){var r=o.edges[e],a=r.left;if(a!==i.site&&a||(a=r.right)){var c=t-a[0],s=n-a[1],l=c*c+s*s;lt?1:n>=t?0:NaN},t.deviation=_s,t.extent=ys,t.histogram=function(){function t(t){var o,u,a=t.length,c=new Array(a);for(o=0;ol;)h.pop(),--p;var d,v=new Array(p+1);for(o=0;o<=p;++o)(d=v[o]=[]).x0=o>0?h[o-1]:f,d.x1=o=e)for(r=e;++or&&(r=e)}else for(;++o=e)for(r=e;++or&&(r=e);return r},t.mean=function(t,n){var 
e,r=t.length,i=r,o=-1,u=0;if(null==n)for(;++o=o.length)return null!=e&&n.sort(e),null!=r?r(n):n;for(var c,s,f,l=-1,h=n.length,p=o[i++],d=we(),v=u();++lo.length)return t;var i,a=u[e-1];return null!=r&&e>=o.length?i=t.entries():(i=[],t.each(function(t,r){i.push({key:r,values:n(t,e)})})),null!=a?i.sort(function(t,n){return a(t.key,n.key)}):i}var e,r,i,o=[],u=[];return i={object:function(n){return t(n,0,Me,Te)},map:function(n){return t(n,0,ke,Ne)},entries:function(e){return n(t(e,0,ke,Ne),0)},key:function(t){return o.push(t),i},sortKeys:function(t){return u[o.length-1]=t,i},sortValues:function(t){return e=t,i},rollup:function(t){return r=t,i}}},t.set=Ee,t.map=we,t.keys=function(t){var n=[];for(var e in t)n.push(e);return n},t.values=function(t){var n=[];for(var e in t)n.push(t[e]);return n},t.entries=function(t){var n=[];for(var e in t)n.push({key:e,value:t[e]});return n},t.color=Tt,t.rgb=Et,t.hsl=Pt,t.lab=Ut,t.hcl=jt,t.cubehelix=$t,t.dispatch=h,t.drag=function(){function n(t){t.on("mousedown.drag",e).filter(bt).on("touchstart.drag",o).on("touchmove.drag",u).on("touchend.drag touchcancel.drag",a).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(){if(!p&&d.apply(this,arguments)){var n=c("mouse",v.apply(this,arguments),Ks,this,arguments);n&&(cf(t.event.view).on("mousemove.drag",r,!0).on("mouseup.drag",i,!0),lf(t.event.view),vt(),l=!1,s=t.event.clientX,f=t.event.clientY,n("start"))}}function r(){if(ff(),!l){var n=t.event.clientX-s,e=t.event.clientY-f;l=n*n+e*e>x}y.mouse("drag")}function i(){cf(t.event.view).on("mousemove.drag mouseup.drag",null),_t(t.event.view,l),ff(),y.mouse("end")}function o(){if(d.apply(this,arguments)){var n,e,r=t.event.changedTouches,i=v.apply(this,arguments),o=r.length;for(n=0;nc+p||is+p||or.index){var d=c-a.x-a.vx,v=s-a.y-a.vy,_=d*d+v*v;_t.r&&(t.r=t[n].r)}function r(){if(i){var n,e,r=i.length;for(o=new 
Array(r),n=0;n=f)){(t.data!==o||t.next)&&(0===i&&(i=cp(),p+=i*i),0===c&&(c=cp(),p+=c*c),p1?(null==n?l.remove(t):l.set(t,i(n)),o):l.get(t)},find:function(n,e,r){var i,o,u,a,c,s=0,f=t.length;for(null==r?r=1/0:r*=r,s=0;s1?(d.on(t,n),o):d.on(t)}}},t.forceX=function(t){function n(t){for(var n,e=0,u=r.length;exr(r[0],r[1])&&(r[1]=i[1]),xr(i[0],r[1])>xr(r[0],r[1])&&(r[0]=i[0])):o.push(r=i);for(u=-1/0,n=0,r=o[e=o.length-1];n<=e;r=i,++n)i=o[n],(a=xr(r[1],i[0]))>u&&(u=a,Lp=i[0],Up=r[1])}return Bp=jp=null,Lp===1/0||qp===1/0?[[NaN,NaN],[NaN,NaN]]:[[Lp,qp],[Up,Dp]]},t.geoCentroid=function(t){Hp=Xp=$p=Vp=Wp=Zp=Gp=Jp=Qp=Kp=td=0,Ed(t,Ld);var n=Qp,e=Kp,r=td,i=n*n+e*e+r*r;return i<1e-12&&(n=Zp,e=Gp,r=Jp,Xp=.12&&i<.234&&r>=-.425&&r<-.214?s:i>=.166&&i<.234&&r>=-.214&&r<-.115?f:c).invert(t)},t.stream=function(t){return e&&r===t?e:e=Ri([c.stream(r=t),s.stream(t),f.stream(t)])},t.precision=function(t){return arguments.length?(c.precision(t),s.precision(t),f.precision(t),n()):c.precision()},t.scale=function(n){return arguments.length?(c.scale(n),s.scale(.35*n),f.scale(n),t.translate(c.translate())):c.scale()},t.translate=function(t){if(!arguments.length)return c.translate();var e=c.scale(),r=+t[0],a=+t[1];return i=c.translate(t).clipExtent([[r-.455*e,a-.238*e],[r+.455*e,a+.238*e]]).stream(l),o=s.translate([r-.307*e,a+.201*e]).clipExtent([[r-.425*e+ad,a+.12*e+ad],[r-.214*e-ad,a+.234*e-ad]]).stream(l),u=f.translate([r-.205*e,a+.212*e]).clipExtent([[r-.214*e+ad,a+.166*e+ad],[r-.115*e-ad,a+.234*e-ad]]).stream(l),n()},t.fitExtent=function(n,e){return Ti(t,n,e)},t.fitSize=function(n,e){return ki(t,n,e)},t.scale(1070)},t.geoAzimuthalEqualArea=function(){return Ei($v).scale(124.75).clipAngle(179.999)},t.geoAzimuthalEqualAreaRaw=$v,t.geoAzimuthalEquidistant=function(){return Ei(Vv).scale(79.4188).clipAngle(179.999)},t.geoAzimuthalEquidistantRaw=Vv,t.geoConicConformal=function(){return 
Ci(Fi).scale(109.5).parallels([30,30])},t.geoConicConformalRaw=Fi,t.geoConicEqualArea=Hv,t.geoConicEqualAreaRaw=Pi,t.geoConicEquidistant=function(){return Ci(Yi).scale(131.154).center([0,13.9389])},t.geoConicEquidistantRaw=Yi,t.geoEquirectangular=function(){return Ei(Ii).scale(152.63)},t.geoEquirectangularRaw=Ii,t.geoGnomonic=function(){return Ei(Bi).scale(144.049).clipAngle(60)},t.geoGnomonicRaw=Bi,t.geoIdentity=function(){function t(){return i=o=null,u}var n,e,r,i,o,u,a=1,c=0,s=0,f=1,l=1,h=lv,p=null,d=lv;return u={stream:function(t){return i&&o===t?i:i=h(d(o=t))},clipExtent:function(i){return arguments.length?(d=null==i?(p=n=e=r=null,lv):Br(p=+i[0][0],n=+i[0][1],e=+i[1][0],r=+i[1][1]),t()):null==p?null:[[p,n],[e,r]]},scale:function(n){return arguments.length?(h=ji((a=+n)*f,a*l,c,s),t()):a},translate:function(n){return arguments.length?(h=ji(a*f,a*l,c=+n[0],s=+n[1]),t()):[c,s]},reflectX:function(n){return arguments.length?(h=ji(a*(f=n?-1:1),a*l,c,s),t()):f<0},reflectY:function(n){return arguments.length?(h=ji(a*f,a*(l=n?-1:1),c,s),t()):l<0},fitExtent:function(t,n){return Ti(u,t,n)},fitSize:function(t,n){return ki(u,t,n)}}},t.geoProjection=Ei,t.geoProjectionMutator=Ai,t.geoMercator=function(){return Di(Ui).scale(961/ld)},t.geoMercatorRaw=Ui,t.geoOrthographic=function(){return Ei(Hi).scale(249.5).clipAngle(90+ad)},t.geoOrthographicRaw=Hi,t.geoStereographic=function(){return Ei(Xi).scale(250).clipAngle(142)},t.geoStereographicRaw=Xi,t.geoTransverseMercator=function(){var t=Di($i),n=t.center,e=t.rotate;return t.center=function(t){return arguments.length?n([-t[1],t[0]]):(t=n(),[t[1],-t[0]])},t.rotate=function(t){return arguments.length?e([t[0],t[1],t.length>2?t[2]+90:90]):(t=e(),[t[0],t[1],t[2]-90])},e([0,0,90]).scale(159.155)},t.geoTransverseMercatorRaw=$i,t.geoRotation=Wd,t.geoStream=Ed,t.geoTransform=function(t){return{stream:wi(t)}},t.cluster=function(){function t(t){var o,u=0;t.eachAfter(function(t){var 
e=t.children;e?(t.x=Wi(e),t.y=Gi(e)):(t.x=o?u+=n(t,o):0,t.y=0,o=t)});var a=Qi(t),c=Ki(t),s=a.x-n(a,c)/2,f=c.x+n(c,a)/2;return t.eachAfter(i?function(n){n.x=(n.x-t.x)*e,n.y=(t.y-n.y)*r}:function(n){n.x=(n.x-s)/(f-s)*e,n.y=(1-(t.y?n.y/t.y:1))*r})}var n=Vi,e=1,r=1,i=!1;return t.separation=function(e){return arguments.length?(n=e,t):n},t.size=function(n){return arguments.length?(i=!1,e=+n[0],r=+n[1],t):i?null:[e,r]},t.nodeSize=function(n){return arguments.length?(i=!0,e=+n[0],r=+n[1],t):i?[e,r]:null},t},t.hierarchy=eo,t.pack=function(){function t(t){return t.x=e/2,t.y=r/2,n?t.eachBefore(No(n)).eachAfter(So(i,.5)).eachBefore(Eo(1)):t.eachBefore(No(ko)).eachAfter(So(To,1)).eachAfter(So(i,t.r/Math.min(e,r))).eachBefore(Eo(Math.min(e,r)/(2*t.r))),t}var n=null,e=1,r=1,i=To;return t.radius=function(e){return arguments.length?(n=wo(e),t):n},t.size=function(n){return arguments.length?(e=+n[0],r=+n[1],t):[e,r]},t.padding=function(n){return arguments.length?(i="function"==typeof n?n:Gv(+n),t):i},t},t.packSiblings=function(t){return bo(t),t},t.packEnclose=Zv,t.partition=function(){function t(t){var u=t.height+1;return t.x0=t.y0=i,t.x1=e,t.y1=r/u,t.eachBefore(n(r,u)),o&&t.eachBefore(Jv),t}function n(t,n){return function(e){e.children&&Qv(e,e.x0,t*(e.depth+1)/n,e.x1,t*(e.depth+2)/n);var r=e.x0,o=e.y0,u=e.x1-i,a=e.y1-i;u0)throw new Error("cycle");return o}var n=Ao,e=Co;return t.id=function(e){return arguments.length?(n=Mo(e),t):n},t.parentId=function(n){return arguments.length?(e=Mo(n),t):e},t},t.tree=function(){function t(t){var r=Oo(t);if(r.eachAfter(n),r.parent.m=-r.z,r.eachBefore(e),c)t.eachBefore(i);else{var s=t,f=t,l=t;t.eachBefore(function(t){t.xf.x&&(f=t),t.depth>l.depth&&(l=t)});var h=s===f?1:o(s,f)/2,p=h-s.x,d=u/(f.x+h+p),v=a/(l.depth||1);t.eachBefore(function(t){t.x=(t.x+p)*d,t.y=t.depth*v})}return t}function n(t){var n=t.children,e=t.parent.children,i=t.i?e[t.i-1]:null;if(n){qo(t);var u=(n[0].z+n[n.length-1].z)/2;i?(t.z=i.z+o(t._,i._),t.m=t.z-u):t.z=u}else 
i&&(t.z=i.z+o(t._,i._));t.parent.A=r(t,i,t.parent.A||e[0])}function e(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function r(t,n,e){if(n){for(var r,i=t,u=t,a=n,c=i.parent.children[0],s=i.m,f=u.m,l=a.m,h=c.m;a=Ro(a),i=Po(i),a&&i;)c=Po(c),(u=Ro(u)).a=t,(r=a.z+l-i.z-s+o(a._,i._))>0&&(Lo(Uo(a,t,e),t,r),s+=r,f+=r),l+=a.m,s+=i.m,h+=c.m,f+=u.m;a&&!Ro(u)&&(u.t=a,u.m+=l-f),i&&!Po(c)&&(c.t=i,c.m+=s-h,e=t)}return e}function i(t){t.x*=u,t.y=t.depth*a}var o=zo,u=1,a=1,c=null;return t.separation=function(n){return arguments.length?(o=n,t):o},t.size=function(n){return arguments.length?(c=!1,u=+n[0],a=+n[1],t):c?null:[u,a]},t.nodeSize=function(n){return arguments.length?(c=!0,u=+n[0],a=+n[1],t):c?[u,a]:null},t},t.treemap=function(){function t(t){return t.x0=t.y0=0,t.x1=i,t.y1=o,t.eachBefore(n),u=[0],r&&t.eachBefore(Jv),t}function n(t){var n=u[t.depth],r=t.x0+n,i=t.y0+n,o=t.x1-n,h=t.y1-n;o=n-1){var s=c[t];return s.x0=r,s.y0=i,s.x1=u,void(s.y1=a)}for(var l=f[t],h=e/2+l,p=t+1,d=n-1;p>>1;f[v]a-i){var g=(r*y+u*_)/e;o(t,p,_,r,i,g,a),o(p,n,y,g,i,u,a)}else{var m=(i*y+a*_)/e;o(t,p,_,r,i,u,m),o(p,n,y,r,m,u,a)}}var u,a,c=t.children,s=c.length,f=new Array(s+1);for(f[0]=a=u=0;u=0;--n)s.push(t[r[o[n]][2]]);for(n=+a;na!=s>a&&u<(c-e)*(a-r)/(s-r)+e&&(f=!f),c=e,s=r;return f},t.polygonLength=function(t){for(var n,e,r=-1,i=t.length,o=t[i-1],u=o[0],a=o[1],c=0;++r1)&&(t-=Math.floor(t));var n=Math.abs(t-.5);return ig.h=360*t-100,ig.s=1.5-1.5*n,ig.l=.8-.9*n,ig+""},t.interpolateWarm=eg,t.interpolateCool=rg,t.interpolateViridis=og,t.interpolateMagma=ug,t.interpolateInferno=ag,t.interpolatePlasma=cg,t.scaleSequential=Ea,t.creator=Hs,t.local=m,t.matcher=Zs,t.mouse=Ks,t.namespace=js,t.namespaces=Bs,t.select=cf,t.selectAll=function(t){return"string"==typeof t?new pt([document.querySelectorAll(t)],[document.documentElement]):new pt([null==t?[]:t],af)},t.selection=dt,t.selector=tf,t.selectorAll=nf,t.style=B,t.touch=sf,t.touches=function(t,n){null==n&&(n=Js().touches);for(var e=0,r=n?n.length:0,i=new 
Array(r);eh;if(c||(c=t=ve()),lyg)if(d>xg-yg)c.moveTo(l*hg(h),l*vg(h)),c.arc(0,0,l,h,p,!v),f>yg&&(c.moveTo(f*hg(p),f*vg(p)),c.arc(0,0,f,p,h,v));else{var _,y,g=h,m=p,x=h,b=p,w=d,M=d,T=a.apply(this,arguments)/2,k=T>yg&&(i?+i.apply(this,arguments):_g(f*f+l*l)),N=dg(fg(l-f)/2,+r.apply(this,arguments)),S=N,E=N;if(k>yg){var A=Ca(k/f*vg(T)),C=Ca(k/l*vg(T));(w-=2*A)>yg?(A*=v?1:-1,x+=A,b-=A):(w=0,x=b=(h+p)/2),(M-=2*C)>yg?(C*=v?1:-1,g+=C,m-=C):(M=0,g=m=(h+p)/2)}var z=l*hg(g),P=l*vg(g),R=f*hg(b),L=f*vg(b);if(N>yg){var q=l*hg(m),U=l*vg(m),D=f*hg(x),O=f*vg(x);if(dyg?Ua(z,P,D,O,q,U,R,L):[R,L],I=z-F[0],Y=P-F[1],B=q-F[0],j=U-F[1],H=1/vg(Aa((I*B+Y*j)/(_g(I*I+Y*Y)*_g(B*B+j*j)))/2),X=_g(F[0]*F[0]+F[1]*F[1]);S=dg(N,(f-X)/(H-1)),E=dg(N,(l-X)/(H+1))}}M>yg?E>yg?(_=Da(D,O,z,P,l,E,v),y=Da(q,U,R,L,l,E,v),c.moveTo(_.cx+_.x01,_.cy+_.y01),Eyg&&w>yg?S>yg?(_=Da(R,L,q,U,f,-S,v),y=Da(z,P,D,O,f,-S,v),c.lineTo(_.cx+_.x01,_.cy+_.y01),S0&&(p+=l);for(null!=e?d.sort(function(t,n){return e(v[t],v[n])}):null!=r&&d.sort(function(n,e){return r(t[n],t[e])}),a=0,s=p?(y-h*m)/p:0;a0?l*s:0)+m,v[c]={data:t[c],index:a,value:l,startAngle:_,endAngle:f,padAngle:g};return v}var n=kg,e=Tg,r=null,i=sg(0),o=sg(xg),u=sg(0);return t.value=function(e){return arguments.length?(n="function"==typeof e?e:sg(+e),t):n},t.sortValues=function(n){return arguments.length?(e=n,r=null,t):e},t.sort=function(n){return arguments.length?(r=n,e=null,t):r},t.startAngle=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),t):i},t.endAngle=function(n){return arguments.length?(o="function"==typeof n?n:sg(+n),t):o},t.padAngle=function(n){return arguments.length?(u="function"==typeof n?n:sg(+n),t):u},t},t.areaRadial=Eg,t.radialArea=Eg,t.lineRadial=Sg,t.radialLine=Sg,t.pointRadial=Ag,t.linkHorizontal=function(){return $a(Va)},t.linkVertical=function(){return $a(Wa)},t.linkRadial=function(){var t=$a(Za);return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t},t.symbol=function(){function t(){var 
t;if(r||(r=t=ve()),n.apply(this,arguments).draw(r,+e.apply(this,arguments)),t)return r=null,t+""||null}var n=sg(zg),e=sg(64),r=null;return t.type=function(e){return arguments.length?(n="function"==typeof e?e:sg(e),t):n},t.size=function(n){return arguments.length?(e="function"==typeof n?n:sg(+n),t):e},t.context=function(n){return arguments.length?(r=null==n?null:n,t):r},t},t.symbols=Wg,t.symbolCircle=zg,t.symbolCross=Pg,t.symbolDiamond=qg,t.symbolSquare=Ig,t.symbolStar=Fg,t.symbolTriangle=Bg,t.symbolWye=Vg,t.curveBasisClosed=function(t){return new Qa(t)},t.curveBasisOpen=function(t){return new Ka(t)},t.curveBasis=function(t){return new Ja(t)},t.curveBundle=Gg,t.curveCardinalClosed=Qg,t.curveCardinalOpen=Kg,t.curveCardinal=Jg,t.curveCatmullRomClosed=nm,t.curveCatmullRomOpen=em,t.curveCatmullRom=tm,t.curveLinearClosed=function(t){return new sc(t)},t.curveLinear=bg,t.curveMonotoneX=function(t){return new dc(t)},t.curveMonotoneY=function(t){return new vc(t)},t.curveNatural=function(t){return new yc(t)},t.curveStep=function(t){return new mc(t,.5)},t.curveStepAfter=function(t){return new mc(t,1)},t.curveStepBefore=function(t){return new mc(t,0)},t.stack=function(){function t(t){var o,u,a=n.apply(this,arguments),c=t.length,s=a.length,f=new Array(s);for(o=0;o0){for(var e,r,i,o=0,u=t[0].length;o1)for(var e,r,i,o,u,a,c=0,s=t[n[0]].length;c=0?(r[0]=o,r[1]=o+=i):i<0?(r[1]=u,r[0]=u+=i):r[0]=o},t.stackOffsetNone=rm,t.stackOffsetSilhouette=function(t,n){if((e=t.length)>0){for(var e,r=0,i=t[n[0]],o=i.length;r0&&(r=(e=t[n[0]]).length)>0){for(var e,r,i,o=0,u=1;uUl&&e.name===n)return new Gn([[t]],yh,n,+r)}return null},t.interrupt=jl,t.voronoi=function(){function t(t){return new Kc(t.map(function(r,i){var o=[Math.round(n(r,i,t)/dm)*dm,Math.round(e(r,i,t)/dm)*dm];return o.index=i,o.data=r,o}),r)}var n=wc,e=Mc,r=null;return t.polygons=function(n){return t(n).polygons()},t.links=function(n){return t(n).links()},t.triangles=function(n){return t(n).triangles()},t.x=function(e){return 
arguments.length?(n="function"==typeof e?e:um(+e),t):n},t.y=function(n){return arguments.length?(e="function"==typeof n?n:um(+n),t):e},t.extent=function(n){return arguments.length?(r=null==n?null:[[+n[0][0],+n[0][1]],[+n[1][0],+n[1][1]]],t):r&&[[r[0][0],r[0][1]],[r[1][0],r[1][1]]]},t.size=function(n){return arguments.length?(r=null==n?null:[[0,0],[+n[0],+n[1]]],t):r&&[r[1][0]-r[0][0],r[1][1]-r[0][1]]},t},t.zoom=function(){function n(t){t.property("__zoom",us).on("wheel.zoom",s).on("mousedown.zoom",f).on("dblclick.zoom",l).filter(cs).on("touchstart.zoom",p).on("touchmove.zoom",d).on("touchend.zoom touchcancel.zoom",v).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(t,n){return(n=Math.max(b,Math.min(w,n)))===t.k?t:new ns(n,t.x,t.y)}function r(t,n,e){var r=n[0]-e[0]*t.k,i=n[1]-e[1]*t.k;return r===t.x&&i===t.y?t:new ns(t.k,r,i)}function i(t,n){var e=t.invertX(n[0][0])-M,r=t.invertX(n[1][0])-T,i=t.invertY(n[0][1])-k,o=t.invertY(n[1][1])-S;return t.translate(r>e?(e+r)/2:Math.min(0,e)||Math.max(0,r),o>i?(i+o)/2:Math.min(0,i)||Math.max(0,o))}function o(t){return[(+t[0][0]+ +t[1][0])/2,(+t[0][1]+ +t[1][1])/2]}function u(t,n,e){t.on("start.zoom",function(){a(this,arguments).start()}).on("interrupt.zoom end.zoom",function(){a(this,arguments).end()}).tween("zoom",function(){var t=this,r=arguments,i=a(t,r),u=m.apply(t,r),c=e||o(u),s=Math.max(u[1][0]-u[0][0],u[1][1]-u[0][1]),f=t.__zoom,l="function"==typeof n?n.apply(t,r):n,h=A(f.invert(c).concat(s/f.k),l.invert(c).concat(s/l.k));return function(t){if(1===t)t=l;else{var n=h(t),e=s/n[2];t=new ns(e,c[0]-n[0]*e,c[1]-n[1]*e)}i.zoom(null,t)}})}function a(t,n){for(var e,r=0,i=C.length;rL}n.zoom("mouse",i(r(n.that.__zoom,n.mouse[0]=Ks(n.that),n.mouse[1]),n.extent))},!0).on("mouseup.zoom",function(){e.on("mousemove.zoom 
mouseup.zoom",null),_t(t.event.view,n.moved),gm(),n.end()},!0),o=Ks(this),u=t.event.clientX,c=t.event.clientY;lf(t.event.view),rs(),n.mouse=[o,this.__zoom.invert(o)],jl(this),n.start()}}function l(){if(g.apply(this,arguments)){var o=this.__zoom,a=Ks(this),c=o.invert(a),s=i(r(e(o,o.k*(t.event.shiftKey?.5:2)),a,c),m.apply(this,arguments));gm(),E>0?cf(this).transition().duration(E).call(u,s,a):cf(this).call(n.transform,s)}}function p(){if(g.apply(this,arguments)){var n,e,r,i,o=a(this,arguments),u=t.event.changedTouches,c=u.length;for(rs(),e=0;e=0?(o>=Ts?10:o>=ks?5:o>=Ns?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(o>=Ts?10:o>=ks?5:o>=Ns?2:1)}function i(t,n,e){var r=Math.abs(n-t)/Math.max(0,e),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),o=r/i;return o>=Ts?i*=10:o>=ks?i*=5:o>=Ns&&(i*=2),n=0&&(e=t.slice(r+1),t=t.slice(0,r)),t&&!n.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:e}})}function v(t,n){for(var e,r=0,i=t.length;r=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}})}function T(t){return function(){var n=this.__on;if(n){for(var e,r=0,i=-1,o=n.length;rn?1:t>=n?0:NaN}function R(t){return function(){this.removeAttribute(t)}}function L(t){return function(){this.removeAttributeNS(t.space,t.local)}}function q(t,n){return function(){this.setAttribute(t,n)}}function U(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function D(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function O(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function F(t){return function(){this.style.removeProperty(t)}}function I(t,n,e){return function(){this.style.setProperty(t,n,e)}}function Y(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function B(t,n){return 
t.style.getPropertyValue(n)||uf(t).getComputedStyle(t,null).getPropertyValue(n)}function j(t){return function(){delete this[t]}}function H(t,n){return function(){this[t]=n}}function X(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function $(t){return t.trim().split(/^|\s+/)}function V(t){return t.classList||new W(t)}function W(t){this._node=t,this._names=$(t.getAttribute("class")||"")}function Z(t,n){for(var e=V(t),r=-1,i=n.length;++r>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1)):(n=gf.exec(t))?kt(parseInt(n[1],16)):(n=mf.exec(t))?new At(n[1],n[2],n[3],1):(n=xf.exec(t))?new At(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=bf.exec(t))?Nt(n[1],n[2],n[3],n[4]):(n=wf.exec(t))?Nt(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=Mf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,1):(n=Tf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,n[4]):kf.hasOwnProperty(t)?kt(kf[t]):"transparent"===t?new At(NaN,NaN,NaN,0):null}function kt(t){return new At(t>>16&255,t>>8&255,255&t,1)}function Nt(t,n,e,r){return r<=0&&(t=n=e=NaN),new At(t,n,e,r)}function St(t){return t instanceof Mt||(t=Tt(t)),t?(t=t.rgb(),new At(t.r,t.g,t.b,t.opacity)):new At}function Et(t,n,e,r){return 1===arguments.length?St(t):new At(t,n,e,null==r?1:r)}function At(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function Ct(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new Rt(t,n,e,r)}function zt(t){if(t instanceof Rt)return new Rt(t.h,t.s,t.l,t.opacity);if(t instanceof Mt||(t=Tt(t)),!t)return new Rt;if(t instanceof Rt)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),u=NaN,a=o-i,c=(o+i)/2;return a?(u=n===o?(e-r)/a+6*(e0&&c<1?0:u,new Rt(u,a,c,t.opacity)}function Pt(t,n,e,r){return 1===arguments.length?zt(t):new Rt(t,n,e,null==r?1:r)}function Rt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Lt(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}function qt(t){if(t instanceof Dt)return new 
Dt(t.l,t.a,t.b,t.opacity);if(t instanceof Ht){var n=t.h*Nf;return new Dt(t.l,Math.cos(n)*t.c,Math.sin(n)*t.c,t.opacity)}t instanceof At||(t=St(t));var e=Yt(t.r),r=Yt(t.g),i=Yt(t.b),o=Ot((.4124564*e+.3575761*r+.1804375*i)/Ef),u=Ot((.2126729*e+.7151522*r+.072175*i)/Af);return new Dt(116*u-16,500*(o-u),200*(u-Ot((.0193339*e+.119192*r+.9503041*i)/Cf)),t.opacity)}function Ut(t,n,e,r){return 1===arguments.length?qt(t):new Dt(t,n,e,null==r?1:r)}function Dt(t,n,e,r){this.l=+t,this.a=+n,this.b=+e,this.opacity=+r}function Ot(t){return t>Lf?Math.pow(t,1/3):t/Rf+zf}function Ft(t){return t>Pf?t*t*t:Rf*(t-zf)}function It(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Yt(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Bt(t){if(t instanceof Ht)return new Ht(t.h,t.c,t.l,t.opacity);t instanceof Dt||(t=qt(t));var n=Math.atan2(t.b,t.a)*Sf;return new Ht(n<0?n+360:n,Math.sqrt(t.a*t.a+t.b*t.b),t.l,t.opacity)}function jt(t,n,e,r){return 1===arguments.length?Bt(t):new Ht(t,n,e,null==r?1:r)}function Ht(t,n,e,r){this.h=+t,this.c=+n,this.l=+e,this.opacity=+r}function Xt(t){if(t instanceof Vt)return new Vt(t.h,t.s,t.l,t.opacity);t instanceof At||(t=St(t));var n=t.r/255,e=t.g/255,r=t.b/255,i=(Bf*r+If*n-Yf*e)/(Bf+If-Yf),o=r-i,u=(Ff*(e-i)-Df*o)/Of,a=Math.sqrt(u*u+o*o)/(Ff*i*(1-i)),c=a?Math.atan2(u,o)*Sf-120:NaN;return new Vt(c<0?c+360:c,a,i,t.opacity)}function $t(t,n,e,r){return 1===arguments.length?Xt(t):new Vt(t,n,e,null==r?1:r)}function Vt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Wt(t,n,e,r,i){var o=t*t,u=o*t;return((1-3*t+3*o-u)*n+(4-6*o+3*u)*e+(1+3*t+3*o-3*u)*r+u*i)/6}function Zt(t,n){return function(e){return t+e*n}}function Gt(t,n,e){return t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}function Jt(t,n){var e=n-t;return e?Zt(t,e>180||e<-180?e-360*Math.round(e/360):e):Jf(isNaN(t)?n:t)}function Qt(t){return 1==(t=+t)?Kt:function(n,e){return e-n?Gt(n,e,t):Jf(isNaN(n)?e:n)}}function 
Kt(t,n){var e=n-t;return e?Zt(t,e):Jf(isNaN(t)?n:t)}function tn(t){return function(n){var e,r,i=n.length,o=new Array(i),u=new Array(i),a=new Array(i);for(e=0;e180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:rl(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}function a(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:rl(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}function c(t,n,e,r,o,u){if(t!==e||n!==r){var a=o.push(i(o)+"scale(",null,",",null,")");u.push({i:a-4,x:rl(t,e)},{i:a-2,x:rl(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}return function(n,e){var r=[],i=[];return n=t(n),e=t(e),o(n.translateX,n.translateY,e.translateX,e.translateY,r,i),u(n.rotate,e.rotate,r,i),a(n.skewX,e.skewX,r,i),c(n.scaleX,n.scaleY,e.scaleX,e.scaleY,r,i),n=e=null,function(t){for(var n,e=-1,o=i.length;++e=0&&n._call.call(null,t),n=n._next;--Ml}function _n(){El=(Sl=Cl.now())+Al,Ml=Tl=0;try{vn()}finally{Ml=0,gn(),El=0}}function yn(){var t=Cl.now(),n=t-Sl;n>Nl&&(Al-=n,Sl=t)}function gn(){for(var t,n,e=Vf,r=1/0;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:Vf=n);Wf=t,mn(r)}function mn(t){Ml||(Tl&&(Tl=clearTimeout(Tl)),t-El>24?(t<1/0&&(Tl=setTimeout(_n,t-Cl.now()-Al)),kl&&(kl=clearInterval(kl))):(kl||(Sl=Cl.now(),kl=setInterval(yn,Nl)),Ml=1,zl(_n)))}function xn(t,n){var e=t.__transition;if(!e||!(e=e[n])||e.state>ql)throw new Error("too late");return e}function bn(t,n){var e=t.__transition;if(!e||!(e=e[n])||e.state>Dl)throw new Error("too late");return e}function wn(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("too late");return e}function Mn(t,n,e){function r(c){var s,f,l,h;if(e.state!==Ul)return o();for(s in a)if((h=a[s]).name===e.name){if(h.state===Ol)return Pl(r);h.state===Fl?(h.state=Yl,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete a[s]):+s=0&&(t=t.slice(0,n)),!t||"start"===t})}function Yn(t,n,e){var r,i,o=In(n)?xn:bn;return function(){var 
u=o(this,t),a=u.on;a!==r&&(i=(r=a).copy()).on(n,e),u.on=i}}function Bn(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}function jn(t,n){var e,r,i;return function(){var o=B(this,t),u=(this.style.removeProperty(t),B(this,t));return o===u?null:o===e&&u===r?i:i=n(e=o,r=u)}}function Hn(t){return function(){this.style.removeProperty(t)}}function Xn(t,n,e){var r,i;return function(){var o=B(this,t);return o===e?null:o===r?i:i=n(r=o,e)}}function $n(t,n,e){var r,i,o;return function(){var u=B(this,t),a=e(this);return null==a&&(this.style.removeProperty(t),a=B(this,t)),u===a?null:u===r&&a===i?o:o=n(r=u,i=a)}}function Vn(t,n,e){function r(){var r=this,i=n.apply(r,arguments);return i&&function(n){r.style.setProperty(t,i(n),e)}}return r._value=n,r}function Wn(t){return function(){this.textContent=t}}function Zn(t){return function(){var n=t(this);this.textContent=null==n?"":n}}function Gn(t,n,e,r){this._groups=t,this._parents=n,this._name=e,this._id=r}function Jn(t){return dt().transition(t)}function Qn(){return++$l}function Kn(t){return((t*=2)<=1?t*t:--t*(2-t)+1)/2}function te(t){return((t*=2)<=1?t*t*t:(t-=2)*t*t+2)/2}function ne(t){return(1-Math.cos(Jl*t))/2}function ee(t){return((t*=2)<=1?Math.pow(2,10*t-10):2-Math.pow(2,10-10*t))/2}function re(t){return((t*=2)<=1?1-Math.sqrt(1-t*t):Math.sqrt(1-(t-=2)*t)+1)/2}function ie(t){return(t=+t)Math.abs(t[1]-U[1])?b=!0:x=!0),U=t,m=!0,xh(),o()}function o(){var t;switch(y=U[0]-q[0],g=U[1]-q[1],T){case wh:case bh:k&&(y=Math.max(C-a,Math.min(P-p,y)),s=a+y,d=p+y),N&&(g=Math.max(z-l,Math.min(R-v,g)),h=l+g,_=v+g);break;case Mh:k<0?(y=Math.max(C-a,Math.min(P-a,y)),s=a+y,d=p):k>0&&(y=Math.max(C-p,Math.min(P-p,y)),s=a,d=p+y),N<0?(g=Math.max(z-l,Math.min(R-l,g)),h=l+g,_=v):N>0&&(g=Math.max(z-v,Math.min(R-v,g)),h=l,_=v+g);break;case 
Th:k&&(s=Math.max(C,Math.min(P,a-y*k)),d=Math.max(C,Math.min(P,p+y*k))),N&&(h=Math.max(z,Math.min(R,l-g*N)),_=Math.max(z,Math.min(R,v+g*N)))}d0&&(a=s-y),N<0?v=_-g:N>0&&(l=h-g),T=wh,F.attr("cursor",Eh.selection),o());break;default:return}xh()},!0).on("keyup.brush",function(){switch(t.event.keyCode){case 16:L&&(x=b=L=!1,o());break;case 18:T===Th&&(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh,o());break;case 32:T===wh&&(t.event.altKey?(k&&(p=d-y*k,a=s+y*k),N&&(v=_-g*N,l=h+g*N),T=Th):(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh),F.attr("cursor",Eh[M]),o());break;default:return}xh()},!0).on("mousemove.brush",e,!0).on("mouseup.brush",u,!0);lf(t.event.view)}ue(),jl(w),r.call(w),D.start()}}function a(){var t=this.__brush||{selection:null};return t.extent=s.apply(this,arguments),t.dim=n,t}var c,s=se,f=ce,l=h(e,"start","brush","end"),p=6;return e.move=function(t,e){t.selection?t.on("start.brush",function(){i(this,arguments).beforestart().start()}).on("interrupt.brush end.brush",function(){i(this,arguments).end()}).tween("brush",function(){function t(t){u.selection=1===t&&le(s)?null:f(t),r.call(o),a.brush()}var o=this,u=o.__brush,a=i(o,arguments),c=u.selection,s=n.input("function"==typeof e?e.apply(this,arguments):e,u.extent),f=cl(c,s);return c&&s?t:t(1)}):t.each(function(){var t=this,o=arguments,u=t.__brush,a=n.input("function"==typeof e?e.apply(t,o):e,u.extent),c=i(t,o).beforestart();jl(t),u.selection=null==a||le(a)?null:a,r.call(t),c.start().brush().end()})},o.prototype={beforestart:function(){return 1==++this.active&&(this.state.emitter=this,this.starting=!0),this},start:function(){return this.starting&&(this.starting=!1,this.emit("start")),this},brush:function(){return this.emit("brush"),this},end:function(){return 0==--this.active&&(delete this.state.emitter,this.emit("end")),this},emit:function(t){N(new mh(e,t,n.output(this.state.selection)),l.apply,l,[t,this.that,this.args])}},e.extent=function(t){return arguments.length?(s="function"==typeof 
t?t:gh([[+t[0][0],+t[0][1]],[+t[1][0],+t[1][1]]]),e):s},e.filter=function(t){return arguments.length?(f="function"==typeof t?t:gh(!!t),e):f},e.handleSize=function(t){return arguments.length?(p=+t,e):p},e.on=function(){var t=l.on.apply(l,arguments);return t===l?e:t},e}function pe(t){return function(n,e){return t(n.source.value+n.target.value,e.source.value+e.target.value)}}function de(){this._x0=this._y0=this._x1=this._y1=null,this._=""}function ve(){return new de}function _e(t){return t.source}function ye(t){return t.target}function ge(t){return t.radius}function me(t){return t.startAngle}function xe(t){return t.endAngle}function be(){}function we(t,n){var e=new be;if(t instanceof be)t.each(function(t,n){e.set(n,t)});else if(Array.isArray(t)){var r,i=-1,o=t.length;if(null==n)for(;++i=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u,i=p,!(p=p[l=f<<1|s]))return i[l]=d,t;if(a=+t._x.call(null,p.data),c=+t._y.call(null,p.data),n===a&&e===c)return d.next=p,i?i[l]=d:t._root=d,t;do{i=i?i[l]=new Array(4):t._root=new Array(4),(s=n>=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u}while((l=f<<1|s)==(h=(c>=u)<<1|a>=o));return i[h]=p,i[l]=d,t}function Re(t){return t[0]}function Le(t){return t[1]}function qe(t,n,e){var r=new Ue(null==n?Re:n,null==e?Le:e,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function Ue(t,n,e,r,i,o){this._x=t,this._y=n,this._x0=e,this._y0=r,this._x1=i,this._y1=o,this._root=void 0}function De(t){for(var n={data:t.data},e=n;t=t.next;)e=e.next={data:t.data};return n}function Oe(t){return t.x+t.vx}function Fe(t){return t.y+t.vy}function Ie(t){return t.index}function Ye(t,n){var e=t.get(n);if(!e)throw new Error("missing: "+n);return e}function Be(t){return t.x}function je(t){return t.y}function He(t){return new Xe(t)}function Xe(t){if(!(n=xp.exec(t)))throw new Error("invalid format: "+t);var n,e=n[1]||" 
",r=n[2]||">",i=n[3]||"-",o=n[4]||"",u=!!n[5],a=n[6]&&+n[6],c=!!n[7],s=n[8]&&+n[8].slice(1),f=n[9]||"";"n"===f?(c=!0,f="g"):mp[f]||(f=""),(u||"0"===e&&"="===r)&&(u=!0,e="0",r="="),this.fill=e,this.align=r,this.sign=i,this.symbol=o,this.zero=u,this.width=a,this.comma=c,this.precision=s,this.type=f}function $e(n){return bp=Tp(n),t.format=bp.format,t.formatPrefix=bp.formatPrefix,bp}function Ve(){this.reset()}function We(t,n,e){var r=t.s=n+e,i=r-n,o=r-i;t.t=n-o+(e-i)}function Ze(t){return t>1?0:t<-1?cd:Math.acos(t)}function Ge(t){return t>1?sd:t<-1?-sd:Math.asin(t)}function Je(t){return(t=wd(t/2))*t}function Qe(){}function Ke(t,n){t&&Sd.hasOwnProperty(t.type)&&Sd[t.type](t,n)}function tr(t,n,e){var r,i=-1,o=t.length-e;for(n.lineStart();++i=0?1:-1,i=r*e,o=yd(n),u=wd(n),a=Rp*u,c=Pp*o+a*yd(i),s=a*r*wd(i);Ad.add(_d(s,c)),zp=t,Pp=o,Rp=u}function ur(t){return[_d(t[1],t[0]),Ge(t[2])]}function ar(t){var n=t[0],e=t[1],r=yd(e);return[r*yd(n),r*wd(n),wd(e)]}function cr(t,n){return t[0]*n[0]+t[1]*n[1]+t[2]*n[2]}function sr(t,n){return[t[1]*n[2]-t[2]*n[1],t[2]*n[0]-t[0]*n[2],t[0]*n[1]-t[1]*n[0]]}function fr(t,n){t[0]+=n[0],t[1]+=n[1],t[2]+=n[2]}function lr(t,n){return[t[0]*n,t[1]*n,t[2]*n]}function hr(t){var n=Td(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=n,t[1]/=n,t[2]/=n}function pr(t,n){Bp.push(jp=[Lp=t,Up=t]),nDp&&(Dp=n)}function dr(t,n){var e=ar([t*pd,n*pd]);if(Yp){var r=sr(Yp,e),i=sr([r[1],-r[0],0],r);hr(i),i=ur(i);var o,u=t-Op,a=u>0?1:-1,c=i[0]*hd*a,s=dd(u)>180;s^(a*OpDp&&(Dp=o):(c=(c+360)%360-180,s^(a*OpDp&&(Dp=n))),s?txr(Lp,Up)&&(Up=t):xr(t,Up)>xr(Lp,Up)&&(Lp=t):Up>=Lp?(tUp&&(Up=t)):t>Op?xr(Lp,t)>xr(Lp,Up)&&(Up=t):xr(t,Up)>xr(Lp,Up)&&(Lp=t)}else Bp.push(jp=[Lp=t,Up=t]);nDp&&(Dp=n),Yp=e,Op=t}function vr(){Rd.point=dr}function _r(){jp[0]=Lp,jp[1]=Up,Rd.point=pr,Yp=null}function yr(t,n){if(Yp){var e=t-Op;Pd.add(dd(e)>180?e+(e>0?360:-360):e)}else Fp=t,Ip=n;zd.point(t,n),dr(t,n)}function gr(){zd.lineStart()}function 
mr(){yr(Fp,Ip),zd.lineEnd(),dd(Pd)>ad&&(Lp=-(Up=180)),jp[0]=Lp,jp[1]=Up,Yp=null}function xr(t,n){return(n-=t)<0?n+360:n}function br(t,n){return t[0]-n[0]}function wr(t,n){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:ncd?t-ld:t<-cd?t+ld:t,n]}function Lr(t,n,e){return(t%=ld)?n||e?Ud(Ur(t),Dr(n,e)):Ur(t):n||e?Dr(n,e):Rr}function qr(t){return function(n,e){return n+=t,[n>cd?n-ld:n<-cd?n+ld:n,e]}}function Ur(t){var n=qr(t);return n.invert=qr(-t),n}function Dr(t,n){function e(t,n){var e=yd(n),a=yd(t)*e,c=wd(t)*e,s=wd(n),f=s*r+a*i;return[_d(c*o-f*u,a*r-s*i),Ge(f*o+c*u)]}var r=yd(t),i=wd(t),o=yd(n),u=wd(n);return e.invert=function(t,n){var e=yd(n),a=yd(t)*e,c=wd(t)*e,s=wd(n),f=s*o-c*u;return[_d(c*o+s*u,a*r+f*i),Ge(f*r-a*i)]},e}function Or(t,n,e,r,i,o){if(e){var u=yd(n),a=wd(n),c=r*e;null==i?(i=n+r*ld,o=n-c/2):(i=Fr(u,i),o=Fr(u,o),(r>0?io)&&(i+=r*ld));for(var s,f=i;r>0?f>o:f0)do{s.point(0===f||3===f?t:e,f>1?r:n)}while((f=(f+a+4)%4)!==l);else s.point(o[0],o[1])}function u(r,i){return dd(r[0]-t)0?0:3:dd(r[0]-e)0?2:1:dd(r[1]-n)0?1:0:i>0?3:2}function a(t,n){return c(t.x,n.x)}function c(t,n){var e=u(t,1),r=u(n,1);return e!==r?e-r:0===e?n[1]-t[1]:1===e?t[0]-n[0]:2===e?t[1]-n[1]:n[0]-t[0]}return function(u){function c(t,n){i(t,n)&&w.point(t,n)}function s(){for(var n=0,e=0,i=h.length;er&&(l-o)*(r-u)>(p-u)*(t-o)&&++n:p<=r&&(l-o)*(r-u)<(p-u)*(t-o)&&--n;return n}function f(o,u){var a=i(o,u);if(h&&p.push([o,u]),x)d=o,v=u,_=a,x=!1,a&&(w.lineStart(),w.point(o,u));else if(a&&m)w.point(o,u);else{var c=[y=Math.max(tv,Math.min(Kd,y)),g=Math.max(tv,Math.min(Kd,g))],s=[o=Math.max(tv,Math.min(Kd,o)),u=Math.max(tv,Math.min(Kd,u))];Gd(c,s,t,n,e,r)?(m||(w.lineStart(),w.point(c[0],c[1])),w.point(s[0],s[1]),a||w.lineEnd(),b=!1):a&&(w.lineStart(),w.point(o,u),b=!1)}y=o,g=u,m=a}var 
l,h,p,d,v,_,y,g,m,x,b,w=u,M=Zd(),T={point:c,lineStart:function(){T.point=f,h&&h.push(p=[]),x=!0,m=!1,y=g=NaN},lineEnd:function(){l&&(f(d,v),_&&m&&M.rejoin(),l.push(M.result())),T.point=c,m&&w.lineEnd()},polygonStart:function(){w=M,l=[],h=[],b=!0},polygonEnd:function(){var t=s(),n=b&&t,e=(l=Cs(l)).length;(n||e)&&(u.polygonStart(),n&&(u.lineStart(),o(null,null,1,u),u.lineEnd()),e&&Qd(l,a,t,o,u),u.polygonEnd()),w=u,l=h=p=null}};return T}}function jr(){iv.point=iv.lineEnd=Qe}function Hr(t,n){Dd=t*=pd,Od=wd(n*=pd),Fd=yd(n),iv.point=Xr}function Xr(t,n){t*=pd;var e=wd(n*=pd),r=yd(n),i=dd(t-Dd),o=yd(i),u=r*wd(i),a=Fd*e-Od*r*o,c=Od*e+Fd*r*o;rv.add(_d(Td(u*u+a*a),c)),Dd=t,Od=e,Fd=r}function $r(t,n){return!(!t||!fv.hasOwnProperty(t.type))&&fv[t.type](t,n)}function Vr(t,n){return 0===cv(t,n)}function Wr(t,n){var e=cv(t[0],t[1]);return cv(t[0],n)+cv(n,t[1])<=e+ad}function Zr(t,n){return!!ev(t.map(Gr),Jr(n))}function Gr(t){return(t=t.map(Jr)).pop(),t}function Jr(t){return[t[0]*pd,t[1]*pd]}function Qr(t,n,e){var r=Ms(t,n-ad,e).concat(n);return function(t){return r.map(function(n){return[t,n]})}}function Kr(t,n,e){var r=Ms(t,n-ad,e).concat(n);return function(t){return r.map(function(n){return[n,t]})}}function ti(){function t(){return{type:"MultiLineString",coordinates:n()}}function n(){return Ms(gd(o/_)*_,i,_).map(h).concat(Ms(gd(s/y)*y,c,y).map(p)).concat(Ms(gd(r/d)*d,e,d).filter(function(t){return dd(t%_)>ad}).map(f)).concat(Ms(gd(a/v)*v,u,v).filter(function(t){return dd(t%y)>ad}).map(l))}var e,r,i,o,u,a,c,s,f,l,h,p,d=10,v=d,_=90,y=360,g=2.5;return t.lines=function(){return n().map(function(t){return{type:"LineString",coordinates:t}})},t.outline=function(){return{type:"Polygon",coordinates:[h(o).concat(p(c).slice(1),h(i).reverse().slice(1),p(s).reverse().slice(1))]}},t.extent=function(n){return arguments.length?t.extentMajor(n).extentMinor(n):t.extentMinor()},t.extentMajor=function(n){return 
arguments.length?(o=+n[0][0],i=+n[1][0],s=+n[0][1],c=+n[1][1],o>i&&(n=o,o=i,i=n),s>c&&(n=s,s=c,c=n),t.precision(g)):[[o,s],[i,c]]},t.extentMinor=function(n){return arguments.length?(r=+n[0][0],e=+n[1][0],a=+n[0][1],u=+n[1][1],r>e&&(n=r,r=e,e=n),a>u&&(n=a,a=u,u=n),t.precision(g)):[[r,a],[e,u]]},t.step=function(n){return arguments.length?t.stepMajor(n).stepMinor(n):t.stepMinor()},t.stepMajor=function(n){return arguments.length?(_=+n[0],y=+n[1],t):[_,y]},t.stepMinor=function(n){return arguments.length?(d=+n[0],v=+n[1],t):[d,v]},t.precision=function(n){return arguments.length?(g=+n,f=Qr(a,u,90),l=Kr(r,e,g),h=Qr(s,c,90),p=Kr(o,i,g),t):g},t.extentMajor([[-180,-90+ad],[180,90-ad]]).extentMinor([[-180,-80-ad],[180,80+ad]])}function ni(){dv.point=ei}function ei(t,n){dv.point=ri,Id=Bd=t,Yd=jd=n}function ri(t,n){pv.add(jd*t-Bd*n),Bd=t,jd=n}function ii(){ri(Id,Yd)}function oi(t,n){xv+=t,bv+=n,++wv}function ui(){Av.point=ai}function ai(t,n){Av.point=ci,oi($d=t,Vd=n)}function ci(t,n){var e=t-$d,r=n-Vd,i=Td(e*e+r*r);Mv+=i*($d+t)/2,Tv+=i*(Vd+n)/2,kv+=i,oi($d=t,Vd=n)}function si(){Av.point=oi}function fi(){Av.point=hi}function li(){pi(Hd,Xd)}function hi(t,n){Av.point=pi,oi(Hd=$d=t,Xd=Vd=n)}function pi(t,n){var e=t-$d,r=n-Vd,i=Td(e*e+r*r);Mv+=i*($d+t)/2,Tv+=i*(Vd+n)/2,kv+=i,Nv+=(i=Vd*t-$d*n)*($d+t),Sv+=i*(Vd+n),Ev+=3*i,oi($d=t,Vd=n)}function di(t){this._context=t}function vi(t,n){Uv.point=_i,zv=Rv=t,Pv=Lv=n}function _i(t,n){Rv-=t,Lv-=n,qv.add(Td(Rv*Rv+Lv*Lv)),Rv=t,Lv=n}function yi(){this._string=[]}function gi(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function mi(t){return t.length>1}function xi(t,n){return((t=t.x)[0]<0?t[1]-sd-ad:sd-t[1])-((n=n.x)[0]<0?n[1]-sd-ad:sd-n[1])}function bi(t,n,e,r){var i,o,u=wd(t-e);return dd(u)>ad?vd((wd(n)*(o=yd(r))*wd(e)-wd(r)*(i=yd(n))*wd(t))/(i*o*u)):(n+r)/2}function wi(t){return function(n){var e=new Mi;for(var r in t)e[r]=t[r];return e.stream=n,e}}function Mi(){}function Ti(t,n,e){var 
r=n[1][0]-n[0][0],i=n[1][1]-n[0][1],o=t.clipExtent&&t.clipExtent();t.scale(150).translate([0,0]),null!=o&&t.clipExtent(null),Ed(e,t.stream(mv));var u=mv.result(),a=Math.min(r/(u[1][0]-u[0][0]),i/(u[1][1]-u[0][1])),c=+n[0][0]+(r-a*(u[1][0]+u[0][0]))/2,s=+n[0][1]+(i-a*(u[1][1]+u[0][1]))/2;return null!=o&&t.clipExtent(o),t.scale(150*a).translate([c,s])}function ki(t,n,e){return Ti(t,[[0,0],n],e)}function Ni(t){return wi({point:function(n,e){n=t(n,e),this.stream.point(n[0],n[1])}})}function Si(t,n){function e(r,i,o,u,a,c,s,f,l,h,p,d,v,_){var y=s-r,g=f-i,m=y*y+g*g;if(m>4*n&&v--){var x=u+h,b=a+p,w=c+d,M=Td(x*x+b*b+w*w),T=Ge(w/=M),k=dd(dd(w)-1)n||dd((y*A+g*C)/m-.5)>.3||u*h+a*p+c*d2?t[2]%360*pd:0,i()):[b*hd,w*hd,M*hd]},n.precision=function(t){return arguments.length?(A=Bv(r,E=t*t),o()):Td(E)},n.fitExtent=function(t,e){return Ti(n,t,e)},n.fitSize=function(t,e){return ki(n,t,e)},function(){return u=t.apply(this,arguments),n.invert=u.invert&&e,i()}}function Ci(t){var n=0,e=cd/3,r=Ai(t),i=r(n,e);return i.parallels=function(t){return arguments.length?r(n=t[0]*pd,e=t[1]*pd):[n*hd,e*hd]},i}function zi(t){function n(t,n){return[t*e,wd(n)/e]}var e=yd(t);return n.invert=function(t,n){return[t/e,Ge(n*e)]},n}function Pi(t,n){function e(t,n){var e=Td(o-2*i*wd(n))/i;return[e*wd(t*=i),u-e*yd(t)]}var r=wd(t),i=(r+wd(n))/2;if(dd(i)0?n<-sd+ad&&(n=-sd+ad):n>sd-ad&&(n=sd-ad);var e=o/bd(Oi(n),i);return[e*wd(i*t),o-e*yd(i*t)]}var r=yd(t),i=t===n?wd(t):xd(r/yd(n))/xd(Oi(n)/Oi(t)),o=r*bd(Oi(t),i)/i;return i?(e.invert=function(t,n){var e=o-n,r=Md(i)*Td(t*t+e*e);return[_d(t,dd(e))/i*Md(e),2*vd(bd(o/r,1/i))-sd]},e):Ui}function Ii(t,n){return[t,n]}function Yi(t,n){function e(t,n){var e=o-n,r=i*t;return[e*wd(r),o-e*yd(r)]}var r=yd(t),i=t===n?wd(t):(r-yd(n))/(n-t),o=r/i+t;return dd(i)=0;)n+=e[r].value;else n=1;t.value=n}function no(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;for(t=e.pop(),n=r.pop();t===n;)i=t,t=e.pop(),n=r.pop();return i}function eo(t,n){var e,r,i,o,u,a=new 
uo(t),c=+t.value&&(a.value=t.value),s=[a];for(null==n&&(n=ro);e=s.pop();)if(c&&(e.value=+e.data.value),(i=n(e.data))&&(u=i.length))for(e.children=new Array(u),o=u-1;o>=0;--o)s.push(r=e.children[o]=new uo(i[o])),r.parent=e,r.depth=e.depth+1;return a.eachBefore(oo)}function ro(t){return t.children}function io(t){t.data=t.data.data}function oo(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function uo(t){this.data=t,this.depth=this.height=0,this.parent=null}function ao(t){for(var n,e,r=t.length;r;)e=Math.random()*r--|0,n=t[r],t[r]=t[e],t[e]=n;return t}function co(t,n){var e,r;if(lo(n,t))return[n];for(e=0;e0&&e*e>r*r+i*i}function lo(t,n){for(var e=0;ee*e+r*r}function mo(t){var n=t._,e=t.next._,r=n.r+e.r,i=(n.x*e.r+e.x*n.r)/r,o=(n.y*e.r+e.y*n.r)/r;return i*i+o*o}function xo(t){this._=t,this.next=null,this.previous=null}function bo(t){if(!(i=t.length))return 0;var n,e,r,i,o,u,a,c,s,f,l;if(n=t[0],n.x=0,n.y=0,!(i>1))return n.r;if(e=t[1],n.x=-e.r,e.x=n.r,e.y=0,!(i>2))return n.r+e.r;yo(e,n,r=t[2]),n=new xo(n),e=new xo(e),r=new xo(r),n.next=r.previous=e,e.next=n.previous=r,r.next=e.previous=n;t:for(a=3;a=0;)(n=i[o]).z+=e,n.m+=e,e+=n.s+(r+=n.c)}function Uo(t,n,e){return t.a.parent===n.parent?t.a:e}function Do(t,n){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=n}function Oo(t){for(var n,e,r,i,o,u=new Do(t,0),a=[u];n=a.pop();)if(r=n._.children)for(n.children=new Array(o=r.length),i=o-1;i>=0;--i)a.push(e=n.children[i]=new Do(r[i],i)),e.parent=n;return(u.parent=new Do(null,0)).children=[u],u}function Fo(t,n,e,r,i,o){for(var u,a,c,s,f,l,h,p,d,v,_,y=[],g=n.children,m=0,x=0,b=g.length,w=n.value;mh&&(h=a),_=f*f*v,(p=Math.max(h/_,_/l))>d){f-=a;break}d=p}y.push(u={value:f,dice:c1&&u_(t[e[r-2]],t[e[r-1]],t[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function Bo(t){this._size=t,this._call=this._error=null,this._tasks=[],this._data=[],this._waiting=this._active=this._ended=this._start=0}function 
jo(t){if(!t._start)try{Ho(t)}catch(n){if(t._tasks[t._ended+t._active-1])$o(t,n);else if(!t._data)throw n}}function Ho(t){for(;t._start=t._waiting&&t._active=0;)if((e=t._tasks[r])&&(t._tasks[r]=null,e.abort))try{e.abort()}catch(n){}t._active=NaN,Vo(t)}function Vo(t){if(!t._active&&t._call){var n=t._data;t._data=void 0,t._call(t._error,n)}}function Wo(t){if(null==t)t=1/0;else if(!((t=+t)>=1))throw new Error("invalid concurrency");return new Bo(t)}function Zo(t){return function(n,e){t(null==n?e:null)}}function Go(t){var n=t.responseType;return n&&"text"!==n?t.response:t.responseText}function Jo(t,n){return function(e){return t(e.responseText,n)}}function Qo(t){function n(n){var o=n+"",u=e.get(o);if(!u){if(i!==E_)return i;e.set(o,u=r.push(n))}return t[(u-1)%t.length]}var e=we(),r=[],i=E_;return t=null==t?[]:S_.call(t),n.domain=function(t){if(!arguments.length)return r.slice();r=[],e=we();for(var i,o,u=-1,a=t.length;++u=e?1:r(t)}}}function ru(t){return function(n,e){var r=t(n=+n,e=+e);return function(t){return t<=0?n:t>=1?e:r(t)}}}function iu(t,n,e,r){var i=t[0],o=t[1],u=n[0],a=n[1];return o2?ou:iu,o=u=null,r}function r(n){return(o||(o=i(a,c,f?eu(t):t,s)))(+n)}var i,o,u,a=z_,c=z_,s=cl,f=!1;return r.invert=function(t){return(u||(u=i(c,a,nu,f?ru(n):n)))(+t)},r.domain=function(t){return arguments.length?(a=N_.call(t,C_),e()):a.slice()},r.range=function(t){return arguments.length?(c=S_.call(t),e()):c.slice()},r.rangeRound=function(t){return c=S_.call(t),s=sl,e()},r.clamp=function(t){return arguments.length?(f=!!t,e()):f},r.interpolate=function(t){return arguments.length?(s=t,e()):s},e()}function cu(t){var n=t.domain;return t.ticks=function(t){var e=n();return Ss(e[0],e[e.length-1],null==t?10:t)},t.tickFormat=function(t,e){return P_(n(),t,e)},t.nice=function(e){null==e&&(e=10);var i,o=n(),u=0,a=o.length-1,c=o[u],s=o[a];return 
s0?i=r(c=Math.floor(c/i)*i,s=Math.ceil(s/i)*i,e):i<0&&(i=r(c=Math.ceil(c*i)/i,s=Math.floor(s*i)/i,e)),i>0?(o[u]=Math.floor(c/i)*i,o[a]=Math.ceil(s/i)*i,n(o)):i<0&&(o[u]=Math.ceil(c*i)/i,o[a]=Math.floor(s*i)/i,n(o)),t},t}function su(){var t=au(nu,rl);return t.copy=function(){return uu(t,su())},cu(t)}function fu(){function t(t){return+t}var n=[0,1];return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=N_.call(e,C_),t):n.slice()},t.copy=function(){return fu().domain(n)},cu(t)}function lu(t,n){return(n=Math.log(n/t))?function(e){return Math.log(e/t)/n}:A_(n)}function hu(t,n){return t<0?function(e){return-Math.pow(-n,e)*Math.pow(-t,1-e)}:function(e){return Math.pow(n,e)*Math.pow(t,1-e)}}function pu(t){return isFinite(t)?+("1e"+t):t<0?0:t}function du(t){return 10===t?pu:t===Math.E?Math.exp:function(n){return Math.pow(t,n)}}function vu(t){return t===Math.E?Math.log:10===t&&Math.log10||2===t&&Math.log2||(t=Math.log(t),function(n){return Math.log(n)/t})}function _u(t){return function(n){return-t(-n)}}function yu(){function n(){return o=vu(i),u=du(i),r()[0]<0&&(o=_u(o),u=_u(u)),e}var e=au(lu,hu).domain([1,10]),r=e.domain,i=10,o=vu(10),u=du(10);return e.base=function(t){return arguments.length?(i=+t,n()):i},e.domain=function(t){return arguments.length?(r(t),n()):r()},e.ticks=function(t){var n,e=r(),a=e[0],c=e[e.length-1];(n=c0){for(;hc)break;v.push(l)}}else for(;h=1;--f)if(!((l=s*f)c)break;v.push(l)}}else v=Ss(h,p,Math.min(p-h,d)).map(u);return n?v.reverse():v},e.tickFormat=function(n,r){if(null==r&&(r=10===i?".0e":","),"function"!=typeof r&&(r=t.format(r)),n===1/0)return r;null==n&&(n=10);var a=Math.max(1,i*n/e.ticks().length);return function(t){var n=t/u(Math.round(o(t)));return n*i0?i[n-1]:e[0],n=i?[o[i-1],r]:[o[n-1],o[n]]},t.copy=function(){return bu().domain([e,r]).range(u)},cu(t)}function wu(){function t(t){if(t<=t)return e[hs(n,t,0,r)]}var n=[.5],e=[0,1],r=1;return t.domain=function(i){return 
arguments.length?(n=S_.call(i),r=Math.min(n.length,e.length-1),t):n.slice()},t.range=function(i){return arguments.length?(e=S_.call(i),r=Math.min(n.length,e.length-1),t):e.slice()},t.invertExtent=function(t){var r=e.indexOf(t);return[n[r-1],n[r]]},t.copy=function(){return wu().domain(n).range(e)},t}function Mu(t,n,e,r){function i(n){return t(n=new Date(+n)),n}return i.floor=i,i.ceil=function(e){return t(e=new Date(e-1)),n(e,1),t(e),e},i.round=function(t){var n=i(t),e=i.ceil(t);return t-n0))return u;do{u.push(new Date(+e))}while(n(e,o),t(e),e=n)for(;t(n),!e(n);)n.setTime(n-1)},function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;n(t,-1),!e(t););else for(;--r>=0;)for(;n(t,1),!e(t););})},e&&(i.count=function(n,r){return L_.setTime(+n),q_.setTime(+r),t(L_),t(q_),Math.floor(e(L_,q_))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(n){return r(n)%t==0}:function(n){return i.count(0,n)%t==0}):i:null}),i}function Tu(t){return Mu(function(n){n.setDate(n.getDate()-(n.getDay()+7-t)%7),n.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+7*n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*O_)/F_})}function ku(t){return Mu(function(n){n.setUTCDate(n.getUTCDate()-(n.getUTCDay()+7-t)%7),n.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+7*n)},function(t,n){return(n-t)/F_})}function Nu(t){if(0<=t.y&&t.y<100){var n=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return n.setFullYear(t.y),n}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function Su(t){if(0<=t.y&&t.y<100){var n=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return n.setUTCFullYear(t.y),n}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function Eu(t){return{y:t,m:0,d:1,H:0,M:0,S:0,L:0}}function Au(t){function n(t,n){return function(e){var r,i,o,u=[],a=-1,c=0,s=t.length;for(e instanceof Date||(e=new Date(+e));++a=c)return-1;if(37===(i=n.charCodeAt(u++))){if(i=n.charAt(u++),!(o=T[i in Dy?n.charAt(u++):i])||(r=o(t,e,r))<0)return-1}else 
if(i!=e.charCodeAt(r++))return-1}return r}var i=t.dateTime,o=t.date,u=t.time,a=t.periods,c=t.days,s=t.shortDays,f=t.months,l=t.shortMonths,h=Pu(a),p=Ru(a),d=Pu(c),v=Ru(c),_=Pu(s),y=Ru(s),g=Pu(f),m=Ru(f),x=Pu(l),b=Ru(l),w={a:function(t){return s[t.getDay()]},A:function(t){return c[t.getDay()]},b:function(t){return l[t.getMonth()]},B:function(t){return f[t.getMonth()]},c:null,d:Wu,e:Wu,H:Zu,I:Gu,j:Ju,L:Qu,m:Ku,M:ta,p:function(t){return a[+(t.getHours()>=12)]},S:na,U:ea,w:ra,W:ia,x:null,X:null,y:oa,Y:ua,Z:aa,"%":wa},M={a:function(t){return s[t.getUTCDay()]},A:function(t){return c[t.getUTCDay()]},b:function(t){return l[t.getUTCMonth()]},B:function(t){return f[t.getUTCMonth()]},c:null,d:ca,e:ca,H:sa,I:fa,j:la,L:ha,m:pa,M:da,p:function(t){return a[+(t.getUTCHours()>=12)]},S:va,U:_a,w:ya,W:ga,x:null,X:null,y:ma,Y:xa,Z:ba,"%":wa},T={a:function(t,n,e){var r=_.exec(n.slice(e));return r?(t.w=y[r[0].toLowerCase()],e+r[0].length):-1},A:function(t,n,e){var r=d.exec(n.slice(e));return r?(t.w=v[r[0].toLowerCase()],e+r[0].length):-1},b:function(t,n,e){var r=x.exec(n.slice(e));return r?(t.m=b[r[0].toLowerCase()],e+r[0].length):-1},B:function(t,n,e){var r=g.exec(n.slice(e));return r?(t.m=m[r[0].toLowerCase()],e+r[0].length):-1},c:function(t,n,e){return r(t,i,n,e)},d:Yu,e:Yu,H:ju,I:ju,j:Bu,L:$u,m:Iu,M:Hu,p:function(t,n,e){var r=h.exec(n.slice(e));return r?(t.p=p[r[0].toLowerCase()],e+r[0].length):-1},S:Xu,U:qu,w:Lu,W:Uu,x:function(t,n,e){return r(t,o,n,e)},X:function(t,n,e){return r(t,u,n,e)},y:Ou,Y:Du,Z:Fu,"%":Vu};return w.x=n(o,w),w.X=n(u,w),w.c=n(i,w),M.x=n(o,M),M.X=n(u,M),M.c=n(i,M),{format:function(t){var e=n(t+="",w);return e.toString=function(){return t},e},parse:function(t){var n=e(t+="",Nu);return n.toString=function(){return t},n},utcFormat:function(t){var e=n(t+="",M);return e.toString=function(){return t},e},utcParse:function(t){var n=e(t,Su);return n.toString=function(){return t},n}}}function Cu(t,n,e){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return 
r+(o68?1900:2e3),e+r[0].length):-1}function Fu(t,n,e){var r=/^(Z)|([+-]\d\d)(?:\:?(\d\d))?/.exec(n.slice(e,e+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),e+r[0].length):-1}function Iu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.m=r[0]-1,e+r[0].length):-1}function Yu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.d=+r[0],e+r[0].length):-1}function Bu(t,n,e){var r=Oy.exec(n.slice(e,e+3));return r?(t.m=0,t.d=+r[0],e+r[0].length):-1}function ju(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.H=+r[0],e+r[0].length):-1}function Hu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.M=+r[0],e+r[0].length):-1}function Xu(t,n,e){var r=Oy.exec(n.slice(e,e+2));return r?(t.S=+r[0],e+r[0].length):-1}function $u(t,n,e){var r=Oy.exec(n.slice(e,e+3));return r?(t.L=+r[0],e+r[0].length):-1}function Vu(t,n,e){var r=Fy.exec(n.slice(e,e+1));return r?e+r[0].length:-1}function Wu(t,n){return Cu(t.getDate(),n,2)}function Zu(t,n){return Cu(t.getHours(),n,2)}function Gu(t,n){return Cu(t.getHours()%12||12,n,2)}function Ju(t,n){return Cu(1+$_.count(fy(t),t),n,3)}function Qu(t,n){return Cu(t.getMilliseconds(),n,3)}function Ku(t,n){return Cu(t.getMonth()+1,n,2)}function ta(t,n){return Cu(t.getMinutes(),n,2)}function na(t,n){return Cu(t.getSeconds(),n,2)}function ea(t,n){return Cu(W_.count(fy(t),t),n,2)}function ra(t){return t.getDay()}function ia(t,n){return Cu(Z_.count(fy(t),t),n,2)}function oa(t,n){return Cu(t.getFullYear()%100,n,2)}function ua(t,n){return Cu(t.getFullYear()%1e4,n,4)}function aa(t){var n=t.getTimezoneOffset();return(n>0?"-":(n*=-1,"+"))+Cu(n/60|0,"0",2)+Cu(n%60,"0",2)}function ca(t,n){return Cu(t.getUTCDate(),n,2)}function sa(t,n){return Cu(t.getUTCHours(),n,2)}function fa(t,n){return Cu(t.getUTCHours()%12||12,n,2)}function la(t,n){return Cu(1+_y.count(Ly(t),t),n,3)}function ha(t,n){return Cu(t.getUTCMilliseconds(),n,3)}function pa(t,n){return Cu(t.getUTCMonth()+1,n,2)}function da(t,n){return Cu(t.getUTCMinutes(),n,2)}function va(t,n){return 
Cu(t.getUTCSeconds(),n,2)}function _a(t,n){return Cu(gy.count(Ly(t),t),n,2)}function ya(t){return t.getUTCDay()}function ga(t,n){return Cu(my.count(Ly(t),t),n,2)}function ma(t,n){return Cu(t.getUTCFullYear()%100,n,2)}function xa(t,n){return Cu(t.getUTCFullYear()%1e4,n,4)}function ba(){return"+0000"}function wa(){return"%"}function Ma(n){return qy=Au(n),t.timeFormat=qy.format,t.timeParse=qy.parse,t.utcFormat=qy.utcFormat,t.utcParse=qy.utcParse,qy}function Ta(t){return new Date(t)}function ka(t){return t instanceof Date?+t:+new Date(+t)}function Na(t,n,e,r,o,u,a,c,s){function f(i){return(a(i)1?0:t<-1?gg:Math.acos(t)}function Ca(t){return t>=1?mg:t<=-1?-mg:Math.asin(t)}function za(t){return t.innerRadius}function Pa(t){return t.outerRadius}function Ra(t){return t.startAngle}function La(t){return t.endAngle}function qa(t){return t&&t.padAngle}function Ua(t,n,e,r,i,o,u,a){var c=e-t,s=r-n,f=u-i,l=a-o,h=(f*(n-o)-l*(t-i))/(l*c-f*s);return[t+h*c,n+h*s]}function Da(t,n,e,r,i,o,u){var a=t-e,c=n-r,s=(u?o:-o)/_g(a*a+c*c),f=s*c,l=-s*a,h=t+f,p=n+l,d=e+f,v=r+l,_=(h+d)/2,y=(p+v)/2,g=d-h,m=v-p,x=g*g+m*m,b=i-o,w=h*v-d*p,M=(m<0?-1:1)*_g(pg(0,b*b*x-w*w)),T=(w*m-g*M)/x,k=(-w*g-m*M)/x,N=(w*m+g*M)/x,S=(-w*g+m*M)/x,E=T-_,A=k-y,C=N-_,z=S-y;return E*E+A*A>C*C+z*z&&(T=N,k=S),{cx:T,cy:k,x01:-f,y01:-l,x11:T*(i/b-1),y11:k*(i/b-1)}}function Oa(t){this._context=t}function Fa(t){return t[0]}function Ia(t){return t[1]}function Ya(t){this._curve=t}function Ba(t){function n(n){return new Ya(t(n))}return n._curve=t,n}function ja(t){var n=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t}function Ha(t){return t.source}function Xa(t){return t.target}function $a(t){function n(){var n,a=Cg.call(arguments),c=e.apply(this,a),s=r.apply(this,a);if(u||(u=n=ve()),t(u,+i.apply(this,(a[0]=c,a)),+o.apply(this,a),+i.apply(this,(a[0]=s,a)),+o.apply(this,a)),n)return u=null,n+""||null}var e=Ha,r=Xa,i=Fa,o=Ia,u=null;return 
n.source=function(t){return arguments.length?(e=t,n):e},n.target=function(t){return arguments.length?(r=t,n):r},n.x=function(t){return arguments.length?(i="function"==typeof t?t:sg(+t),n):i},n.y=function(t){return arguments.length?(o="function"==typeof t?t:sg(+t),n):o},n.context=function(t){return arguments.length?(u=null==t?null:t,n):u},n}function Va(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n=(n+r)/2,e,n,i,r,i)}function Wa(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n,e=(e+i)/2,r,e,r,i)}function Za(t,n,e,r,i){var o=Ag(n,e),u=Ag(n,e=(e+i)/2),a=Ag(r,e),c=Ag(r,i);t.moveTo(o[0],o[1]),t.bezierCurveTo(u[0],u[1],a[0],a[1],c[0],c[1])}function Ga(t,n,e){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+n)/6,(t._y0+4*t._y1+e)/6)}function Ja(t){this._context=t}function Qa(t){this._context=t}function Ka(t){this._context=t}function tc(t,n){this._basis=new Ja(t),this._beta=n}function nc(t,n,e){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-n),t._y2+t._k*(t._y1-e),t._x2,t._y2)}function ec(t,n){this._context=t,this._k=(1-n)/6}function rc(t,n){this._context=t,this._k=(1-n)/6}function ic(t,n){this._context=t,this._k=(1-n)/6}function oc(t,n,e){var r=t._x1,i=t._y1,o=t._x2,u=t._y2;if(t._l01_a>yg){var a=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,c=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*a-t._x0*t._l12_2a+t._x2*t._l01_2a)/c,i=(i*a-t._y0*t._l12_2a+t._y2*t._l01_2a)/c}if(t._l23_a>yg){var s=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,f=3*t._l23_a*(t._l23_a+t._l12_a);o=(o*s+t._x1*t._l23_2a-n*t._l12_2a)/f,u=(u*s+t._y1*t._l23_2a-e*t._l12_2a)/f}t._context.bezierCurveTo(r,i,o,u,t._x2,t._y2)}function uc(t,n){this._context=t,this._alpha=n}function ac(t,n){this._context=t,this._alpha=n}function cc(t,n){this._context=t,this._alpha=n}function sc(t){this._context=t}function fc(t){return t<0?-1:1}function lc(t,n,e){var 
r=t._x1-t._x0,i=n-t._x1,o=(t._y1-t._y0)/(r||i<0&&-0),u=(e-t._y1)/(i||r<0&&-0),a=(o*i+u*r)/(r+i);return(fc(o)+fc(u))*Math.min(Math.abs(o),Math.abs(u),.5*Math.abs(a))||0}function hc(t,n){var e=t._x1-t._x0;return e?(3*(t._y1-t._y0)/e-n)/2:n}function pc(t,n,e){var r=t._x0,i=t._y0,o=t._x1,u=t._y1,a=(o-r)/3;t._context.bezierCurveTo(r+a,i+a*n,o-a,u-a*e,o,u)}function dc(t){this._context=t}function vc(t){this._context=new _c(t)}function _c(t){this._context=t}function yc(t){this._context=t}function gc(t){var n,e,r=t.length-1,i=new Array(r),o=new Array(r),u=new Array(r);for(i[0]=0,o[0]=2,u[0]=t[0]+2*t[1],n=1;n=0;--n)i[n]=(u[n]-i[n+1])/o[n];for(o[r-1]=(t[r]+i[r-1])/2,n=0;n0)){if(o/=h,h<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=r-c,h||!(o<0)){if(o/=h,h<0){if(o>l)return;o>f&&(f=o)}else if(h>0){if(o0)){if(o/=p,p<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=i-s,p||!(o<0)){if(o/=p,p<0){if(o>l)return;o>f&&(f=o)}else if(p>0){if(o0||l<1)||(f>0&&(t[0]=[c+f*h,s+f*p]),l<1&&(t[1]=[c+l*h,s+l*p]),!0)}}}}}function Rc(t,n,e,r,i){var o=t[1];if(o)return!0;var u,a,c=t[0],s=t.left,f=t.right,l=s[0],h=s[1],p=f[0],d=f[1],v=(l+p)/2,_=(h+d)/2;if(d===h){if(v=r)return;if(l>p){if(c){if(c[1]>=i)return}else c=[v,e];o=[v,i]}else{if(c){if(c[1]1)if(l>p){if(c){if(c[1]>=i)return}else c=[(e-a)/u,e];o=[(i-a)/u,i]}else{if(c){if(c[1]=r)return}else c=[n,u*n+a];o=[r,u*r+a]}else{if(c){if(c[0]dm||Math.abs(i[0][1]-i[1][1])>dm)||delete lm[o]}function qc(t){return sm[t.index]={site:t,halfedges:[]}}function Uc(t,n){var e=t.site,r=n.left,i=n.right;return e===i&&(i=r,r=e),i?Math.atan2(i[1]-r[1],i[0]-r[0]):(e===r?(r=n[1],i=n[0]):(r=n[0],i=n[1]),Math.atan2(r[0]-i[0],i[1]-r[1]))}function Dc(t,n){return n[+(n.left!==t.site)]}function Oc(t,n){return n[+(n.left===t.site)]}function Fc(){for(var t,n,e,r,i=0,o=sm.length;idm||Math.abs(v-h)>dm)&&(c.splice(a,0,lm.push(Cc(u,p,Math.abs(d-t)dm?[t,Math.abs(l-t)dm?[Math.abs(h-r)dm?[e,Math.abs(l-e)dm?[Math.abs(h-n)=-vm)){var p=c*c+s*s,d=f*f+l*l,v=(l*p-s*d)/h,_=(c*d-f*p)/h,y=hm.pop()||new 
Yc;y.arc=t,y.site=i,y.x=v+u,y.y=(y.cy=_+a)+Math.sqrt(v*v+_*_),t.circle=y;for(var g=null,m=fm._;m;)if(y.ydm)a=a.L;else{if(!((i=o-Gc(a,u))>dm)){r>-dm?(n=a.P,e=a):i>-dm?(n=a,e=a.N):n=e=a;break}if(!a.R){n=a;break}a=a.R}qc(t);var c=Xc(t);if(cm.insert(n,c),n||e){if(n===e)return jc(n),e=Xc(n.site),cm.insert(c,e),c.edge=e.edge=Ac(n.site,c.site),Bc(n),void Bc(e);if(e){jc(n),jc(e);var s=n.site,f=s[0],l=s[1],h=t[0]-f,p=t[1]-l,d=e.site,v=d[0]-f,_=d[1]-l,y=2*(h*_-p*v),g=h*h+p*p,m=v*v+_*_,x=[(_*g-p*m)/y+f,(h*m-v*g)/y+l];zc(e.edge,s,d,x),c.edge=Ac(s,t,null,x),e.edge=Ac(t,d,null,x),Bc(n),Bc(e)}else c.edge=Ac(n.site,c.site)}}function Zc(t,n){var e=t.site,r=e[0],i=e[1],o=i-n;if(!o)return r;var u=t.P;if(!u)return-1/0;var a=(e=u.site)[0],c=e[1],s=c-n;if(!s)return a;var f=a-r,l=1/o-1/s,h=f/s;return l?(-h+Math.sqrt(h*h-2*l*(f*f/(-2*s)-c+s/2+i-o/2)))/l+r:(r+a)/2}function Gc(t,n){var e=t.N;if(e)return Zc(e,n);var r=t.site;return r[1]===n?r[0]:1/0}function Jc(t,n,e){return(t[0]-e[0])*(n[1]-t[1])-(t[0]-n[0])*(e[1]-t[1])}function Qc(t,n){return n[1]-t[1]||n[0]-t[0]}function Kc(t,n){var e,r,i,o=t.sort(Qc).pop();for(lm=[],sm=new Array(t.length),cm=new Tc,fm=new Tc;;)if(i=am,o&&(!i||o[1]n?1:t>=n?0:NaN},fs=function(t){return 1===t.length&&(t=n(t)),{left:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)<0?r=o+1:i=o}return r},right:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)>0?i=o:r=o+1}return r}}},ls=fs(ss),hs=ls.right,ps=ls.left,ds=function(t){return null===t?NaN:+t},vs=function(t,n){var e,r,i=t.length,o=0,u=-1,a=0,c=0;if(null==n)for(;++u1)return c/(o-1)},_s=function(t,n){var e=vs(t,n);return e?Math.sqrt(e):e},ys=function(t,n){var e,r,i,o=t.length,u=-1;if(null==n){for(;++u=e)for(r=i=e;++ue&&(r=e),i=e)for(r=i=e;++ue&&(r=e),i0)for(t=Math.ceil(t/u),n=Math.floor(n/u),o=new Array(i=Math.ceil(n-t+1));++c=1)return+e(t[r-1],r-1,t);var r,i=(r-1)*n,o=Math.floor(i),u=+e(t[o],o,t);return u+(+e(t[o+1],o+1,t)-u)*(i-o)}},Cs=function(t){for(var 
n,e,r,i=t.length,o=-1,u=0;++o=0;)for(n=(r=t[i]).length;--n>=0;)e[--u]=r[n];return e},zs=function(t,n){var e,r,i=t.length,o=-1;if(null==n){for(;++o=e)for(r=e;++oe&&(r=e)}else for(;++o=e)for(r=e;++oe&&(r=e);return r},Ps=function(t){if(!(i=t.length))return[];for(var n=-1,e=zs(t,o),r=new Array(e);++n0)for(var e,r,i=new Array(e),o=0;o=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Bs.hasOwnProperty(n)?{space:Bs[n],local:t}:t},Hs=function(t){var n=js(t);return(n.local?g:y)(n)},Xs=0;x.prototype=m.prototype={constructor:x,get:function(t){for(var n=this._;!(n in t);)if(!(t=t.parentNode))return;return t[n]},set:function(t,n){return t[this._]=n},remove:function(t){return this._ in t&&delete t[this._]},toString:function(){return this._}};var $s=function(t){return function(){return this.matches(t)}};if("undefined"!=typeof document){var Vs=document.documentElement;if(!Vs.matches){var Ws=Vs.webkitMatchesSelector||Vs.msMatchesSelector||Vs.mozMatchesSelector||Vs.oMatchesSelector;$s=function(t){return function(){return Ws.call(this,t)}}}}var Zs=$s,Gs={};t.event=null,"undefined"!=typeof document&&("onmouseenter"in document.documentElement||(Gs={mouseenter:"mouseover",mouseleave:"mouseout"}));var Js=function(){for(var n,e=t.event;n=e.sourceEvent;)e=n;return e},Qs=function(t,n){var e=t.ownerSVGElement||t;if(e.createSVGPoint){var r=e.createSVGPoint();return r.x=n.clientX,r.y=n.clientY,r=r.matrixTransform(t.getScreenCTM().inverse()),[r.x,r.y]}var i=t.getBoundingClientRect();return[n.clientX-i.left-t.clientLeft,n.clientY-i.top-t.clientTop]},Ks=function(t){var n=Js();return n.changedTouches&&(n=n.changedTouches[0]),Qs(t,n)},tf=function(t){return null==t?S:function(){return this.querySelector(t)}},nf=function(t){return null==t?E:function(){return this.querySelectorAll(t)}},ef=function(t){return new Array(t.length)};A.prototype={constructor:A,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,n){return 
this._parent.insertBefore(t,n)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var rf=function(t){return function(){return t}},of="$",uf=function(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView};W.prototype={add:function(t){this._names.indexOf(t)<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var n=this._names.indexOf(t);n>=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var af=[null];pt.prototype=dt.prototype={constructor:pt,select:function(t){"function"!=typeof t&&(t=tf(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i=x&&(x=m+1);!(g=_[x])&&++x=0;)(r=i[o])&&(u&&u!==r.nextSibling&&u.parentNode.insertBefore(r,u),u=r);return this},sort:function(t){t||(t=P);for(var n=this._groups,e=n.length,r=new Array(e),i=0;i1?this.each((null==n?F:"function"==typeof n?Y:I)(t,n,null==e?"":e)):B(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?j:"function"==typeof n?X:H)(t,n)):this.node()[t]},classed:function(t,n){var e=$(t+"");if(arguments.length<2){for(var r=V(this.node()),i=-1,o=e.length;++i=240?t-240:t+120,i,r),Lt(t,i,r),Lt(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1}}));var Nf=Math.PI/180,Sf=180/Math.PI,Ef=.95047,Af=1,Cf=1.08883,zf=4/29,Pf=6/29,Rf=3*Pf*Pf,Lf=Pf*Pf*Pf;pf(Dt,Ut,wt(Mt,{brighter:function(t){return new Dt(this.l+18*(null==t?1:t),this.a,this.b,this.opacity)},darker:function(t){return new Dt(this.l-18*(null==t?1:t),this.a,this.b,this.opacity)},rgb:function(){var t=(this.l+16)/116,n=isNaN(this.a)?t:t+this.a/500,e=isNaN(this.b)?t:t-this.b/200;return t=Af*Ft(t),n=Ef*Ft(n),e=Cf*Ft(e),new 
At(It(3.2404542*n-1.5371385*t-.4985314*e),It(-.969266*n+1.8760108*t+.041556*e),It(.0556434*n-.2040259*t+1.0572252*e),this.opacity)}})),pf(Ht,jt,wt(Mt,{brighter:function(t){return new Ht(this.h,this.c,this.l+18*(null==t?1:t),this.opacity)},darker:function(t){return new Ht(this.h,this.c,this.l-18*(null==t?1:t),this.opacity)},rgb:function(){return qt(this).rgb()}}));var qf=-.14861,Uf=1.78277,Df=-.29227,Of=-.90649,Ff=1.97294,If=Ff*Of,Yf=Ff*Uf,Bf=Uf*Df-Of*qf;pf(Vt,$t,wt(Mt,{brighter:function(t){return t=null==t?1/.7:Math.pow(1/.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?.7:Math.pow(.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=isNaN(this.h)?0:(this.h+120)*Nf,n=+this.l,e=isNaN(this.s)?0:this.s*n*(1-n),r=Math.cos(t),i=Math.sin(t);return new At(255*(n+e*(qf*r+Uf*i)),255*(n+e*(Df*r+Of*i)),255*(n+e*(Ff*r)),this.opacity)}}));var jf,Hf,Xf,$f,Vf,Wf,Zf=function(t){var n=t.length-1;return function(e){var r=e<=0?e=0:e>=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],u=r>0?t[r-1]:2*i-o,a=ro&&(i=n.slice(o,i),a[u]?a[u]+=i:a[++u]=i),(e=e[0])===(r=r[0])?a[u]?a[u]+=r:a[++u]=r:(a[++u]=null,c.push({i:u,x:rl(e,r)})),o=ul.lastIndex;return oDl&&e.state1e-6)if(Math.abs(f*a-c*s)>1e-6&&i){var h=e-o,p=r-u,d=a*a+c*c,v=h*h+p*p,_=Math.sqrt(d),y=Math.sqrt(l),g=i*Math.tan((Yh-Math.acos((d+l-v)/(2*_*y)))/2),m=g/y,x=g/_;Math.abs(m-1)>1e-6&&(this._+="L"+(t+m*s)+","+(n+m*f)),this._+="A"+i+","+i+",0,0,"+ +(f*h>s*p)+","+(this._x1=t+x*a)+","+(this._y1=n+x*c)}else this._+="L"+(this._x1=t)+","+(this._y1=n);else;},arc:function(t,n,e,r,i,o){t=+t,n=+n;var u=(e=+e)*Math.cos(r),a=e*Math.sin(r),c=t+u,s=n+a,f=1^o,l=o?r-i:i-r;if(e<0)throw new Error("negative radius: "+e);null===this._x1?this._+="M"+c+","+s:(Math.abs(this._x1-c)>1e-6||Math.abs(this._y1-s)>1e-6)&&(this._+="L"+c+","+s),e&&(l<0&&(l=l%Bh+Bh),l>jh?this._+="A"+e+","+e+",0,1,"+f+","+(t-u)+","+(n-a)+"A"+e+","+e+",0,1,"+f+","+(this._x1=c)+","+(this._y1=s):l>1e-6&&(this._+="A"+e+","+e+",0,"+ 
+(l>=Yh)+","+f+","+(this._x1=t+e*Math.cos(i))+","+(this._y1=n+e*Math.sin(i))))},rect:function(t,n,e,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+n)+"h"+ +e+"v"+ +r+"h"+-e+"Z"},toString:function(){return this._}};be.prototype=we.prototype={constructor:be,has:function(t){return"$"+t in this},get:function(t){return this["$"+t]},set:function(t,n){return this["$"+t]=n,this},remove:function(t){var n="$"+t;return n in this&&delete this[n]},clear:function(){for(var t in this)"$"===t[0]&&delete this[t]},keys:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(n.slice(1));return t},values:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(this[n]);return t},entries:function(){var t=[];for(var n in this)"$"===n[0]&&t.push({key:n.slice(1),value:this[n]});return t},size:function(){var t=0;for(var n in this)"$"===n[0]&&++t;return t},empty:function(){for(var t in this)if("$"===t[0])return!1;return!0},each:function(t){for(var n in this)"$"===n[0]&&t(this[n],n.slice(1),this)}};var Hh=we.prototype;Se.prototype=Ee.prototype={constructor:Se,has:Hh.has,add:function(t){return t+="",this["$"+t]=t,this},remove:Hh.remove,clear:Hh.clear,values:Hh.keys,size:Hh.size,empty:Hh.empty,each:Hh.each};var Xh={},$h={},Vh=34,Wh=10,Zh=13,Gh=function(t){function n(t,n){function e(){if(s)return $h;if(f)return f=!1,Xh;var n,e,r=a;if(t.charCodeAt(r)===Vh){for(;a++=u?s=!0:(e=t.charCodeAt(a++))===Wh?f=!0:e===Zh&&(f=!0,t.charCodeAt(a)===Wh&&++a),t.slice(r+1,n-1).replace(/""/g,'"')}for(;af&&(f=r),il&&(l=i));for(ft||t>i||r>n||n>o))return this;var u,a,c=i-e,s=this._root;switch(a=(n<(r+o)/2)<<1|t<(e+i)/2){case 0:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,o=r+c,t>i||n>o);break;case 1:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,o=r+c,e>t||n>o);break;case 2:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,r=o-c,t>i||r>n);break;case 3:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,r=o-c,e>t||r>n)}this._root&&this._root.length&&(this._root=s)}return 
this._x0=e,this._y0=r,this._x1=i,this._y1=o,this},fp.data=function(){var t=[];return this.visit(function(n){if(!n.length)do{t.push(n.data)}while(n=n.next)}),t},fp.extent=function(t){return arguments.length?this.cover(+t[0][0],+t[0][1]).cover(+t[1][0],+t[1][1]):isNaN(this._x0)?void 0:[[this._x0,this._y0],[this._x1,this._y1]]},fp.find=function(t,n,e){var r,i,o,u,a,c,s,f=this._x0,l=this._y0,h=this._x1,p=this._y1,d=[],v=this._root;for(v&&d.push(new sp(v,f,l,h,p)),null==e?e=1/0:(f=t-e,l=n-e,h=t+e,p=n+e,e*=e);c=d.pop();)if(!(!(v=c.node)||(i=c.x0)>h||(o=c.y0)>p||(u=c.x1)=y)<<1|t>=_)&&(c=d[d.length-1],d[d.length-1]=d[d.length-1-s],d[d.length-1-s]=c)}else{var g=t-+this._x.call(null,v.data),m=n-+this._y.call(null,v.data),x=g*g+m*m;if(x=(a=(d+_)/2))?d=a:_=a,(f=u>=(c=(v+y)/2))?v=c:y=c,n=p,!(p=p[l=f<<1|s]))return this;if(!p.length)break;(n[l+1&3]||n[l+2&3]||n[l+3&3])&&(e=n,h=l)}for(;p.data!==t;)if(r=p,!(p=p.next))return this;return(i=p.next)&&delete p.next,r?(i?r.next=i:delete r.next,this):n?(i?n[l]=i:delete n[l],(p=n[0]||n[1]||n[2]||n[3])&&p===(n[3]||n[2]||n[1]||n[0])&&!p.length&&(e?e[h]=p:this._root=p),this):(this._root=i,this)},fp.removeAll=function(t){for(var n=0,e=t.length;n1?r[0]+r.slice(2):r,+t.slice(e+1)]},vp=function(t){return(t=dp(Math.abs(t)))?t[1]:NaN},_p=function(t,n){return function(e,r){for(var i=e.length,o=[],u=0,a=t[0],c=0;i>0&&a>0&&(c+a+1>r&&(a=Math.max(1,r-c)),o.push(e.substring(i-=a,i+a)),!((c+=a+1)>r));)a=t[u=(u+1)%t.length];return o.reverse().join(n)}},yp=function(t){return function(n){return n.replace(/[0-9]/g,function(n){return t[+n]})}},gp=function(t,n){var e=dp(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")},mp={"":function(t,n){t:for(var e,r=(t=t.toPrecision(n)).length,i=1,o=-1;i0&&(o=0)}return o>0?t.slice(0,o)+t.slice(e+1):t},"%":function(t,n){return(100*t).toFixed(n)},b:function(t){return 
Math.round(t).toString(2)},c:function(t){return t+""},d:function(t){return Math.round(t).toString(10)},e:function(t,n){return t.toExponential(n)},f:function(t,n){return t.toFixed(n)},g:function(t,n){return t.toPrecision(n)},o:function(t){return Math.round(t).toString(8)},p:function(t,n){return gp(100*t,n)},r:gp,s:function(t,n){var e=dp(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(lp=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,u=r.length;return o===u?r:o>u?r+new Array(o-u+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+dp(t,Math.max(0,n+o-1))[0]},X:function(t){return Math.round(t).toString(16).toUpperCase()},x:function(t){return Math.round(t).toString(16)}},xp=/^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;He.prototype=Xe.prototype,Xe.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(null==this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(null==this.precision?"":"."+Math.max(0,0|this.precision))+this.type};var bp,wp=function(t){return t},Mp=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"],Tp=function(t){function n(t){function n(t){var n,r,u,f=_,x=y;if("c"===v)x=g(t)+x,t="";else{var b=(t=+t)<0;if(t=g(Math.abs(t),d),b&&0==+t&&(b=!1),f=(b?"("===s?s:"-":"-"===s||"("===s?"":s)+f,x=x+("s"===v?Mp[8+lp/3]:"")+(b&&"("===s?")":""),m)for(n=-1,r=t.length;++n(u=t.charCodeAt(n))||u>57){x=(46===u?i+t.slice(n+1):t.slice(n))+x,t=t.slice(0,n);break}}p&&!l&&(t=e(t,1/0));var w=f.length+t.length+x.length,M=w>1)+f+t+x+M.slice(w);break;default:t=M+f+t+x}return o(t)}var a=(t=He(t)).fill,c=t.align,s=t.sign,f=t.symbol,l=t.zero,h=t.width,p=t.comma,d=t.precision,v=t.type,_="$"===f?r[0]:"#"===f&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",y="$"===f?r[1]:/[%p]/.test(v)?u:"",g=mp[v],m=!v||/[defgprs%]/.test(v);return d=null==d?v?6:12:/[gprs]/.test(v)?Math.max(1,Math.min(21,d)):Math.max(0,Math.min(20,d)),n.toString=function(){return t+""},n}var 
e=t.grouping&&t.thousands?_p(t.grouping,t.thousands):wp,r=t.currency,i=t.decimal,o=t.numerals?yp(t.numerals):wp,u=t.percent||"%";return{format:n,formatPrefix:function(t,e){var r=n((t=He(t),t.type="f",t)),i=3*Math.max(-8,Math.min(8,Math.floor(vp(e)/3))),o=Math.pow(10,-i),u=Mp[8+i/3];return function(t){return r(o*t)+u}}}};$e({decimal:".",thousands:",",grouping:[3],currency:["$",""]});var kp=function(t){return Math.max(0,-vp(Math.abs(t)))},Np=function(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(vp(n)/3)))-vp(Math.abs(t)))},Sp=function(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,vp(n)-vp(t))+1},Ep=function(){return new Ve};Ve.prototype={constructor:Ve,reset:function(){this.s=this.t=0},add:function(t){We(ud,t,this.t),We(this,ud.s,this.s),this.s?this.t+=ud.t:this.s=ud.t},valueOf:function(){return this.s}};var Ap,Cp,zp,Pp,Rp,Lp,qp,Up,Dp,Op,Fp,Ip,Yp,Bp,jp,Hp,Xp,$p,Vp,Wp,Zp,Gp,Jp,Qp,Kp,td,nd,ed,rd,id,od,ud=new Ve,ad=1e-6,cd=Math.PI,sd=cd/2,fd=cd/4,ld=2*cd,hd=180/cd,pd=cd/180,dd=Math.abs,vd=Math.atan,_d=Math.atan2,yd=Math.cos,gd=Math.ceil,md=Math.exp,xd=Math.log,bd=Math.pow,wd=Math.sin,Md=Math.sign||function(t){return t>0?1:t<0?-1:0},Td=Math.sqrt,kd=Math.tan,Nd={Feature:function(t,n){Ke(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++rad?Dp=90:Pd<-ad&&(qp=-90),jp[0]=Lp,jp[1]=Up}},Ld={sphere:Qe,point:Mr,lineStart:kr,lineEnd:Er,polygonStart:function(){Ld.lineStart=Ar,Ld.lineEnd=Cr},polygonEnd:function(){Ld.lineStart=kr,Ld.lineEnd=Er}},qd=function(t){return function(){return t}},Ud=function(t,n){function e(e,r){return e=t(e,r),n(e[0],e[1])}return t.invert&&n.invert&&(e.invert=function(e,r){return(e=n.invert(e,r))&&t.invert(e[0],e[1])}),e};Rr.invert=Rr;var Dd,Od,Fd,Id,Yd,Bd,jd,Hd,Xd,$d,Vd,Wd=function(t){function n(n){return n=t(n[0]*pd,n[1]*pd),n[0]*=hd,n[1]*=hd,n}return t=Lr(t[0]*pd,t[1]*pd,t.length>2?t[2]*pd:0),n.invert=function(n){return n=t.invert(n[0]*pd,n[1]*pd),n[0]*=hd,n[1]*=hd,n},n},Zd=function(){var 
t,n=[];return{point:function(n,e){t.push([n,e])},lineStart:function(){n.push(t=[])},lineEnd:Qe,rejoin:function(){n.length>1&&n.push(n.pop().concat(n.shift()))},result:function(){var e=n;return n=[],t=null,e}}},Gd=function(t,n,e,r,i,o){var u,a=t[0],c=t[1],s=0,f=1,l=n[0]-a,h=n[1]-c;if(u=e-a,l||!(u>0)){if(u/=l,l<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=i-a,l||!(u<0)){if(u/=l,l<0){if(u>f)return;u>s&&(s=u)}else if(l>0){if(u0)){if(u/=h,h<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=o-c,h||!(u<0)){if(u/=h,h<0){if(u>f)return;u>s&&(s=u)}else if(h>0){if(u0&&(t[0]=a+s*l,t[1]=c+s*h),f<1&&(n[0]=a+f*l,n[1]=c+f*h),!0}}}}},Jd=function(t,n){return dd(t[0]-n[0])=0;--o)i.point((f=s[o])[0],f[1]);else r(h.x,h.p.x,-1,i);h=h.p}s=(h=h.o).z,p=!p}while(!h.v);i.lineEnd()}}},Kd=1e9,tv=-Kd,nv=Ep(),ev=function(t,n){var e=n[0],r=n[1],i=[wd(e),-yd(e),0],o=0,u=0;nv.reset();for(var a=0,c=t.length;a=0?1:-1,T=M*w,k=T>cd,N=d*x;if(nv.add(_d(N*M*wd(T),v*b+N*yd(T))),o+=k?w+M*ld:w,k^h>=e^g>=e){var S=sr(ar(l),ar(y));hr(S);var E=sr(i,S);hr(E);var A=(k^w>=0?-1:1)*Ge(E[2]);(r>A||r===A&&(S[0]||S[1]))&&(u+=k^w>=0?1:-1)}}return(o<-ad||oyv&&(yv=t),n<_v&&(_v=n),n>gv&&(gv=n)},lineStart:Qe,lineEnd:Qe,polygonStart:Qe,polygonEnd:Qe,result:function(){var t=[[vv,_v],[yv,gv]];return yv=gv=-(_v=vv=1/0),t}},xv=0,bv=0,wv=0,Mv=0,Tv=0,kv=0,Nv=0,Sv=0,Ev=0,Av={point:oi,lineStart:ui,lineEnd:si,polygonStart:function(){Av.lineStart=fi,Av.lineEnd=li},polygonEnd:function(){Av.point=oi,Av.lineStart=ui,Av.lineEnd=si},result:function(){var t=Ev?[Nv/Ev,Sv/Ev]:kv?[Mv/kv,Tv/kv]:wv?[xv/wv,bv/wv]:[NaN,NaN];return xv=bv=wv=Mv=Tv=kv=Nv=Sv=Ev=0,t}};di.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._context.moveTo(t,n),this._point=1;break;case 
1:this._context.lineTo(t,n);break;default:this._context.moveTo(t+this._radius,n),this._context.arc(t,n,this._radius,0,ld)}},result:Qe};var Cv,zv,Pv,Rv,Lv,qv=Ep(),Uv={point:Qe,lineStart:function(){Uv.point=vi},lineEnd:function(){Cv&&_i(zv,Pv),Uv.point=Qe},polygonStart:function(){Cv=!0},polygonEnd:function(){Cv=null},result:function(){var t=+qv;return qv.reset(),t}};yi.prototype={_radius:4.5,_circle:gi(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._string.push("Z"),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._string.push("M",t,",",n),this._point=1;break;case 1:this._string.push("L",t,",",n);break;default:null==this._circle&&(this._circle=gi(this._radius)),this._string.push("M",t,",",n,this._circle)}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}return null}};var Dv=function(t,n,e,r){return function(i,o){function u(n,e){var r=i(n,e);t(n=r[0],e=r[1])&&o.point(n,e)}function a(t,n){var e=i(t,n);_.point(e[0],e[1])}function c(){b.point=a,_.lineStart()}function s(){b.point=u,_.lineEnd()}function f(t,n){v.push([t,n]);var e=i(t,n);m.point(e[0],e[1])}function l(){m.lineStart(),v=[]}function h(){f(v[0][0],v[0][1]),m.lineEnd();var t,n,e,r,i=m.clean(),u=g.result(),a=u.length;if(v.pop(),p.push(v),v=null,a)if(1&i){if(e=u[0],(n=e.length-1)>0){for(x||(o.polygonStart(),x=!0),o.lineStart(),t=0;t1&&2&i&&u.push(u.pop().concat(u.shift())),d.push(u.filter(mi))}var p,d,v,_=n(o),y=i.invert(r[0],r[1]),g=Zd(),m=n(g),x=!1,b={point:u,lineStart:c,lineEnd:s,polygonStart:function(){b.point=f,b.lineStart=l,b.lineEnd=h,d=[],p=[]},polygonEnd:function(){b.point=u,b.lineStart=c,b.lineEnd=s,d=Cs(d);var 
t=ev(p,y);d.length?(x||(o.polygonStart(),x=!0),Qd(d,xi,t,e,o)):t&&(x||(o.polygonStart(),x=!0),o.lineStart(),e(null,null,1,o),o.lineEnd()),x&&(o.polygonEnd(),x=!1),d=p=null},sphere:function(){o.polygonStart(),o.lineStart(),e(null,null,1,o),o.lineEnd(),o.polygonEnd()}};return b}},Ov=Dv(function(){return!0},function(t){var n,e=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),n=1},point:function(o,u){var a=o>0?cd:-cd,c=dd(o-e);dd(c-cd)0?sd:-sd),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(a,r),t.point(o,r),n=0):i!==a&&c>=cd&&(dd(e-i)ad){var o=t[0]o}function r(t,n,e){var r=[1,0,0],i=sr(ar(t),ar(n)),u=cr(i,i),a=i[0],c=u-a*a;if(!c)return!e&&t;var s=o*u/c,f=-o*a/c,l=sr(r,i),h=lr(r,s);fr(h,lr(i,f));var p=l,d=cr(h,p),v=cr(p,p),_=d*d-v*(cr(h,h)-1);if(!(_<0)){var y=Td(_),g=lr(p,(-d-y)/v);if(fr(g,h),g=ur(g),!e)return g;var m,x=t[0],b=n[0],w=t[1],M=n[1];b0^g[1]<(dd(g[0]-x)cd^(x<=g[0]&&g[0]<=b)){var S=lr(p,(-d+y)/v);return fr(S,h),[g,ur(S)]}}}function i(n,e){var r=u?t:cd-t,i=0;return n<-r?i|=1:n>r&&(i|=2),e<-r?i|=4:e>r&&(i|=8),i}var o=yd(t),u=o>0,a=dd(o)>ad;return Dv(e,function(t){var n,o,c,s,f;return{lineStart:function(){s=c=!1,f=1},point:function(l,h){var p,d=[l,h],v=e(l,h),_=u?v?0:i(l,h):v?i(l+(l<0?cd:-cd),h):0;if(!n&&(s=c=v)&&t.lineStart(),v!==c&&(!(p=r(n,d))||Jd(n,p)||Jd(d,p))&&(d[0]+=ad,d[1]+=ad,v=e(d[0],d[1])),v!==c)f=0,v?(t.lineStart(),p=r(d,n),t.point(p[0],p[1])):(p=r(n,d),t.point(p[0],p[1]),t.lineEnd()),n=p;else if(a&&n&&u^v){var y;_&o||!(y=r(d,n,!0))||(f=0,u?(t.lineStart(),t.point(y[0][0],y[0][1]),t.point(y[1][0],y[1][1]),t.lineEnd()):(t.point(y[1][0],y[1][1]),t.lineEnd(),t.lineStart(),t.point(y[0][0],y[0][1])))}!v||n&&Jd(n,d)||t.point(d[0],d[1]),n=d,c=v,o=_},lineEnd:function(){c&&t.lineEnd(),n=null},clean:function(){return 
f|(s&&c)<<1}}},function(e,r,i,o){Or(o,t,n,i,e,r)},u?[0,-t]:[-cd,t-cd])};Mi.prototype={constructor:Mi,point:function(t,n){this.stream.point(t,n)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var Iv=16,Yv=yd(30*pd),Bv=function(t,n){return+n?Si(t,n):Ni(t)},jv=wi({point:function(t,n){this.stream.point(t*pd,n*pd)}}),Hv=function(){return Ci(Pi).scale(155.424).center([0,33.6442])},Xv=function(){return Hv().parallels([29.5,45.5]).scale(1070).translate([480,250]).rotate([96,0]).center([-.6,38.7])},$v=Li(function(t){return Td(2/(1+t))});$v.invert=qi(function(t){return 2*Ge(t/2)});var Vv=Li(function(t){return(t=Ze(t))&&t/wd(t)});Vv.invert=qi(function(t){return t});Ui.invert=function(t,n){return[t,2*vd(md(n))-sd]};Ii.invert=Ii;Bi.invert=qi(vd);Hi.invert=qi(Ge);Xi.invert=qi(function(t){return 2*vd(t)});$i.invert=function(t,n){return[-n,2*vd(md(t))-sd]};uo.prototype=eo.prototype={constructor:uo,count:function(){return this.eachAfter(to)},each:function(t){var n,e,r,i,o=this,u=[o];do{for(n=u.reverse(),u=[];o=n.pop();)if(t(o),e=o.children)for(r=0,i=e.length;r=0;--e)i.push(n[e]);return this},sum:function(t){return this.eachAfter(function(n){for(var e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e})},sort:function(t){return this.eachBefore(function(n){n.children&&n.children.sort(t)})},path:function(t){for(var n=this,e=no(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){var t=[];return this.each(function(n){t.push(n)}),t},leaves:function(){var t=[];return this.eachBefore(function(n){n.children||t.push(n)}),t},links:function(){var t=this,n=[];return 
t.each(function(e){e!==t&&n.push({source:e.parent,target:e})}),n},copy:function(){return eo(this).eachBefore(io)}};var Wv=Array.prototype.slice,Zv=function(t){for(var n,e,r=0,i=(t=ao(Wv.call(t))).length,o=[];r1?n:1)},e}(r_),o_=function t(n){function e(t,e,r,i,o){if((u=t._squarify)&&u.ratio===n)for(var u,a,c,s,f,l=-1,h=u.length,p=t.value;++l1?n:1)},e}(r_),u_=function(t,n,e){return(n[0]-t[0])*(e[1]-t[1])-(n[1]-t[1])*(e[0]-t[0])},a_=[].slice,c_={};Bo.prototype=Wo.prototype={constructor:Bo,defer:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("defer after await");if(null!=this._error)return this;var n=a_.call(arguments,1);return n.push(t),++this._waiting,this._tasks.push(n),jo(this),this},abort:function(){return null==this._error&&$o(this,new Error("abort")),this},await:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=function(n,e){t.apply(null,[n].concat(e))},Vo(this),this},awaitAll:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=t,Vo(this),this}};var s_=function(){return Math.random()},f_=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,1===arguments.length?(e=t,t=0):e-=t,function(){return n()*e+t}}return e.source=t,e}(s_),l_=function t(n){function e(t,e){var r,i;return t=null==t?0:+t,e=null==e?1:+e,function(){var o;if(null!=r)o=r,r=null;else do{r=2*n()-1,o=2*n()-1,i=r*r+o*o}while(!i||i>1);return t+e*o*Math.sqrt(-2*Math.log(i)/i)}}return e.source=t,e}(s_),h_=function t(n){function e(){var t=l_.source(n).apply(this,arguments);return function(){return Math.exp(t())}}return e.source=t,e}(s_),p_=function t(n){function e(t){return function(){for(var e=0,r=0;r=200&&e<300||304===e){if(o)try{n=o.call(r,s)}catch(t){return void a.call("error",r,t)}else n=s;a.call("load",r,n)}else a.call("error",r,t)}var 
r,i,o,u,a=h("beforesend","progress","load","error"),c=we(),s=new XMLHttpRequest,f=null,l=null,p=0;if("undefined"==typeof XDomainRequest||"withCredentials"in s||!/^(http(s)?:)?\/\//.test(t)||(s=new XDomainRequest),"onload"in s?s.onload=s.onerror=s.ontimeout=e:s.onreadystatechange=function(t){s.readyState>3&&e(t)},s.onprogress=function(t){a.call("progress",r,t)},r={header:function(t,n){return t=(t+"").toLowerCase(),arguments.length<2?c.get(t):(null==n?c.remove(t):c.set(t,n+""),r)},mimeType:function(t){return arguments.length?(i=null==t?null:t+"",r):i},responseType:function(t){return arguments.length?(u=t,r):u},timeout:function(t){return arguments.length?(p=+t,r):p},user:function(t){return arguments.length<1?f:(f=null==t?null:t+"",r)},password:function(t){return arguments.length<1?l:(l=null==t?null:t+"",r)},response:function(t){return o=t,r},get:function(t,n){return r.send("GET",t,n)},post:function(t,n){return r.send("POST",t,n)},send:function(n,e,o){return s.open(n,t,!0,f,l),null==i||c.has("accept")||c.set("accept",i+",*/*"),s.setRequestHeader&&c.each(function(t,n){s.setRequestHeader(n,t)}),null!=i&&s.overrideMimeType&&s.overrideMimeType(i),null!=u&&(s.responseType=u),p>0&&(s.timeout=p),null==o&&"function"==typeof e&&(o=e,e=null),null!=o&&1===o.length&&(o=Zo(o)),null!=o&&r.on("error",o).on("load",function(t){o(null,t)}),a.call("beforesend",r,s),s.send(null==e?null:e),r},abort:function(){return s.abort(),r},on:function(){var t=a.on.apply(a,arguments);return t===a?r:t}},null!=n){if("function"!=typeof n)throw new Error("invalid callback: "+n);return r.get(n)}return r},y_=function(t,n){return function(e,r){var i=__(e).mimeType(t).response(n);if(null!=r){if("function"!=typeof r)throw new Error("invalid callback: "+r);return i.get(r)}return i}},g_=y_("text/html",function(t){return document.createRange().createContextualFragment(t.responseText)}),m_=y_("application/json",function(t){return JSON.parse(t.responseText)}),x_=y_("text/plain",function(t){return 
t.responseText}),b_=y_("application/xml",function(t){var n=t.responseXML;if(!n)throw new Error("parse error");return n}),w_=function(t,n){return function(e,r,i){arguments.length<3&&(i=r,r=null);var o=__(e).mimeType(t);return o.row=function(t){return arguments.length?o.response(Jo(n,r=t)):r},o.row(r),i?o.get(i):o}},M_=w_("text/csv",Qh),T_=w_("text/tab-separated-values",rp),k_=Array.prototype,N_=k_.map,S_=k_.slice,E_={name:"implicit"},A_=function(t){return function(){return t}},C_=function(t){return+t},z_=[0,1],P_=function(n,e,r){var o,u=n[0],a=n[n.length-1],c=i(u,a,null==e?10:e);switch((r=He(null==r?",f":r)).type){case"s":var s=Math.max(Math.abs(u),Math.abs(a));return null!=r.precision||isNaN(o=Np(c,s))||(r.precision=o),t.formatPrefix(r,s);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(o=Sp(c,Math.max(Math.abs(u),Math.abs(a))))||(r.precision=o-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(o=kp(c))||(r.precision=o-2*("%"===r.type))}return t.format(r)},R_=function(t,n){var e,r=0,i=(t=t.slice()).length-1,o=t[r],u=t[i];return u0?t>1?Mu(function(n){n.setTime(Math.floor(n/t)*t)},function(n,e){n.setTime(+n+e*t)},function(n,e){return(e-n)/t}):U_:null};var D_=U_.range,O_=6e4,F_=6048e5,I_=Mu(function(t){t.setTime(1e3*Math.floor(t/1e3))},function(t,n){t.setTime(+t+1e3*n)},function(t,n){return(n-t)/1e3},function(t){return t.getUTCSeconds()}),Y_=I_.range,B_=Mu(function(t){t.setTime(Math.floor(t/O_)*O_)},function(t,n){t.setTime(+t+n*O_)},function(t,n){return(n-t)/O_},function(t){return t.getMinutes()}),j_=B_.range,H_=Mu(function(t){var n=t.getTimezoneOffset()*O_%36e5;n<0&&(n+=36e5),t.setTime(36e5*Math.floor((+t-n)/36e5)+n)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getHours()}),X_=H_.range,$_=Mu(function(t){t.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*O_)/864e5},function(t){return 
t.getDate()-1}),V_=$_.range,W_=Tu(0),Z_=Tu(1),G_=Tu(2),J_=Tu(3),Q_=Tu(4),K_=Tu(5),ty=Tu(6),ny=W_.range,ey=Z_.range,ry=G_.range,iy=J_.range,oy=Q_.range,uy=K_.range,ay=ty.range,cy=Mu(function(t){t.setDate(1),t.setHours(0,0,0,0)},function(t,n){t.setMonth(t.getMonth()+n)},function(t,n){return n.getMonth()-t.getMonth()+12*(n.getFullYear()-t.getFullYear())},function(t){return t.getMonth()}),sy=cy.range,fy=Mu(function(t){t.setMonth(0,1),t.setHours(0,0,0,0)},function(t,n){t.setFullYear(t.getFullYear()+n)},function(t,n){return n.getFullYear()-t.getFullYear()},function(t){return t.getFullYear()});fy.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setFullYear(Math.floor(n.getFullYear()/t)*t),n.setMonth(0,1),n.setHours(0,0,0,0)},function(n,e){n.setFullYear(n.getFullYear()+e*t)}):null};var ly=fy.range,hy=Mu(function(t){t.setUTCSeconds(0,0)},function(t,n){t.setTime(+t+n*O_)},function(t,n){return(n-t)/O_},function(t){return t.getUTCMinutes()}),py=hy.range,dy=Mu(function(t){t.setUTCMinutes(0,0,0)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getUTCHours()}),vy=dy.range,_y=Mu(function(t){t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+n)},function(t,n){return(n-t)/864e5},function(t){return t.getUTCDate()-1}),yy=_y.range,gy=ku(0),my=ku(1),xy=ku(2),by=ku(3),wy=ku(4),My=ku(5),Ty=ku(6),ky=gy.range,Ny=my.range,Sy=xy.range,Ey=by.range,Ay=wy.range,Cy=My.range,zy=Ty.range,Py=Mu(function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCMonth(t.getUTCMonth()+n)},function(t,n){return n.getUTCMonth()-t.getUTCMonth()+12*(n.getUTCFullYear()-t.getUTCFullYear())},function(t){return t.getUTCMonth()}),Ry=Py.range,Ly=Mu(function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCFullYear(t.getUTCFullYear()+n)},function(t,n){return n.getUTCFullYear()-t.getUTCFullYear()},function(t){return t.getUTCFullYear()});Ly.every=function(t){return 
isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setUTCFullYear(Math.floor(n.getUTCFullYear()/t)*t),n.setUTCMonth(0,1),n.setUTCHours(0,0,0,0)},function(n,e){n.setUTCFullYear(n.getUTCFullYear()+e*t)}):null};var qy,Uy=Ly.range,Dy={"-":"",_:" ",0:"0"},Oy=/^\s*\d+/,Fy=/^%/,Iy=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;Ma({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var Yy=Date.prototype.toISOString?function(t){return t.toISOString()}:t.utcFormat("%Y-%m-%dT%H:%M:%S.%LZ"),By=+new Date("2000-01-01T00:00:00.000Z")?function(t){var n=new Date(t);return isNaN(n)?null:n}:t.utcParse("%Y-%m-%dT%H:%M:%S.%LZ"),jy=1e3,Hy=60*jy,Xy=60*Hy,$y=24*Xy,Vy=7*$y,Wy=30*$y,Zy=365*$y,Gy=function(t){return 
t.match(/.{6}/g).map(function(t){return"#"+t})},Jy=Gy("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"),Qy=Gy("393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6"),Ky=Gy("3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9"),tg=Gy("1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5"),ng=wl($t(300,.5,0),$t(-240,.5,1)),eg=wl($t(-100,.75,.35),$t(80,1.5,.8)),rg=wl($t(260,.75,.35),$t(80,1.5,.8)),ig=$t(),og=Sa(Gy("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5d
e2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")),ug=Sa(Gy("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),ag=Sa(Gy("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3
609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),cg=Sa(Gy("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c
33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921")),sg=function(t){return function(){return t}},fg=Math.abs,lg=Math.atan2,hg=Math.cos,pg=Math.max,dg=Math.min,vg=Math.sin,_g=Math.sqrt,yg=1e-12,gg=Math.PI,mg=gg/2,xg=2*gg;Oa.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._context.lineTo(t,n)}}};var bg=function(t){return new Oa(t)},wg=function(){function t(t){var a,c,s,f=t.length,l=!1;for(null==i&&(u=o(s=ve())),a=0;a<=f;++a)!(a=f;--l)s.point(_[l],y[l]);s.lineEnd(),s.areaEnd()}v&&(_[n]=+e(h,n,t),y[n]=+i(h,n,t),s.point(r?+r(h,n,t):_[n],o?+o(h,n,t):y[n]))}if(p)return s=null,p+""||null}function n(){return wg().defined(u).curve(c).context(a)}var e=Fa,r=null,i=sg(0),o=Ia,u=sg(!0),a=null,c=bg,s=null;return t.x=function(n){return arguments.length?(e="function"==typeof n?n:sg(+n),r=null,t):e},t.x0=function(n){return arguments.length?(e="function"==typeof 
n?n:sg(+n),t):e},t.x1=function(n){return arguments.length?(r=null==n?null:"function"==typeof n?n:sg(+n),t):r},t.y=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),o=null,t):i},t.y0=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),t):i},t.y1=function(n){return arguments.length?(o=null==n?null:"function"==typeof n?n:sg(+n),t):o},t.lineX0=t.lineY0=function(){return n().x(e).y(i)},t.lineY1=function(){return n().x(e).y(o)},t.lineX1=function(){return n().x(r).y(i)},t.defined=function(n){return arguments.length?(u="function"==typeof n?n:sg(!!n),t):u},t.curve=function(n){return arguments.length?(c=n,null!=a&&(s=c(a)),t):c},t.context=function(n){return arguments.length?(null==n?a=s=null:s=c(a=n),t):a},t},Tg=function(t,n){return nt?1:n>=t?0:NaN},kg=function(t){return t},Ng=Ba(bg);Ya.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,n){this._curve.point(n*Math.sin(t),n*-Math.cos(t))}};var Sg=function(){return ja(wg().curve(Ng))},Eg=function(){var t=Mg().curve(Ng),n=t.curve,e=t.lineX0,r=t.lineX1,i=t.lineY0,o=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return ja(e())},delete t.lineX0,t.lineEndAngle=function(){return ja(r())},delete t.lineX1,t.lineInnerRadius=function(){return ja(i())},delete t.lineY0,t.lineOuterRadius=function(){return ja(o())},delete t.lineY1,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t},Ag=function(t,n){return[(n=+n)*Math.cos(t-=Math.PI/2),n*Math.sin(t)]},Cg=Array.prototype.slice,zg={draw:function(t,n){var e=Math.sqrt(n/gg);t.moveTo(e,0),t.arc(0,0,e,0,xg)}},Pg={draw:function(t,n){var 
e=Math.sqrt(n/5)/2;t.moveTo(-3*e,-e),t.lineTo(-e,-e),t.lineTo(-e,-3*e),t.lineTo(e,-3*e),t.lineTo(e,-e),t.lineTo(3*e,-e),t.lineTo(3*e,e),t.lineTo(e,e),t.lineTo(e,3*e),t.lineTo(-e,3*e),t.lineTo(-e,e),t.lineTo(-3*e,e),t.closePath()}},Rg=Math.sqrt(1/3),Lg=2*Rg,qg={draw:function(t,n){var e=Math.sqrt(n/Lg),r=e*Rg;t.moveTo(0,-e),t.lineTo(r,0),t.lineTo(0,e),t.lineTo(-r,0),t.closePath()}},Ug=Math.sin(gg/10)/Math.sin(7*gg/10),Dg=Math.sin(xg/10)*Ug,Og=-Math.cos(xg/10)*Ug,Fg={draw:function(t,n){var e=Math.sqrt(.8908130915292852*n),r=Dg*e,i=Og*e;t.moveTo(0,-e),t.lineTo(r,i);for(var o=1;o<5;++o){var u=xg*o/5,a=Math.cos(u),c=Math.sin(u);t.lineTo(c*e,-a*e),t.lineTo(a*r-c*i,c*r+a*i)}t.closePath()}},Ig={draw:function(t,n){var e=Math.sqrt(n),r=-e/2;t.rect(r,r,e,e)}},Yg=Math.sqrt(3),Bg={draw:function(t,n){var e=-Math.sqrt(n/(3*Yg));t.moveTo(0,2*e),t.lineTo(-Yg*e,-e),t.lineTo(Yg*e,-e),t.closePath()}},jg=-.5,Hg=Math.sqrt(3)/2,Xg=1/Math.sqrt(12),$g=3*(Xg/2+1),Vg={draw:function(t,n){var e=Math.sqrt(n/$g),r=e/2,i=e*Xg,o=r,u=e*Xg+e,a=-o,c=u;t.moveTo(r,i),t.lineTo(o,u),t.lineTo(a,c),t.lineTo(jg*r-Hg*i,Hg*r+jg*i),t.lineTo(jg*o-Hg*u,Hg*o+jg*u),t.lineTo(jg*a-Hg*c,Hg*a+jg*c),t.lineTo(jg*r+Hg*i,jg*i-Hg*r),t.lineTo(jg*o+Hg*u,jg*u-Hg*o),t.lineTo(jg*a+Hg*c,jg*c-Hg*a),t.closePath()}},Wg=[zg,Pg,qg,Ig,Fg,Bg,Vg],Zg=function(){};Ja.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:Ga(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Qa.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x2=t,this._y2=n;break;case 1:this._point=2,this._x3=t,this._y3=n;break;case 2:this._point=3,this._x4=t,this._y4=n,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+n)/6);break;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Ka.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var e=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+n)/6;this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break;case 3:this._point=4;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};tc.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,n=this._y,e=t.length-1;if(e>0)for(var 
r,i=t[0],o=n[0],u=t[e]-i,a=n[e]-o,c=-1;++c<=e;)r=c/e,this._basis.point(this._beta*t[c]+(1-this._beta)*(i+r*u),this._beta*n[c]+(1-this._beta)*(o+r*a));this._x=this._y=null,this._basis.lineEnd()},point:function(t,n){this._x.push(+t),this._y.push(+n)}};var Gg=function t(n){function e(t){return 1===n?new Ja(t):new tc(t,n)}return e.beta=function(n){return t(+n)},e}(.85);ec.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:nc(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2,this._x1=t,this._y1=n;break;case 2:this._point=3;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Jg=function t(n){function e(t){return new ec(t,n)}return e.tension=function(n){return t(+n)},e}(0);rc.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 
2:this._point=3,this._x5=t,this._y5=n;break;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Qg=function t(n){function e(t){return new rc(t,n)}return e.tension=function(n){return t(+n)},e}(0);ic.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Kg=function t(n){function e(t){return new ic(t,n)}return e.tension=function(n){return t(+n)},e}(0);uc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var tm=function t(n){function e(t){return n?new uc(t,n):new ec(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);ac.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var nm=function t(n){function e(t){return n?new ac(t,n):new rc(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);cc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var 
e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var em=function t(n){function e(t){return n?new cc(t,n):new ic(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);sc.prototype={areaStart:Zg,areaEnd:Zg,lineStart:function(){this._point=0},lineEnd:function(){this._point&&this._context.closePath()},point:function(t,n){t=+t,n=+n,this._point?this._context.lineTo(t,n):(this._point=1,this._context.moveTo(t,n))}};dc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=this._t0=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x1,this._y1);break;case 3:pc(this,this._t0,hc(this,this._t0))}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){var e=NaN;if(t=+t,n=+n,t!==this._x1||n!==this._y1){switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,pc(this,hc(this,e=lc(this,t,n)),e);break;default:pc(this,this._t0,e=lc(this,t,n))}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n,this._t0=e}}},(vc.prototype=Object.create(dc.prototype)).point=function(t,n){dc.prototype.point.call(this,n,t)},_c.prototype={moveTo:function(t,n){this._context.moveTo(n,t)},closePath:function(){this._context.closePath()},lineTo:function(t,n){this._context.lineTo(n,t)},bezierCurveTo:function(t,n,e,r,i,o){this._context.bezierCurveTo(n,t,r,e,o,i)}},yc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=[],this._y=[]},lineEnd:function(){var t=this._x,n=this._y,e=t.length;if(e)if(this._line?this._context.lineTo(t[0],n[0]):this._context.moveTo(t[0],n[0]),2===e)this._context.lineTo(t[1],n[1]);else for(var r=gc(t),i=gc(n),o=0,u=1;u=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,n),this._context.lineTo(t,n);else{var e=this._x*(1-this._t)+t*this._t;this._context.lineTo(e,this._y),this._context.lineTo(e,n)}}this._x=t,this._y=n}};var rm=function(t,n){if((i=t.length)>1)for(var e,r,i,o=1,u=t[n[0]],a=u.length;o=0;)e[n]=n;return e},om=function(t){var n=t.map(bc);return im(t).sort(function(t,e){return n[t]-n[e]})},um=function(t){return function(){return t}};Tc.prototype={constructor:Tc,insert:function(t,n){var e,r,i;if(t){if(n.P=t,n.N=t.N,t.N&&(t.N.P=n),t.N=n,t.R){for(t=t.R;t.L;)t=t.L;t.L=n}else t.R=n;e=t}else 
this._?(t=Ec(this._),n.P=null,n.N=t,t.P=t.L=n,e=t):(n.P=n.N=null,this._=n,e=null);for(n.L=n.R=null,n.U=e,n.C=!0,t=n;e&&e.C;)e===(r=e.U).L?(i=r.R)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.R&&(Nc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Sc(this,r)):(i=r.L)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.L&&(Sc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Nc(this,r)),e=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var n,e,r,i=t.U,o=t.L,u=t.R;if(e=o?u?Ec(u):o:u,i?i.L===t?i.L=e:i.R=e:this._=e,o&&u?(r=e.C,e.C=t.C,e.L=o,o.U=e,e!==u?(i=e.U,e.U=t.U,t=e.R,i.L=t,e.R=u,u.U=e):(e.U=i,i=e,t=e.R)):(r=t.C,t=e),t&&(t.U=i),!r)if(t&&t.C)t.C=!1;else{do{if(t===this._)break;if(t===i.L){if((n=i.R).C&&(n.C=!1,i.C=!0,Nc(this,i),n=i.R),n.L&&n.L.C||n.R&&n.R.C){n.R&&n.R.C||(n.L.C=!1,n.C=!0,Sc(this,n),n=i.R),n.C=i.C,i.C=n.R.C=!1,Nc(this,i),t=this._;break}}else if((n=i.L).C&&(n.C=!1,i.C=!0,Sc(this,i),n=i.L),n.L&&n.L.C||n.R&&n.R.C){n.L&&n.L.C||(n.R.C=!1,n.C=!0,Nc(this,n),n=i.L),n.C=i.C,i.C=n.L.C=!1,Sc(this,i),t=this._;break}n.C=!0,t=i,i=i.U}while(!t.C);t&&(t.C=!1)}}};var am,cm,sm,fm,lm,hm=[],pm=[],dm=1e-6,vm=1e-12;Kc.prototype={constructor:Kc,polygons:function(){var t=this.edges;return this.cells.map(function(n){var e=n.halfedges.map(function(e){return Dc(n,t[e])});return e.data=n.site.data,e})},triangles:function(){var t=[],n=this.edges;return this.cells.forEach(function(e,r){if(o=(i=e.halfedges).length)for(var i,o,u,a=e.site,c=-1,s=n[i[o-1]],f=s.left===a?s.right:s.left;++c=a)return null;var c=t-i.site[0],s=n-i.site[1],f=c*c+s*s;do{i=o.cells[r=u],u=null,i.halfedges.forEach(function(e){var r=o.edges[e],a=r.left;if(a!==i.site&&a||(a=r.right)){var c=t-a[0],s=n-a[1],l=c*c+s*s;lt?1:n>=t?0:NaN},t.deviation=_s,t.extent=ys,t.histogram=function(){function t(t){var o,u,a=t.length,c=new Array(a);for(o=0;ol;)h.pop(),--p;var d,v=new Array(p+1);for(o=0;o<=p;++o)(d=v[o]=[]).x0=o>0?h[o-1]:f,d.x1=o=e)for(r=e;++or&&(r=e)}else for(;++o=e)for(r=e;++or&&(r=e);return r},t.mean=function(t,n){var 
e,r=t.length,i=r,o=-1,u=0;if(null==n)for(;++o=o.length)return null!=e&&n.sort(e),null!=r?r(n):n;for(var c,s,f,l=-1,h=n.length,p=o[i++],d=we(),v=u();++lo.length)return t;var i,a=u[e-1];return null!=r&&e>=o.length?i=t.entries():(i=[],t.each(function(t,r){i.push({key:r,values:n(t,e)})})),null!=a?i.sort(function(t,n){return a(t.key,n.key)}):i}var e,r,i,o=[],u=[];return i={object:function(n){return t(n,0,Me,Te)},map:function(n){return t(n,0,ke,Ne)},entries:function(e){return n(t(e,0,ke,Ne),0)},key:function(t){return o.push(t),i},sortKeys:function(t){return u[o.length-1]=t,i},sortValues:function(t){return e=t,i},rollup:function(t){return r=t,i}}},t.set=Ee,t.map=we,t.keys=function(t){var n=[];for(var e in t)n.push(e);return n},t.values=function(t){var n=[];for(var e in t)n.push(t[e]);return n},t.entries=function(t){var n=[];for(var e in t)n.push({key:e,value:t[e]});return n},t.color=Tt,t.rgb=Et,t.hsl=Pt,t.lab=Ut,t.hcl=jt,t.cubehelix=$t,t.dispatch=h,t.drag=function(){function n(t){t.on("mousedown.drag",e).filter(bt).on("touchstart.drag",o).on("touchmove.drag",u).on("touchend.drag touchcancel.drag",a).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(){if(!p&&d.apply(this,arguments)){var n=c("mouse",v.apply(this,arguments),Ks,this,arguments);n&&(cf(t.event.view).on("mousemove.drag",r,!0).on("mouseup.drag",i,!0),lf(t.event.view),vt(),l=!1,s=t.event.clientX,f=t.event.clientY,n("start"))}}function r(){if(ff(),!l){var n=t.event.clientX-s,e=t.event.clientY-f;l=n*n+e*e>x}y.mouse("drag")}function i(){cf(t.event.view).on("mousemove.drag mouseup.drag",null),_t(t.event.view,l),ff(),y.mouse("end")}function o(){if(d.apply(this,arguments)){var n,e,r=t.event.changedTouches,i=v.apply(this,arguments),o=r.length;for(n=0;nc+p||is+p||or.index){var d=c-a.x-a.vx,v=s-a.y-a.vy,_=d*d+v*v;_t.r&&(t.r=t[n].r)}function r(){if(i){var n,e,r=i.length;for(o=new 
Array(r),n=0;n=f)){(t.data!==o||t.next)&&(0===i&&(i=cp(),p+=i*i),0===c&&(c=cp(),p+=c*c),p1?(null==n?l.remove(t):l.set(t,i(n)),o):l.get(t)},find:function(n,e,r){var i,o,u,a,c,s=0,f=t.length;for(null==r?r=1/0:r*=r,s=0;s1?(d.on(t,n),o):d.on(t)}}},t.forceX=function(t){function n(t){for(var n,e=0,u=r.length;exr(r[0],r[1])&&(r[1]=i[1]),xr(i[0],r[1])>xr(r[0],r[1])&&(r[0]=i[0])):o.push(r=i);for(u=-1/0,n=0,r=o[e=o.length-1];n<=e;r=i,++n)i=o[n],(a=xr(r[1],i[0]))>u&&(u=a,Lp=i[0],Up=r[1])}return Bp=jp=null,Lp===1/0||qp===1/0?[[NaN,NaN],[NaN,NaN]]:[[Lp,qp],[Up,Dp]]},t.geoCentroid=function(t){Hp=Xp=$p=Vp=Wp=Zp=Gp=Jp=Qp=Kp=td=0,Ed(t,Ld);var n=Qp,e=Kp,r=td,i=n*n+e*e+r*r;return i<1e-12&&(n=Zp,e=Gp,r=Jp,Xp=.12&&i<.234&&r>=-.425&&r<-.214?s:i>=.166&&i<.234&&r>=-.214&&r<-.115?f:c).invert(t)},t.stream=function(t){return e&&r===t?e:e=Ri([c.stream(r=t),s.stream(t),f.stream(t)])},t.precision=function(t){return arguments.length?(c.precision(t),s.precision(t),f.precision(t),n()):c.precision()},t.scale=function(n){return arguments.length?(c.scale(n),s.scale(.35*n),f.scale(n),t.translate(c.translate())):c.scale()},t.translate=function(t){if(!arguments.length)return c.translate();var e=c.scale(),r=+t[0],a=+t[1];return i=c.translate(t).clipExtent([[r-.455*e,a-.238*e],[r+.455*e,a+.238*e]]).stream(l),o=s.translate([r-.307*e,a+.201*e]).clipExtent([[r-.425*e+ad,a+.12*e+ad],[r-.214*e-ad,a+.234*e-ad]]).stream(l),u=f.translate([r-.205*e,a+.212*e]).clipExtent([[r-.214*e+ad,a+.166*e+ad],[r-.115*e-ad,a+.234*e-ad]]).stream(l),n()},t.fitExtent=function(n,e){return Ti(t,n,e)},t.fitSize=function(n,e){return ki(t,n,e)},t.scale(1070)},t.geoAzimuthalEqualArea=function(){return Ei($v).scale(124.75).clipAngle(179.999)},t.geoAzimuthalEqualAreaRaw=$v,t.geoAzimuthalEquidistant=function(){return Ei(Vv).scale(79.4188).clipAngle(179.999)},t.geoAzimuthalEquidistantRaw=Vv,t.geoConicConformal=function(){return 
Ci(Fi).scale(109.5).parallels([30,30])},t.geoConicConformalRaw=Fi,t.geoConicEqualArea=Hv,t.geoConicEqualAreaRaw=Pi,t.geoConicEquidistant=function(){return Ci(Yi).scale(131.154).center([0,13.9389])},t.geoConicEquidistantRaw=Yi,t.geoEquirectangular=function(){return Ei(Ii).scale(152.63)},t.geoEquirectangularRaw=Ii,t.geoGnomonic=function(){return Ei(Bi).scale(144.049).clipAngle(60)},t.geoGnomonicRaw=Bi,t.geoIdentity=function(){function t(){return i=o=null,u}var n,e,r,i,o,u,a=1,c=0,s=0,f=1,l=1,h=lv,p=null,d=lv;return u={stream:function(t){return i&&o===t?i:i=h(d(o=t))},clipExtent:function(i){return arguments.length?(d=null==i?(p=n=e=r=null,lv):Br(p=+i[0][0],n=+i[0][1],e=+i[1][0],r=+i[1][1]),t()):null==p?null:[[p,n],[e,r]]},scale:function(n){return arguments.length?(h=ji((a=+n)*f,a*l,c,s),t()):a},translate:function(n){return arguments.length?(h=ji(a*f,a*l,c=+n[0],s=+n[1]),t()):[c,s]},reflectX:function(n){return arguments.length?(h=ji(a*(f=n?-1:1),a*l,c,s),t()):f<0},reflectY:function(n){return arguments.length?(h=ji(a*f,a*(l=n?-1:1),c,s),t()):l<0},fitExtent:function(t,n){return Ti(u,t,n)},fitSize:function(t,n){return ki(u,t,n)}}},t.geoProjection=Ei,t.geoProjectionMutator=Ai,t.geoMercator=function(){return Di(Ui).scale(961/ld)},t.geoMercatorRaw=Ui,t.geoOrthographic=function(){return Ei(Hi).scale(249.5).clipAngle(90+ad)},t.geoOrthographicRaw=Hi,t.geoStereographic=function(){return Ei(Xi).scale(250).clipAngle(142)},t.geoStereographicRaw=Xi,t.geoTransverseMercator=function(){var t=Di($i),n=t.center,e=t.rotate;return t.center=function(t){return arguments.length?n([-t[1],t[0]]):(t=n(),[t[1],-t[0]])},t.rotate=function(t){return arguments.length?e([t[0],t[1],t.length>2?t[2]+90:90]):(t=e(),[t[0],t[1],t[2]-90])},e([0,0,90]).scale(159.155)},t.geoTransverseMercatorRaw=$i,t.geoRotation=Wd,t.geoStream=Ed,t.geoTransform=function(t){return{stream:wi(t)}},t.cluster=function(){function t(t){var o,u=0;t.eachAfter(function(t){var 
e=t.children;e?(t.x=Wi(e),t.y=Gi(e)):(t.x=o?u+=n(t,o):0,t.y=0,o=t)});var a=Qi(t),c=Ki(t),s=a.x-n(a,c)/2,f=c.x+n(c,a)/2;return t.eachAfter(i?function(n){n.x=(n.x-t.x)*e,n.y=(t.y-n.y)*r}:function(n){n.x=(n.x-s)/(f-s)*e,n.y=(1-(t.y?n.y/t.y:1))*r})}var n=Vi,e=1,r=1,i=!1;return t.separation=function(e){return arguments.length?(n=e,t):n},t.size=function(n){return arguments.length?(i=!1,e=+n[0],r=+n[1],t):i?null:[e,r]},t.nodeSize=function(n){return arguments.length?(i=!0,e=+n[0],r=+n[1],t):i?[e,r]:null},t},t.hierarchy=eo,t.pack=function(){function t(t){return t.x=e/2,t.y=r/2,n?t.eachBefore(No(n)).eachAfter(So(i,.5)).eachBefore(Eo(1)):t.eachBefore(No(ko)).eachAfter(So(To,1)).eachAfter(So(i,t.r/Math.min(e,r))).eachBefore(Eo(Math.min(e,r)/(2*t.r))),t}var n=null,e=1,r=1,i=To;return t.radius=function(e){return arguments.length?(n=wo(e),t):n},t.size=function(n){return arguments.length?(e=+n[0],r=+n[1],t):[e,r]},t.padding=function(n){return arguments.length?(i="function"==typeof n?n:Gv(+n),t):i},t},t.packSiblings=function(t){return bo(t),t},t.packEnclose=Zv,t.partition=function(){function t(t){var u=t.height+1;return t.x0=t.y0=i,t.x1=e,t.y1=r/u,t.eachBefore(n(r,u)),o&&t.eachBefore(Jv),t}function n(t,n){return function(e){e.children&&Qv(e,e.x0,t*(e.depth+1)/n,e.x1,t*(e.depth+2)/n);var r=e.x0,o=e.y0,u=e.x1-i,a=e.y1-i;u0)throw new Error("cycle");return o}var n=Ao,e=Co;return t.id=function(e){return arguments.length?(n=Mo(e),t):n},t.parentId=function(n){return arguments.length?(e=Mo(n),t):e},t},t.tree=function(){function t(t){var r=Oo(t);if(r.eachAfter(n),r.parent.m=-r.z,r.eachBefore(e),c)t.eachBefore(i);else{var s=t,f=t,l=t;t.eachBefore(function(t){t.xf.x&&(f=t),t.depth>l.depth&&(l=t)});var h=s===f?1:o(s,f)/2,p=h-s.x,d=u/(f.x+h+p),v=a/(l.depth||1);t.eachBefore(function(t){t.x=(t.x+p)*d,t.y=t.depth*v})}return t}function n(t){var n=t.children,e=t.parent.children,i=t.i?e[t.i-1]:null;if(n){qo(t);var u=(n[0].z+n[n.length-1].z)/2;i?(t.z=i.z+o(t._,i._),t.m=t.z-u):t.z=u}else 
i&&(t.z=i.z+o(t._,i._));t.parent.A=r(t,i,t.parent.A||e[0])}function e(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function r(t,n,e){if(n){for(var r,i=t,u=t,a=n,c=i.parent.children[0],s=i.m,f=u.m,l=a.m,h=c.m;a=Ro(a),i=Po(i),a&&i;)c=Po(c),(u=Ro(u)).a=t,(r=a.z+l-i.z-s+o(a._,i._))>0&&(Lo(Uo(a,t,e),t,r),s+=r,f+=r),l+=a.m,s+=i.m,h+=c.m,f+=u.m;a&&!Ro(u)&&(u.t=a,u.m+=l-f),i&&!Po(c)&&(c.t=i,c.m+=s-h,e=t)}return e}function i(t){t.x*=u,t.y=t.depth*a}var o=zo,u=1,a=1,c=null;return t.separation=function(n){return arguments.length?(o=n,t):o},t.size=function(n){return arguments.length?(c=!1,u=+n[0],a=+n[1],t):c?null:[u,a]},t.nodeSize=function(n){return arguments.length?(c=!0,u=+n[0],a=+n[1],t):c?[u,a]:null},t},t.treemap=function(){function t(t){return t.x0=t.y0=0,t.x1=i,t.y1=o,t.eachBefore(n),u=[0],r&&t.eachBefore(Jv),t}function n(t){var n=u[t.depth],r=t.x0+n,i=t.y0+n,o=t.x1-n,h=t.y1-n;o=n-1){var s=c[t];return s.x0=r,s.y0=i,s.x1=u,void(s.y1=a)}for(var l=f[t],h=e/2+l,p=t+1,d=n-1;p>>1;f[v]a-i){var g=(r*y+u*_)/e;o(t,p,_,r,i,g,a),o(p,n,y,g,i,u,a)}else{var m=(i*y+a*_)/e;o(t,p,_,r,i,u,m),o(p,n,y,r,m,u,a)}}var u,a,c=t.children,s=c.length,f=new Array(s+1);for(f[0]=a=u=0;u=0;--n)s.push(t[r[o[n]][2]]);for(n=+a;na!=s>a&&u<(c-e)*(a-r)/(s-r)+e&&(f=!f),c=e,s=r;return f},t.polygonLength=function(t){for(var n,e,r=-1,i=t.length,o=t[i-1],u=o[0],a=o[1],c=0;++r1)&&(t-=Math.floor(t));var n=Math.abs(t-.5);return ig.h=360*t-100,ig.s=1.5-1.5*n,ig.l=.8-.9*n,ig+""},t.interpolateWarm=eg,t.interpolateCool=rg,t.interpolateViridis=og,t.interpolateMagma=ug,t.interpolateInferno=ag,t.interpolatePlasma=cg,t.scaleSequential=Ea,t.creator=Hs,t.local=m,t.matcher=Zs,t.mouse=Ks,t.namespace=js,t.namespaces=Bs,t.select=cf,t.selectAll=function(t){return"string"==typeof t?new pt([document.querySelectorAll(t)],[document.documentElement]):new pt([null==t?[]:t],af)},t.selection=dt,t.selector=tf,t.selectorAll=nf,t.style=B,t.touch=sf,t.touches=function(t,n){null==n&&(n=Js().touches);for(var e=0,r=n?n.length:0,i=new 
Array(r);eh;if(c||(c=t=ve()),lyg)if(d>xg-yg)c.moveTo(l*hg(h),l*vg(h)),c.arc(0,0,l,h,p,!v),f>yg&&(c.moveTo(f*hg(p),f*vg(p)),c.arc(0,0,f,p,h,v));else{var _,y,g=h,m=p,x=h,b=p,w=d,M=d,T=a.apply(this,arguments)/2,k=T>yg&&(i?+i.apply(this,arguments):_g(f*f+l*l)),N=dg(fg(l-f)/2,+r.apply(this,arguments)),S=N,E=N;if(k>yg){var A=Ca(k/f*vg(T)),C=Ca(k/l*vg(T));(w-=2*A)>yg?(A*=v?1:-1,x+=A,b-=A):(w=0,x=b=(h+p)/2),(M-=2*C)>yg?(C*=v?1:-1,g+=C,m-=C):(M=0,g=m=(h+p)/2)}var z=l*hg(g),P=l*vg(g),R=f*hg(b),L=f*vg(b);if(N>yg){var q=l*hg(m),U=l*vg(m),D=f*hg(x),O=f*vg(x);if(dyg?Ua(z,P,D,O,q,U,R,L):[R,L],I=z-F[0],Y=P-F[1],B=q-F[0],j=U-F[1],H=1/vg(Aa((I*B+Y*j)/(_g(I*I+Y*Y)*_g(B*B+j*j)))/2),X=_g(F[0]*F[0]+F[1]*F[1]);S=dg(N,(f-X)/(H-1)),E=dg(N,(l-X)/(H+1))}}M>yg?E>yg?(_=Da(D,O,z,P,l,E,v),y=Da(q,U,R,L,l,E,v),c.moveTo(_.cx+_.x01,_.cy+_.y01),Eyg&&w>yg?S>yg?(_=Da(R,L,q,U,f,-S,v),y=Da(z,P,D,O,f,-S,v),c.lineTo(_.cx+_.x01,_.cy+_.y01),S0&&(p+=l);for(null!=e?d.sort(function(t,n){return e(v[t],v[n])}):null!=r&&d.sort(function(n,e){return r(t[n],t[e])}),a=0,s=p?(y-h*m)/p:0;a0?l*s:0)+m,v[c]={data:t[c],index:a,value:l,startAngle:_,endAngle:f,padAngle:g};return v}var n=kg,e=Tg,r=null,i=sg(0),o=sg(xg),u=sg(0);return t.value=function(e){return arguments.length?(n="function"==typeof e?e:sg(+e),t):n},t.sortValues=function(n){return arguments.length?(e=n,r=null,t):e},t.sort=function(n){return arguments.length?(r=n,e=null,t):r},t.startAngle=function(n){return arguments.length?(i="function"==typeof n?n:sg(+n),t):i},t.endAngle=function(n){return arguments.length?(o="function"==typeof n?n:sg(+n),t):o},t.padAngle=function(n){return arguments.length?(u="function"==typeof n?n:sg(+n),t):u},t},t.areaRadial=Eg,t.radialArea=Eg,t.lineRadial=Sg,t.radialLine=Sg,t.pointRadial=Ag,t.linkHorizontal=function(){return $a(Va)},t.linkVertical=function(){return $a(Wa)},t.linkRadial=function(){var t=$a(Za);return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t},t.symbol=function(){function t(){var 
t;if(r||(r=t=ve()),n.apply(this,arguments).draw(r,+e.apply(this,arguments)),t)return r=null,t+""||null}var n=sg(zg),e=sg(64),r=null;return t.type=function(e){return arguments.length?(n="function"==typeof e?e:sg(e),t):n},t.size=function(n){return arguments.length?(e="function"==typeof n?n:sg(+n),t):e},t.context=function(n){return arguments.length?(r=null==n?null:n,t):r},t},t.symbols=Wg,t.symbolCircle=zg,t.symbolCross=Pg,t.symbolDiamond=qg,t.symbolSquare=Ig,t.symbolStar=Fg,t.symbolTriangle=Bg,t.symbolWye=Vg,t.curveBasisClosed=function(t){return new Qa(t)},t.curveBasisOpen=function(t){return new Ka(t)},t.curveBasis=function(t){return new Ja(t)},t.curveBundle=Gg,t.curveCardinalClosed=Qg,t.curveCardinalOpen=Kg,t.curveCardinal=Jg,t.curveCatmullRomClosed=nm,t.curveCatmullRomOpen=em,t.curveCatmullRom=tm,t.curveLinearClosed=function(t){return new sc(t)},t.curveLinear=bg,t.curveMonotoneX=function(t){return new dc(t)},t.curveMonotoneY=function(t){return new vc(t)},t.curveNatural=function(t){return new yc(t)},t.curveStep=function(t){return new mc(t,.5)},t.curveStepAfter=function(t){return new mc(t,1)},t.curveStepBefore=function(t){return new mc(t,0)},t.stack=function(){function t(t){var o,u,a=n.apply(this,arguments),c=t.length,s=a.length,f=new Array(s);for(o=0;o0){for(var e,r,i,o=0,u=t[0].length;o1)for(var e,r,i,o,u,a,c=0,s=t[n[0]].length;c=0?(r[0]=o,r[1]=o+=i):i<0?(r[1]=u,r[0]=u+=i):r[0]=o},t.stackOffsetNone=rm,t.stackOffsetSilhouette=function(t,n){if((e=t.length)>0){for(var e,r=0,i=t[n[0]],o=i.length;r0&&(r=(e=t[n[0]]).length)>0){for(var e,r,i,o=0,u=1;uUl&&e.name===n)return new Gn([[t]],yh,n,+r)}return null},t.interrupt=jl,t.voronoi=function(){function t(t){return new Kc(t.map(function(r,i){var o=[Math.round(n(r,i,t)/dm)*dm,Math.round(e(r,i,t)/dm)*dm];return o.index=i,o.data=r,o}),r)}var n=wc,e=Mc,r=null;return t.polygons=function(n){return t(n).polygons()},t.links=function(n){return t(n).links()},t.triangles=function(n){return t(n).triangles()},t.x=function(e){return 
arguments.length?(n="function"==typeof e?e:um(+e),t):n},t.y=function(n){return arguments.length?(e="function"==typeof n?n:um(+n),t):e},t.extent=function(n){return arguments.length?(r=null==n?null:[[+n[0][0],+n[0][1]],[+n[1][0],+n[1][1]]],t):r&&[[r[0][0],r[0][1]],[r[1][0],r[1][1]]]},t.size=function(n){return arguments.length?(r=null==n?null:[[0,0],[+n[0],+n[1]]],t):r&&[r[1][0]-r[0][0],r[1][1]-r[0][1]]},t},t.zoom=function(){function n(t){t.property("__zoom",us).on("wheel.zoom",s).on("mousedown.zoom",f).on("dblclick.zoom",l).filter(cs).on("touchstart.zoom",p).on("touchmove.zoom",d).on("touchend.zoom touchcancel.zoom",v).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(t,n){return(n=Math.max(b,Math.min(w,n)))===t.k?t:new ns(n,t.x,t.y)}function r(t,n,e){var r=n[0]-e[0]*t.k,i=n[1]-e[1]*t.k;return r===t.x&&i===t.y?t:new ns(t.k,r,i)}function i(t,n){var e=t.invertX(n[0][0])-M,r=t.invertX(n[1][0])-T,i=t.invertY(n[0][1])-k,o=t.invertY(n[1][1])-S;return t.translate(r>e?(e+r)/2:Math.min(0,e)||Math.max(0,r),o>i?(i+o)/2:Math.min(0,i)||Math.max(0,o))}function o(t){return[(+t[0][0]+ +t[1][0])/2,(+t[0][1]+ +t[1][1])/2]}function u(t,n,e){t.on("start.zoom",function(){a(this,arguments).start()}).on("interrupt.zoom end.zoom",function(){a(this,arguments).end()}).tween("zoom",function(){var t=this,r=arguments,i=a(t,r),u=m.apply(t,r),c=e||o(u),s=Math.max(u[1][0]-u[0][0],u[1][1]-u[0][1]),f=t.__zoom,l="function"==typeof n?n.apply(t,r):n,h=A(f.invert(c).concat(s/f.k),l.invert(c).concat(s/l.k));return function(t){if(1===t)t=l;else{var n=h(t),e=s/n[2];t=new ns(e,c[0]-n[0]*e,c[1]-n[1]*e)}i.zoom(null,t)}})}function a(t,n){for(var e,r=0,i=C.length;rL}n.zoom("mouse",i(r(n.that.__zoom,n.mouse[0]=Ks(n.that),n.mouse[1]),n.extent))},!0).on("mouseup.zoom",function(){e.on("mousemove.zoom 
mouseup.zoom",null),_t(t.event.view,n.moved),gm(),n.end()},!0),o=Ks(this),u=t.event.clientX,c=t.event.clientY;lf(t.event.view),rs(),n.mouse=[o,this.__zoom.invert(o)],jl(this),n.start()}}function l(){if(g.apply(this,arguments)){var o=this.__zoom,a=Ks(this),c=o.invert(a),s=i(r(e(o,o.k*(t.event.shiftKey?.5:2)),a,c),m.apply(this,arguments));gm(),E>0?cf(this).transition().duration(E).call(u,s,a):cf(this).call(n.transform,s)}}function p(){if(g.apply(this,arguments)){var n,e,r,i,o=a(this,arguments),u=t.event.changedTouches,c=u.length;for(rs(),e=0;e
     
    ';this.parentEl=i("object"==typeof e&&e.parentEl&&i(e.parentEl).length?e.parentEl:this.parentEl),this.container=i(n).appendTo(this.parentEl),this.setOptions(e,a);var r=this.container;i.each(this.buttonClasses,function(t,e){r.find("button").addClass(e)}),this.container.find(".daterangepicker_start_input label").html(this.locale.fromLabel),this.container.find(".daterangepicker_end_input label").html(this.locale.toLabel),this.applyClass.length&&this.container.find(".applyBtn").addClass(this.applyClass),this.cancelClass.length&&this.container.find(".cancelBtn").addClass(this.cancelClass),this.container.find(".applyBtn").html(this.locale.applyLabel),this.container.find(".cancelBtn").html(this.locale.cancelLabel),this.container.find(".calendar").on("click.daterangepicker",".prev",i.proxy(this.clickPrev,this)).on("click.daterangepicker",".next",i.proxy(this.clickNext,this)).on("click.daterangepicker","td.available",i.proxy(this.clickDate,this)).on("mouseenter.daterangepicker","td.available",i.proxy(this.hoverDate,this)).on("mouseleave.daterangepicker","td.available",i.proxy(this.updateFormInputs,this)).on("change.daterangepicker","select.yearselect",i.proxy(this.updateMonthYear,this)).on("change.daterangepicker","select.monthselect",i.proxy(this.updateMonthYear,this)).on("change.daterangepicker","select.hourselect,select.minuteselect,select.secondselect,select.ampmselect",i.proxy(this.updateTime,this)),this.container.find(".ranges").on("click.daterangepicker","button.applyBtn",i.proxy(this.clickApply,this)).on("click.daterangepicker","button.cancelBtn",i.proxy(this.clickCancel,this)).on("click.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.showCalendars,this)).on("change.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.inputsChanged,this)).on("keydown.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.inputsKeydown,this)).on("click.daterangepicker","li",
i.proxy(this.clickRange,this)).on("mouseenter.daterangepicker","li",i.proxy(this.enterRange,this)).on("mouseleave.daterangepicker","li",i.proxy(this.updateFormInputs,this)),this.element.is("input")?this.element.on({"click.daterangepicker":i.proxy(this.show,this),"focus.daterangepicker":i.proxy(this.show,this),"keyup.daterangepicker":i.proxy(this.updateFromControl,this)}):this.element.on("click.daterangepicker",i.proxy(this.toggle,this))};s.prototype={constructor:s,setOptions:function(t,e){if(this.startDate=a().startOf("day"),this.endDate=a().endOf("day"),this.timeZone=a().zone(),this.minDate=!1,this.maxDate=!1,this.dateLimit=!1,this.showDropdowns=!1,this.showWeekNumbers=!1,this.timePicker=!1,this.timePickerSeconds=!1,this.timePickerIncrement=30,this.timePicker12Hour=!0,this.singleDatePicker=!1,this.ranges={},this.opens="right",this.element.hasClass("pull-right")&&(this.opens="left"),this.buttonClasses=["btn","btn-small btn-sm"],this.applyClass="btn-success",this.cancelClass="btn-default",this.format="MM/DD/YYYY",this.separator=" - ",this.locale={applyLabel:"Apply",cancelLabel:"Cancel",fromLabel:"From",toLabel:"To",weekLabel:"W",customRangeLabel:"Custom Range",daysOfWeek:a.weekdaysMin(),monthNames:a.monthsShort(),firstDay:a.localeData()._week.dow},this.cb=function(){},"string"==typeof t.format&&(this.format=t.format),"string"==typeof t.separator&&(this.separator=t.separator),"string"==typeof t.startDate&&(this.startDate=a(t.startDate,this.format)),"string"==typeof t.endDate&&(this.endDate=a(t.endDate,this.format)),"string"==typeof t.minDate&&(this.minDate=a(t.minDate,this.format)),"string"==typeof t.maxDate&&(this.maxDate=a(t.maxDate,this.format)),"object"==typeof t.startDate&&(this.startDate=a(t.startDate)),"object"==typeof t.endDate&&(this.endDate=a(t.endDate)),"object"==typeof t.minDate&&(this.minDate=a(t.minDate)),"object"==typeof t.maxDate&&(this.maxDate=a(t.maxDate)),"string"==typeof t.applyClass&&(this.applyClass=t.applyClass),"string"==typeof 
t.cancelClass&&(this.cancelClass=t.cancelClass),"object"==typeof t.dateLimit&&(this.dateLimit=t.dateLimit),"object"==typeof t.locale&&("object"==typeof t.locale.daysOfWeek&&(this.locale.daysOfWeek=t.locale.daysOfWeek.slice()),"object"==typeof t.locale.monthNames&&(this.locale.monthNames=t.locale.monthNames.slice()),"number"==typeof t.locale.firstDay&&(this.locale.firstDay=t.locale.firstDay),"string"==typeof t.locale.applyLabel&&(this.locale.applyLabel=t.locale.applyLabel),"string"==typeof t.locale.cancelLabel&&(this.locale.cancelLabel=t.locale.cancelLabel),"string"==typeof t.locale.fromLabel&&(this.locale.fromLabel=t.locale.fromLabel),"string"==typeof t.locale.toLabel&&(this.locale.toLabel=t.locale.toLabel),"string"==typeof t.locale.weekLabel&&(this.locale.weekLabel=t.locale.weekLabel),"string"==typeof t.locale.customRangeLabel&&(this.locale.customRangeLabel=t.locale.customRangeLabel)),"string"==typeof t.opens&&(this.opens=t.opens),"boolean"==typeof t.showWeekNumbers&&(this.showWeekNumbers=t.showWeekNumbers),"string"==typeof t.buttonClasses&&(this.buttonClasses=[t.buttonClasses]),"object"==typeof t.buttonClasses&&(this.buttonClasses=t.buttonClasses),"boolean"==typeof t.showDropdowns&&(this.showDropdowns=t.showDropdowns),"boolean"==typeof t.singleDatePicker&&(this.singleDatePicker=t.singleDatePicker,this.singleDatePicker&&(this.endDate=this.startDate.clone())),"boolean"==typeof t.timePicker&&(this.timePicker=t.timePicker),"boolean"==typeof t.timePickerSeconds&&(this.timePickerSeconds=t.timePickerSeconds),"number"==typeof t.timePickerIncrement&&(this.timePickerIncrement=t.timePickerIncrement),"boolean"==typeof t.timePicker12Hour&&(this.timePicker12Hour=t.timePicker12Hour),0!=this.locale.firstDay)for(var s=this.locale.firstDay;s>0;)this.locale.daysOfWeek.push(this.locale.daysOfWeek.shift()),s--;var n,r,o;if("undefined"==typeof t.startDate&&"undefined"==typeof t.endDate&&i(this.element).is("input[type=text]")){var 
h=i(this.element).val(),l=h.split(this.separator);n=r=null,2==l.length?(n=a(l[0],this.format),r=a(l[1],this.format)):this.singleDatePicker&&""!==h&&(n=a(h,this.format),r=a(h,this.format)),null!==n&&null!==r&&(this.startDate=n,this.endDate=r)}if("string"==typeof t.timeZone||"number"==typeof t.timeZone?(this.timeZone=t.timeZone,this.startDate.zone(this.timeZone),this.endDate.zone(this.timeZone)):this.timeZone=a(this.startDate).zone(),"object"==typeof t.ranges){for(o in t.ranges)n="string"==typeof t.ranges[o][0]?a(t.ranges[o][0],this.format):a(t.ranges[o][0]),r="string"==typeof t.ranges[o][1]?a(t.ranges[o][1],this.format):a(t.ranges[o][1]),this.minDate&&n.isBefore(this.minDate)&&(n=a(this.minDate)),this.maxDate&&r.isAfter(this.maxDate)&&(r=a(this.maxDate)),this.minDate&&r.isBefore(this.minDate)||this.maxDate&&n.isAfter(this.maxDate)||(this.ranges[o]=[n,r]);var c="
      ";for(o in this.ranges)c+="
    • "+o+"
    • ";c+="
    • "+this.locale.customRangeLabel+"
    • ",c+="
    ",this.container.find(".ranges ul").remove(),this.container.find(".ranges").prepend(c)}if("function"==typeof e&&(this.cb=e),this.timePicker||(this.startDate=this.startDate.startOf("day"),this.endDate=this.endDate.endOf("day")),this.singleDatePicker?(this.opens="right",this.container.addClass("single"),this.container.find(".calendar.right").show(),this.container.find(".calendar.left").hide(),this.timePicker?this.container.find(".ranges .daterangepicker_start_input, .ranges .daterangepicker_end_input").hide():this.container.find(".ranges").hide(),this.container.find(".calendar.right").hasClass("single")||this.container.find(".calendar.right").addClass("single")):(this.container.removeClass("single"),this.container.find(".calendar.right").removeClass("single"),this.container.find(".ranges").show()),this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.oldChosenLabel=this.chosenLabel,this.leftCalendar={month:a([this.startDate.year(),this.startDate.month(),1,this.startDate.hour(),this.startDate.minute(),this.startDate.second()]),calendar:[]},this.rightCalendar={month:a([this.endDate.year(),this.endDate.month(),1,this.endDate.hour(),this.endDate.minute(),this.endDate.second()]),calendar:[]},"right"==this.opens||"center"==this.opens){var d=this.container.find(".calendar.first"),f=this.container.find(".calendar.second");f.hasClass("single")&&(f.removeClass("single"),d.addClass("single")),d.removeClass("left").addClass("right"),f.removeClass("right").addClass("left"),this.singleDatePicker&&(d.show(),f.hide())}"undefined"!=typeof t.ranges||this.singleDatePicker||this.container.addClass("show-calendar"),this.container.addClass("opens"+this.opens),this.updateView(),this.updateCalendars()},setStartDate:function(t){"string"==typeof t&&(this.startDate=a(t,this.format).zone(this.timeZone)),"object"==typeof 
t&&(this.startDate=a(t)),this.timePicker||(this.startDate=this.startDate.startOf("day")),this.oldStartDate=this.startDate.clone(),this.updateView(),this.updateCalendars(),this.updateInputText()},setEndDate:function(t){"string"==typeof t&&(this.endDate=a(t,this.format).zone(this.timeZone)),"object"==typeof t&&(this.endDate=a(t)),this.timePicker||(this.endDate=this.endDate.endOf("day")),this.oldEndDate=this.endDate.clone(),this.updateView(),this.updateCalendars(),this.updateInputText()},updateView:function(){this.leftCalendar.month.month(this.startDate.month()).year(this.startDate.year()).hour(this.startDate.hour()).minute(this.startDate.minute()),this.rightCalendar.month.month(this.endDate.month()).year(this.endDate.year()).hour(this.endDate.hour()).minute(this.endDate.minute()),this.updateFormInputs()},updateFormInputs:function(){this.container.find("input[name=daterangepicker_start]").val(this.startDate.format(this.format)),this.container.find("input[name=daterangepicker_end]").val(this.endDate.format(this.format)),this.startDate.isSame(this.endDate)||this.startDate.isBefore(this.endDate)?this.container.find("button.applyBtn").removeAttr("disabled"):this.container.find("button.applyBtn").attr("disabled","disabled")},updateFromControl:function(){if(this.element.is("input")&&this.element.val().length){var t=this.element.val().split(this.separator),e=null,i=null;2===t.length&&(e=a(t[0],this.format).zone(this.timeZone),i=a(t[1],this.format).zone(this.timeZone)),(this.singleDatePicker||null===e||null===i)&&(e=a(this.element.val(),this.format).zone(this.timeZone),i=e),i.isBefore(e)||(this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.startDate=e,this.endDate=i,this.startDate.isSame(this.oldStartDate)&&this.endDate.isSame(this.oldEndDate)||this.notify(),this.updateCalendars())}},notify:function(){this.updateView(),this.cb(this.startDate,this.endDate,this.chosenLabel)},move:function(){var 
t={top:0,left:0},e=i(window).width();this.parentEl.is("body")||(t={top:this.parentEl.offset().top-this.parentEl.scrollTop(),left:this.parentEl.offset().left-this.parentEl.scrollLeft()},e=this.parentEl[0].clientWidth+this.parentEl.offset().left),"left"==this.opens?(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,right:e-this.element.offset().left-this.element.outerWidth(),left:"auto"}),this.container.offset().left<0&&this.container.css({right:"auto",left:9})):"center"==this.opens?(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,left:this.element.offset().left-t.left+this.element.outerWidth()/2-this.container.outerWidth()/2,right:"auto"}),this.container.offset().left<0&&this.container.css({right:"auto",left:9})):(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,left:this.element.offset().left-t.left,right:"auto"}),this.container.offset().left+this.container.outerWidth()>i(window).width()&&this.container.css({left:"auto",right:0}))},toggle:function(){this.element.hasClass("active")?this.hide():this.show()},show:function(){this.isShowing||(this.element.addClass("active"),this.container.show(),this.move(),this._outsideClickProxy=i.proxy(function(t){this.outsideClick(t)},this),i(document).on("mousedown.daterangepicker",this._outsideClickProxy).on("touchend.daterangepicker",this._outsideClickProxy).on("click.daterangepicker","[data-toggle=dropdown]",this._outsideClickProxy).on("focusin.daterangepicker",this._outsideClickProxy),this.isShowing=!0,this.element.trigger("show.daterangepicker",this))},outsideClick:function(t){var 
e=i(t.target);"focusin"==t.type||e.closest(this.element).length||e.closest(this.container).length||e.closest(".calendar-date").length||this.hide()},hide:function(){this.isShowing&&(i(document).off(".daterangepicker"),this.element.removeClass("active"),this.container.hide(),this.startDate.isSame(this.oldStartDate)&&this.endDate.isSame(this.oldEndDate)||this.notify(),this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.isShowing=!1,this.element.trigger("hide.daterangepicker",this))},enterRange:function(t){var e=t.target.innerHTML;if(e==this.locale.customRangeLabel)this.updateView();else{var a=this.ranges[e];this.container.find("input[name=daterangepicker_start]").val(a[0].format(this.format)),this.container.find("input[name=daterangepicker_end]").val(a[1].format(this.format))}},showCalendars:function(){this.container.addClass("show-calendar"),this.move(),this.element.trigger("showCalendar.daterangepicker",this)},hideCalendars:function(){this.container.removeClass("show-calendar"),this.element.trigger("hideCalendar.daterangepicker",this)},inputsChanged:function(t){var e=i(t.target),s=a(e.val(),this.format);if(s.isValid()){var n,r;"daterangepicker_start"===e.attr("name")?(n=s,r=this.endDate):(n=this.startDate,r=s),this.setCustomDates(n,r)}},inputsKeydown:function(t){13===t.keyCode&&(this.inputsChanged(t),this.notify())},updateInputText:function(){this.element.is("input")&&!this.singleDatePicker?this.element.val(this.startDate.format(this.format)+this.separator+this.endDate.format(this.format)):this.element.is("input")&&this.element.val(this.endDate.format(this.format))},clickRange:function(t){var e=t.target.innerHTML;if(this.chosenLabel=e,e==this.locale.customRangeLabel)this.showCalendars();else{var 
a=this.ranges[e];this.startDate=a[0],this.endDate=a[1],this.timePicker||(this.startDate.startOf("day"),this.endDate.endOf("day")),this.leftCalendar.month.month(this.startDate.month()).year(this.startDate.year()).hour(this.startDate.hour()).minute(this.startDate.minute()),this.rightCalendar.month.month(this.endDate.month()).year(this.endDate.year()).hour(this.endDate.hour()).minute(this.endDate.minute()),this.updateCalendars(),this.updateInputText(),this.hideCalendars(),this.hide(),this.element.trigger("apply.daterangepicker",this)}},clickPrev:function(t){var e=i(t.target).parents(".calendar");e.hasClass("left")?this.leftCalendar.month.subtract(1,"month"):this.rightCalendar.month.subtract(1,"month"),this.updateCalendars()},clickNext:function(t){var e=i(t.target).parents(".calendar");e.hasClass("left")?this.leftCalendar.month.add(1,"month"):this.rightCalendar.month.add(1,"month"),this.updateCalendars()},hoverDate:function(t){var e=i(t.target).attr("data-title"),a=e.substr(1,1),s=e.substr(3,1),n=i(t.target).parents(".calendar");n.hasClass("left")?this.container.find("input[name=daterangepicker_start]").val(this.leftCalendar.calendar[a][s].format(this.format)):this.container.find("input[name=daterangepicker_end]").val(this.rightCalendar.calendar[a][s].format(this.format))},setCustomDates:function(t,e){if(this.chosenLabel=this.locale.customRangeLabel,t.isAfter(e)){var i=this.endDate.diff(this.startDate);e=a(t).add(i,"ms")}this.startDate=t,this.endDate=e,this.updateView(),this.updateCalendars()},clickDate:function(t){var e,s,n=i(t.target).attr("data-title"),r=n.substr(1,1),o=n.substr(3,1),h=i(t.target).parents(".calendar");if(h.hasClass("left")){if(e=this.leftCalendar.calendar[r][o],s=this.endDate,"object"==typeof this.dateLimit){var l=a(e).add(this.dateLimit).startOf("day");s.isAfter(l)&&(s=l)}}else if(e=this.startDate,s=this.rightCalendar.calendar[r][o],"object"==typeof this.dateLimit){var 
c=a(s).subtract(this.dateLimit).startOf("day");e.isBefore(c)&&(e=c)}this.singleDatePicker&&h.hasClass("left")?s=e.clone():this.singleDatePicker&&h.hasClass("right")&&(e=s.clone()),h.find("td").removeClass("active"),i(t.target).addClass("active"),this.setCustomDates(e,s),this.timePicker||s.endOf("day"),this.singleDatePicker&&!this.timePicker&&this.clickApply()},clickApply:function(){this.updateInputText(),this.hide(),this.element.trigger("apply.daterangepicker",this)},clickCancel:function(){this.startDate=this.oldStartDate,this.endDate=this.oldEndDate,this.chosenLabel=this.oldChosenLabel,this.updateView(),this.updateCalendars(),this.hide(),this.element.trigger("cancel.daterangepicker",this)},updateMonthYear:function(t){var e=i(t.target).closest(".calendar").hasClass("left"),a=e?"left":"right",s=this.container.find(".calendar."+a),n=parseInt(s.find(".monthselect").val(),10),r=s.find(".yearselect").val();this[a+"Calendar"].month.month(n).year(r),this.updateCalendars()},updateTime:function(t){var e=i(t.target).closest(".calendar"),a=e.hasClass("left"),s=parseInt(e.find(".hourselect").val(),10),n=parseInt(e.find(".minuteselect").val(),10),r=0;if(this.timePickerSeconds&&(r=parseInt(e.find(".secondselect").val(),10)),this.timePicker12Hour){var o=e.find(".ampmselect").val();"PM"===o&&12>s&&(s+=12),"AM"===o&&12===s&&(s=0)}if(a){var h=this.startDate.clone();h.hour(s),h.minute(n),h.second(r),this.startDate=h,this.leftCalendar.month.hour(s).minute(n).second(r),this.singleDatePicker&&(this.endDate=h.clone())}else{var 
l=this.endDate.clone();l.hour(s),l.minute(n),l.second(r),this.endDate=l,this.singleDatePicker&&(this.startDate=l.clone()),this.rightCalendar.month.hour(s).minute(n).second(r)}this.updateView(),this.updateCalendars()},updateCalendars:function(){this.leftCalendar.calendar=this.buildCalendar(this.leftCalendar.month.month(),this.leftCalendar.month.year(),this.leftCalendar.month.hour(),this.leftCalendar.month.minute(),this.leftCalendar.month.second(),"left"),this.rightCalendar.calendar=this.buildCalendar(this.rightCalendar.month.month(),this.rightCalendar.month.year(),this.rightCalendar.month.hour(),this.rightCalendar.month.minute(),this.rightCalendar.month.second(),"right"),this.container.find(".calendar.left").empty().html(this.renderCalendar(this.leftCalendar.calendar,this.startDate,this.minDate,this.maxDate,"left")),this.container.find(".calendar.right").empty().html(this.renderCalendar(this.rightCalendar.calendar,this.endDate,this.singleDatePicker?this.minDate:this.startDate,this.maxDate,"right")),this.container.find(".ranges li").removeClass("active");var t=!0,e=0;for(var a in this.ranges)this.timePicker?this.startDate.isSame(this.ranges[a][0])&&this.endDate.isSame(this.ranges[a][1])&&(t=!1,this.chosenLabel=this.container.find(".ranges li:eq("+e+")").addClass("active").html()):this.startDate.format("YYYY-MM-DD")==this.ranges[a][0].format("YYYY-MM-DD")&&this.endDate.format("YYYY-MM-DD")==this.ranges[a][1].format("YYYY-MM-DD")&&(t=!1,this.chosenLabel=this.container.find(".ranges li:eq("+e+")").addClass("active").html()),e++;t&&(this.chosenLabel=this.container.find(".ranges li:last").addClass("active").html(),this.showCalendars())},buildCalendar:function(t,e,i,s,n,r){var o,h=a([e,t]).daysInMonth(),l=a([e,t,1]),c=a([e,t,h]),d=a(l).subtract(1,"month").month(),f=a(l).subtract(1,"month").year(),m=a([f,d]).daysInMonth(),p=l.day(),u=[];for(u.firstDay=l,u.lastDay=c,o=0;6>o;o++)u[o]=[];var D=m-p+this.locale.firstDay+1;D>m&&(D-=7),p==this.locale.firstDay&&(D=m-6);var 
g,y,k=a([f,d,D,12,s,n]).zone(this.timeZone);for(o=0,g=0,y=0;42>o;o++,g++,k=a(k).add(24,"hour"))o>0&&g%7===0&&(g=0,y++),u[y][g]=k.clone().hour(i),k.hour(12),this.minDate&&u[y][g].format("YYYY-MM-DD")==this.minDate.format("YYYY-MM-DD")&&u[y][g].isBefore(this.minDate)&&"left"==r&&(u[y][g]=this.minDate.clone()),this.maxDate&&u[y][g].format("YYYY-MM-DD")==this.maxDate.format("YYYY-MM-DD")&&u[y][g].isAfter(this.maxDate)&&"right"==r&&(u[y][g]=this.maxDate.clone());return u},renderDropdowns:function(t,e,a){for(var i=t.month(),s=t.year(),n=a&&a.year()||s+5,r=e&&e.year()||s-50,o='";for(var d='",o+d},renderCalendar:function(t,e,a,s,n){var r='
    ';r+='',r+="",r+="",this.showWeekNumbers&&(r+=""),r+=!a||a.isBefore(t.firstDay)?'':"";var o=this.locale.monthNames[t[1][1].month()]+t[1][1].format(" YYYY");this.showDropdowns&&(o=this.renderDropdowns(t[1][1],a,s)),r+='",r+=!s||s.isAfter(t.lastDay)?'':"",r+="",r+="",this.showWeekNumbers&&(r+='"),i.each(this.locale.daysOfWeek,function(t,e){r+=""}),r+="",r+="",r+="";for(var h=0;6>h;h++){r+="",this.showWeekNumbers&&(r+='");for(var l=0;7>l;l++){var c="available ";c+=t[h][l].month()==t[1][1].month()?"":"off",a&&t[h][l].isBefore(a,"day")||s&&t[h][l].isAfter(s,"day")?c=" off disabled ":t[h][l].format("YYYY-MM-DD")==e.format("YYYY-MM-DD")?(c+=" active ",t[h][l].format("YYYY-MM-DD")==this.startDate.format("YYYY-MM-DD")&&(c+=" start-date "),t[h][l].format("YYYY-MM-DD")==this.endDate.format("YYYY-MM-DD")&&(c+=" end-date ")):t[h][l]>=this.startDate&&t[h][l]<=this.endDate&&(c+=" in-range ",t[h][l].isSame(this.startDate)&&(c+=" start-date "),t[h][l].isSame(this.endDate)&&(c+=" end-date "));var d="r"+h+"c"+l;r+='"}r+=""}r+="",r+="
    '+o+"
    '+this.locale.weekLabel+""+e+"
    '+t[h][0].week()+"'+t[h][l].date()+"
    ",r+="
    ";var f;if(this.timePicker){r+='
    ',r+=' : ",r+=' ",this.timePickerSeconds){for(r+=': "}if(this.timePicker12Hour){r+='"}r+="
    "}return r},remove:function(){this.container.remove(),this.element.off(".daterangepicker"),this.element.removeData("daterangepicker")}},i.fn.daterangepicker=function(t,e){return this.each(function(){var a=i(this);a.data("daterangepicker")&&a.data("daterangepicker").remove(),a.data("daterangepicker",new s(a,t,e))}),this}}); \ No newline at end of file +!function(t,e){if("function"==typeof define&&define.amd)define(["moment","jquery","exports"],function(a,i,s){t.daterangepicker=e(t,s,a,i)});else if("undefined"!=typeof exports){var a,i=require("moment");try{a=require("jquery")}catch(s){if(a=window.jQuery,!a)throw new Error("jQuery dependency not found")}e(t,exports,i,a)}else t.daterangepicker=e(t,{},t.moment,t.jQuery||t.Zepto||t.ender||t.$)}(this,function(t,e,a,i){var s=function(t,e,a){("object"!=typeof e||null===e)&&(e={});var s="";"string"==typeof e.calender_style&&(s=e.calender_style),this.parentEl="body",this.element=i(t),this.isShowing=!1;var n='';this.parentEl=i("object"==typeof e&&e.parentEl&&i(e.parentEl).length?e.parentEl:this.parentEl),this.container=i(n).appendTo(this.parentEl),this.setOptions(e,a);var r=this.container;i.each(this.buttonClasses,function(t,e){r.find("button").addClass(e)}),this.container.find(".daterangepicker_start_input label").html(this.locale.fromLabel),this.container.find(".daterangepicker_end_input 
label").html(this.locale.toLabel),this.applyClass.length&&this.container.find(".applyBtn").addClass(this.applyClass),this.cancelClass.length&&this.container.find(".cancelBtn").addClass(this.cancelClass),this.container.find(".applyBtn").html(this.locale.applyLabel),this.container.find(".cancelBtn").html(this.locale.cancelLabel),this.container.find(".calendar").on("click.daterangepicker",".prev",i.proxy(this.clickPrev,this)).on("click.daterangepicker",".next",i.proxy(this.clickNext,this)).on("click.daterangepicker","td.available",i.proxy(this.clickDate,this)).on("mouseenter.daterangepicker","td.available",i.proxy(this.hoverDate,this)).on("mouseleave.daterangepicker","td.available",i.proxy(this.updateFormInputs,this)).on("change.daterangepicker","select.yearselect",i.proxy(this.updateMonthYear,this)).on("change.daterangepicker","select.monthselect",i.proxy(this.updateMonthYear,this)).on("change.daterangepicker","select.hourselect,select.minuteselect,select.secondselect,select.ampmselect",i.proxy(this.updateTime,this)),this.container.find(".ranges").on("click.daterangepicker","button.applyBtn",i.proxy(this.clickApply,this)).on("click.daterangepicker","button.cancelBtn",i.proxy(this.clickCancel,this)).on("click.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.showCalendars,this)).on("change.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.inputsChanged,this)).on("keydown.daterangepicker",".daterangepicker_start_input,.daterangepicker_end_input",i.proxy(this.inputsKeydown,this)).on("click.daterangepicker","li",i.proxy(this.clickRange,this)).on("mouseenter.daterangepicker","li",i.proxy(this.enterRange,this)).on("mouseleave.daterangepicker","li",i.proxy(this.updateFormInputs,this)),this.element.is("input")?this.element.on({"click.daterangepicker":i.proxy(this.show,this),"focus.daterangepicker":i.proxy(this.show,this),"keyup.daterangepicker":i.proxy(this.updateFromControl,this)}):this.element.on("c
lick.daterangepicker",i.proxy(this.toggle,this))};s.prototype={constructor:s,setOptions:function(t,e){if(this.startDate=a().startOf("day"),this.endDate=a().endOf("day"),this.timeZone=a().zone(),this.minDate=!1,this.maxDate=!1,this.dateLimit=!1,this.showDropdowns=!1,this.showWeekNumbers=!1,this.timePicker=!1,this.timePickerSeconds=!1,this.timePickerIncrement=30,this.timePicker12Hour=!0,this.singleDatePicker=!1,this.ranges={},this.opens="right",this.element.hasClass("pull-right")&&(this.opens="left"),this.buttonClasses=["btn","btn-small btn-sm"],this.applyClass="btn-success",this.cancelClass="btn-default",this.format="MM/DD/YYYY",this.separator=" - ",this.locale={applyLabel:"Apply",cancelLabel:"Cancel",fromLabel:"From",toLabel:"To",weekLabel:"W",customRangeLabel:"Custom Range",daysOfWeek:a.weekdaysMin(),monthNames:a.monthsShort(),firstDay:a.localeData()._week.dow},this.cb=function(){},"string"==typeof t.format&&(this.format=t.format),"string"==typeof t.separator&&(this.separator=t.separator),"string"==typeof t.startDate&&(this.startDate=a(t.startDate,this.format)),"string"==typeof t.endDate&&(this.endDate=a(t.endDate,this.format)),"string"==typeof t.minDate&&(this.minDate=a(t.minDate,this.format)),"string"==typeof t.maxDate&&(this.maxDate=a(t.maxDate,this.format)),"object"==typeof t.startDate&&(this.startDate=a(t.startDate)),"object"==typeof t.endDate&&(this.endDate=a(t.endDate)),"object"==typeof t.minDate&&(this.minDate=a(t.minDate)),"object"==typeof t.maxDate&&(this.maxDate=a(t.maxDate)),"string"==typeof t.applyClass&&(this.applyClass=t.applyClass),"string"==typeof t.cancelClass&&(this.cancelClass=t.cancelClass),"object"==typeof t.dateLimit&&(this.dateLimit=t.dateLimit),"object"==typeof t.locale&&("object"==typeof t.locale.daysOfWeek&&(this.locale.daysOfWeek=t.locale.daysOfWeek.slice()),"object"==typeof t.locale.monthNames&&(this.locale.monthNames=t.locale.monthNames.slice()),"number"==typeof 
t.locale.firstDay&&(this.locale.firstDay=t.locale.firstDay),"string"==typeof t.locale.applyLabel&&(this.locale.applyLabel=t.locale.applyLabel),"string"==typeof t.locale.cancelLabel&&(this.locale.cancelLabel=t.locale.cancelLabel),"string"==typeof t.locale.fromLabel&&(this.locale.fromLabel=t.locale.fromLabel),"string"==typeof t.locale.toLabel&&(this.locale.toLabel=t.locale.toLabel),"string"==typeof t.locale.weekLabel&&(this.locale.weekLabel=t.locale.weekLabel),"string"==typeof t.locale.customRangeLabel&&(this.locale.customRangeLabel=t.locale.customRangeLabel)),"string"==typeof t.opens&&(this.opens=t.opens),"boolean"==typeof t.showWeekNumbers&&(this.showWeekNumbers=t.showWeekNumbers),"string"==typeof t.buttonClasses&&(this.buttonClasses=[t.buttonClasses]),"object"==typeof t.buttonClasses&&(this.buttonClasses=t.buttonClasses),"boolean"==typeof t.showDropdowns&&(this.showDropdowns=t.showDropdowns),"boolean"==typeof t.singleDatePicker&&(this.singleDatePicker=t.singleDatePicker,this.singleDatePicker&&(this.endDate=this.startDate.clone())),"boolean"==typeof t.timePicker&&(this.timePicker=t.timePicker),"boolean"==typeof t.timePickerSeconds&&(this.timePickerSeconds=t.timePickerSeconds),"number"==typeof t.timePickerIncrement&&(this.timePickerIncrement=t.timePickerIncrement),"boolean"==typeof t.timePicker12Hour&&(this.timePicker12Hour=t.timePicker12Hour),0!=this.locale.firstDay)for(var s=this.locale.firstDay;s>0;)this.locale.daysOfWeek.push(this.locale.daysOfWeek.shift()),s--;var n,r,o;if("undefined"==typeof t.startDate&&"undefined"==typeof t.endDate&&i(this.element).is("input[type=text]")){var h=i(this.element).val(),l=h.split(this.separator);n=r=null,2==l.length?(n=a(l[0],this.format),r=a(l[1],this.format)):this.singleDatePicker&&""!==h&&(n=a(h,this.format),r=a(h,this.format)),null!==n&&null!==r&&(this.startDate=n,this.endDate=r)}if("string"==typeof t.timeZone||"number"==typeof 
t.timeZone?(this.timeZone=t.timeZone,this.startDate.zone(this.timeZone),this.endDate.zone(this.timeZone)):this.timeZone=a(this.startDate).zone(),"object"==typeof t.ranges){for(o in t.ranges)n="string"==typeof t.ranges[o][0]?a(t.ranges[o][0],this.format):a(t.ranges[o][0]),r="string"==typeof t.ranges[o][1]?a(t.ranges[o][1],this.format):a(t.ranges[o][1]),this.minDate&&n.isBefore(this.minDate)&&(n=a(this.minDate)),this.maxDate&&r.isAfter(this.maxDate)&&(r=a(this.maxDate)),this.minDate&&r.isBefore(this.minDate)||this.maxDate&&n.isAfter(this.maxDate)||(this.ranges[o]=[n,r]);var c="
      ";for(o in this.ranges)c+="
    • "+o+"
    • ";c+="
    • "+this.locale.customRangeLabel+"
    • ",c+="
    ",this.container.find(".ranges ul").remove(),this.container.find(".ranges").prepend(c)}if("function"==typeof e&&(this.cb=e),this.timePicker||(this.startDate=this.startDate.startOf("day"),this.endDate=this.endDate.endOf("day")),this.singleDatePicker?(this.opens="right",this.container.addClass("single"),this.container.find(".calendar.right").show(),this.container.find(".calendar.left").hide(),this.timePicker?this.container.find(".ranges .daterangepicker_start_input, .ranges .daterangepicker_end_input").hide():this.container.find(".ranges").hide(),this.container.find(".calendar.right").hasClass("single")||this.container.find(".calendar.right").addClass("single")):(this.container.removeClass("single"),this.container.find(".calendar.right").removeClass("single"),this.container.find(".ranges").show()),this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.oldChosenLabel=this.chosenLabel,this.leftCalendar={month:a([this.startDate.year(),this.startDate.month(),1,this.startDate.hour(),this.startDate.minute(),this.startDate.second()]),calendar:[]},this.rightCalendar={month:a([this.endDate.year(),this.endDate.month(),1,this.endDate.hour(),this.endDate.minute(),this.endDate.second()]),calendar:[]},"right"==this.opens||"center"==this.opens){var d=this.container.find(".calendar.first"),f=this.container.find(".calendar.second");f.hasClass("single")&&(f.removeClass("single"),d.addClass("single")),d.removeClass("left").addClass("right"),f.removeClass("right").addClass("left"),this.singleDatePicker&&(d.show(),f.hide())}"undefined"!=typeof t.ranges||this.singleDatePicker||this.container.addClass("show-calendar"),this.container.addClass("opens"+this.opens),this.updateView(),this.updateCalendars()},setStartDate:function(t){"string"==typeof t&&(this.startDate=a(t,this.format).zone(this.timeZone)),"object"==typeof 
t&&(this.startDate=a(t)),this.timePicker||(this.startDate=this.startDate.startOf("day")),this.oldStartDate=this.startDate.clone(),this.updateView(),this.updateCalendars(),this.updateInputText()},setEndDate:function(t){"string"==typeof t&&(this.endDate=a(t,this.format).zone(this.timeZone)),"object"==typeof t&&(this.endDate=a(t)),this.timePicker||(this.endDate=this.endDate.endOf("day")),this.oldEndDate=this.endDate.clone(),this.updateView(),this.updateCalendars(),this.updateInputText()},updateView:function(){this.leftCalendar.month.month(this.startDate.month()).year(this.startDate.year()).hour(this.startDate.hour()).minute(this.startDate.minute()),this.rightCalendar.month.month(this.endDate.month()).year(this.endDate.year()).hour(this.endDate.hour()).minute(this.endDate.minute()),this.updateFormInputs()},updateFormInputs:function(){this.container.find("input[name=daterangepicker_start]").val(this.startDate.format(this.format)),this.container.find("input[name=daterangepicker_end]").val(this.endDate.format(this.format)),this.startDate.isSame(this.endDate)||this.startDate.isBefore(this.endDate)?this.container.find("button.applyBtn").removeAttr("disabled"):this.container.find("button.applyBtn").attr("disabled","disabled")},updateFromControl:function(){if(this.element.is("input")&&this.element.val().length){var t=this.element.val().split(this.separator),e=null,i=null;2===t.length&&(e=a(t[0],this.format).zone(this.timeZone),i=a(t[1],this.format).zone(this.timeZone)),(this.singleDatePicker||null===e||null===i)&&(e=a(this.element.val(),this.format).zone(this.timeZone),i=e),i.isBefore(e)||(this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.startDate=e,this.endDate=i,this.startDate.isSame(this.oldStartDate)&&this.endDate.isSame(this.oldEndDate)||this.notify(),this.updateCalendars())}},notify:function(){this.updateView(),this.cb(this.startDate,this.endDate,this.chosenLabel)},move:function(){var 
t={top:0,left:0},e=i(window).width();this.parentEl.is("body")||(t={top:this.parentEl.offset().top-this.parentEl.scrollTop(),left:this.parentEl.offset().left-this.parentEl.scrollLeft()},e=this.parentEl[0].clientWidth+this.parentEl.offset().left),"left"==this.opens?(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,right:e-this.element.offset().left-this.element.outerWidth(),left:"auto"}),this.container.offset().left<0&&this.container.css({right:"auto",left:9})):"center"==this.opens?(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,left:this.element.offset().left-t.left+this.element.outerWidth()/2-this.container.outerWidth()/2,right:"auto"}),this.container.offset().left<0&&this.container.css({right:"auto",left:9})):(this.container.css({top:this.element.offset().top+this.element.outerHeight()-t.top,left:this.element.offset().left-t.left,right:"auto"}),this.container.offset().left+this.container.outerWidth()>i(window).width()&&this.container.css({left:"auto",right:0}))},toggle:function(){this.element.hasClass("active")?this.hide():this.show()},show:function(){this.isShowing||(this.element.addClass("active"),this.container.show(),this.move(),this._outsideClickProxy=i.proxy(function(t){this.outsideClick(t)},this),i(document).on("mousedown.daterangepicker",this._outsideClickProxy).on("touchend.daterangepicker",this._outsideClickProxy).on("click.daterangepicker","[data-toggle=dropdown]",this._outsideClickProxy).on("focusin.daterangepicker",this._outsideClickProxy),this.isShowing=!0,this.element.trigger("show.daterangepicker",this))},outsideClick:function(t){var 
e=i(t.target);"focusin"==t.type||e.closest(this.element).length||e.closest(this.container).length||e.closest(".calendar-date").length||this.hide()},hide:function(){this.isShowing&&(i(document).off(".daterangepicker"),this.element.removeClass("active"),this.container.hide(),this.startDate.isSame(this.oldStartDate)&&this.endDate.isSame(this.oldEndDate)||this.notify(),this.oldStartDate=this.startDate.clone(),this.oldEndDate=this.endDate.clone(),this.isShowing=!1,this.element.trigger("hide.daterangepicker",this))},enterRange:function(t){var e=t.target.innerHTML;if(e==this.locale.customRangeLabel)this.updateView();else{var a=this.ranges[e];this.container.find("input[name=daterangepicker_start]").val(a[0].format(this.format)),this.container.find("input[name=daterangepicker_end]").val(a[1].format(this.format))}},showCalendars:function(){this.container.addClass("show-calendar"),this.move(),this.element.trigger("showCalendar.daterangepicker",this)},hideCalendars:function(){this.container.removeClass("show-calendar"),this.element.trigger("hideCalendar.daterangepicker",this)},inputsChanged:function(t){var e=i(t.target),s=a(e.val(),this.format);if(s.isValid()){var n,r;"daterangepicker_start"===e.attr("name")?(n=s,r=this.endDate):(n=this.startDate,r=s),this.setCustomDates(n,r)}},inputsKeydown:function(t){13===t.keyCode&&(this.inputsChanged(t),this.notify())},updateInputText:function(){this.element.is("input")&&!this.singleDatePicker?this.element.val(this.startDate.format(this.format)+this.separator+this.endDate.format(this.format)):this.element.is("input")&&this.element.val(this.endDate.format(this.format))},clickRange:function(t){var e=t.target.innerHTML;if(this.chosenLabel=e,e==this.locale.customRangeLabel)this.showCalendars();else{var 
a=this.ranges[e];this.startDate=a[0],this.endDate=a[1],this.timePicker||(this.startDate.startOf("day"),this.endDate.endOf("day")),this.leftCalendar.month.month(this.startDate.month()).year(this.startDate.year()).hour(this.startDate.hour()).minute(this.startDate.minute()),this.rightCalendar.month.month(this.endDate.month()).year(this.endDate.year()).hour(this.endDate.hour()).minute(this.endDate.minute()),this.updateCalendars(),this.updateInputText(),this.hideCalendars(),this.hide(),this.element.trigger("apply.daterangepicker",this)}},clickPrev:function(t){var e=i(t.target).parents(".calendar");e.hasClass("left")?this.leftCalendar.month.subtract(1,"month"):this.rightCalendar.month.subtract(1,"month"),this.updateCalendars()},clickNext:function(t){var e=i(t.target).parents(".calendar");e.hasClass("left")?this.leftCalendar.month.add(1,"month"):this.rightCalendar.month.add(1,"month"),this.updateCalendars()},hoverDate:function(t){var e=i(t.target).attr("data-title"),a=e.substr(1,1),s=e.substr(3,1),n=i(t.target).parents(".calendar");n.hasClass("left")?this.container.find("input[name=daterangepicker_start]").val(this.leftCalendar.calendar[a][s].format(this.format)):this.container.find("input[name=daterangepicker_end]").val(this.rightCalendar.calendar[a][s].format(this.format))},setCustomDates:function(t,e){if(this.chosenLabel=this.locale.customRangeLabel,t.isAfter(e)){var i=this.endDate.diff(this.startDate);e=a(t).add(i,"ms")}this.startDate=t,this.endDate=e,this.updateView(),this.updateCalendars()},clickDate:function(t){var e,s,n=i(t.target).attr("data-title"),r=n.substr(1,1),o=n.substr(3,1),h=i(t.target).parents(".calendar");if(h.hasClass("left")){if(e=this.leftCalendar.calendar[r][o],s=this.endDate,"object"==typeof this.dateLimit){var l=a(e).add(this.dateLimit).startOf("day");s.isAfter(l)&&(s=l)}}else if(e=this.startDate,s=this.rightCalendar.calendar[r][o],"object"==typeof this.dateLimit){var 
c=a(s).subtract(this.dateLimit).startOf("day");e.isBefore(c)&&(e=c)}this.singleDatePicker&&h.hasClass("left")?s=e.clone():this.singleDatePicker&&h.hasClass("right")&&(e=s.clone()),h.find("td").removeClass("active"),i(t.target).addClass("active"),this.setCustomDates(e,s),this.timePicker||s.endOf("day"),this.singleDatePicker&&!this.timePicker&&this.clickApply()},clickApply:function(){this.updateInputText(),this.hide(),this.element.trigger("apply.daterangepicker",this)},clickCancel:function(){this.startDate=this.oldStartDate,this.endDate=this.oldEndDate,this.chosenLabel=this.oldChosenLabel,this.updateView(),this.updateCalendars(),this.hide(),this.element.trigger("cancel.daterangepicker",this)},updateMonthYear:function(t){var e=i(t.target).closest(".calendar").hasClass("left"),a=e?"left":"right",s=this.container.find(".calendar."+a),n=parseInt(s.find(".monthselect").val(),10),r=s.find(".yearselect").val();this[a+"Calendar"].month.month(n).year(r),this.updateCalendars()},updateTime:function(t){var e=i(t.target).closest(".calendar"),a=e.hasClass("left"),s=parseInt(e.find(".hourselect").val(),10),n=parseInt(e.find(".minuteselect").val(),10),r=0;if(this.timePickerSeconds&&(r=parseInt(e.find(".secondselect").val(),10)),this.timePicker12Hour){var o=e.find(".ampmselect").val();"PM"===o&&12>s&&(s+=12),"AM"===o&&12===s&&(s=0)}if(a){var h=this.startDate.clone();h.hour(s),h.minute(n),h.second(r),this.startDate=h,this.leftCalendar.month.hour(s).minute(n).second(r),this.singleDatePicker&&(this.endDate=h.clone())}else{var 
l=this.endDate.clone();l.hour(s),l.minute(n),l.second(r),this.endDate=l,this.singleDatePicker&&(this.startDate=l.clone()),this.rightCalendar.month.hour(s).minute(n).second(r)}this.updateView(),this.updateCalendars()},updateCalendars:function(){this.leftCalendar.calendar=this.buildCalendar(this.leftCalendar.month.month(),this.leftCalendar.month.year(),this.leftCalendar.month.hour(),this.leftCalendar.month.minute(),this.leftCalendar.month.second(),"left"),this.rightCalendar.calendar=this.buildCalendar(this.rightCalendar.month.month(),this.rightCalendar.month.year(),this.rightCalendar.month.hour(),this.rightCalendar.month.minute(),this.rightCalendar.month.second(),"right"),this.container.find(".calendar.left").empty().html(this.renderCalendar(this.leftCalendar.calendar,this.startDate,this.minDate,this.maxDate,"left")),this.container.find(".calendar.right").empty().html(this.renderCalendar(this.rightCalendar.calendar,this.endDate,this.singleDatePicker?this.minDate:this.startDate,this.maxDate,"right")),this.container.find(".ranges li").removeClass("active");var t=!0,e=0;for(var a in this.ranges)this.timePicker?this.startDate.isSame(this.ranges[a][0])&&this.endDate.isSame(this.ranges[a][1])&&(t=!1,this.chosenLabel=this.container.find(".ranges li:eq("+e+")").addClass("active").html()):this.startDate.format("YYYY-MM-DD")==this.ranges[a][0].format("YYYY-MM-DD")&&this.endDate.format("YYYY-MM-DD")==this.ranges[a][1].format("YYYY-MM-DD")&&(t=!1,this.chosenLabel=this.container.find(".ranges li:eq("+e+")").addClass("active").html()),e++;t&&(this.chosenLabel=this.container.find(".ranges li:last").addClass("active").html(),this.showCalendars())},buildCalendar:function(t,e,i,s,n,r){var o,h=a([e,t]).daysInMonth(),l=a([e,t,1]),c=a([e,t,h]),d=a(l).subtract(1,"month").month(),f=a(l).subtract(1,"month").year(),m=a([f,d]).daysInMonth(),p=l.day(),u=[];for(u.firstDay=l,u.lastDay=c,o=0;6>o;o++)u[o]=[];var D=m-p+this.locale.firstDay+1;D>m&&(D-=7),p==this.locale.firstDay&&(D=m-6);var 
g,y,k=a([f,d,D,12,s,n]).zone(this.timeZone);for(o=0,g=0,y=0;42>o;o++,g++,k=a(k).add(24,"hour"))o>0&&g%7===0&&(g=0,y++),u[y][g]=k.clone().hour(i),k.hour(12),this.minDate&&u[y][g].format("YYYY-MM-DD")==this.minDate.format("YYYY-MM-DD")&&u[y][g].isBefore(this.minDate)&&"left"==r&&(u[y][g]=this.minDate.clone()),this.maxDate&&u[y][g].format("YYYY-MM-DD")==this.maxDate.format("YYYY-MM-DD")&&u[y][g].isAfter(this.maxDate)&&"right"==r&&(u[y][g]=this.maxDate.clone());return u},renderDropdowns:function(t,e,a){for(var i=t.month(),s=t.year(),n=a&&a.year()||s+5,r=e&&e.year()||s-50,o='";for(var d='",o+d},renderCalendar:function(t,e,a,s,n){var r='
    ';r+='',r+="",r+="",this.showWeekNumbers&&(r+=""),r+=!a||a.isBefore(t.firstDay)?'':"";var o=this.locale.monthNames[t[1][1].month()]+t[1][1].format(" YYYY");this.showDropdowns&&(o=this.renderDropdowns(t[1][1],a,s)),r+='",r+=!s||s.isAfter(t.lastDay)?'':"",r+="",r+="",this.showWeekNumbers&&(r+='"),i.each(this.locale.daysOfWeek,function(t,e){r+=""}),r+="",r+="",r+="";for(var h=0;6>h;h++){r+="",this.showWeekNumbers&&(r+='");for(var l=0;7>l;l++){var c="available ";c+=t[h][l].month()==t[1][1].month()?"":"off",a&&t[h][l].isBefore(a,"day")||s&&t[h][l].isAfter(s,"day")?c=" off disabled ":t[h][l].format("YYYY-MM-DD")==e.format("YYYY-MM-DD")?(c+=" active ",t[h][l].format("YYYY-MM-DD")==this.startDate.format("YYYY-MM-DD")&&(c+=" start-date "),t[h][l].format("YYYY-MM-DD")==this.endDate.format("YYYY-MM-DD")&&(c+=" end-date ")):t[h][l]>=this.startDate&&t[h][l]<=this.endDate&&(c+=" in-range ",t[h][l].isSame(this.startDate)&&(c+=" start-date "),t[h][l].isSame(this.endDate)&&(c+=" end-date "));var d="r"+h+"c"+l;r+='"}r+=""}r+="",r+="
    '+o+"
    '+this.locale.weekLabel+""+e+"
    '+t[h][0].week()+"'+t[h][l].date()+"
    ",r+="
    ";var f;if(this.timePicker){r+='
    ',r+=' : ",r+=' ",this.timePickerSeconds){for(r+=': "}if(this.timePicker12Hour){r+='"}r+="
    "}return r},remove:function(){this.container.remove(),this.element.off(".daterangepicker"),this.element.removeData("daterangepicker")}},i.fn.daterangepicker=function(t,e){return this.each(function(){var a=i(this);a.data("daterangepicker")&&a.data("daterangepicker").remove(),a.data("daterangepicker",new s(a,t,e))}),this}}); diff --git a/ui/js/moment/moment.min.js b/ui/js/moment/moment.min.js index 024d488f..c7f6dcd4 100755 --- a/ui/js/moment/moment.min.js +++ b/ui/js/moment/moment.min.js @@ -4,4 +4,4 @@ //! license : MIT //! momentjs.com (function(a){function b(a,b,c){switch(arguments.length){case 2:return null!=a?a:b;case 3:return null!=a?a:null!=b?b:c;default:throw new Error("Implement me")}}function c(a,b){return Bb.call(a,b)}function d(){return{empty:!1,unusedTokens:[],unusedInput:[],overflow:-2,charsLeftOver:0,nullInput:!1,invalidMonth:null,invalidFormat:!1,userInvalidated:!1,iso:!1}}function e(a){vb.suppressDeprecationWarnings===!1&&"undefined"!=typeof console&&console.warn&&console.warn("Deprecation warning: "+a)}function f(a,b){var c=!0;return o(function(){return c&&(e(a),c=!1),b.apply(this,arguments)},b)}function g(a,b){sc[a]||(e(b),sc[a]=!0)}function h(a,b){return function(c){return r(a.call(this,c),b)}}function i(a,b){return function(c){return this.localeData().ordinal(a.call(this,c),b)}}function j(a,b){var c,d,e=12*(b.year()-a.year())+(b.month()-a.month()),f=a.clone().add(e,"months");return 0>b-f?(c=a.clone().add(e-1,"months"),d=(b-f)/(f-c)):(c=a.clone().add(e+1,"months"),d=(b-f)/(c-f)),-(e+d)}function k(a,b,c){var d;return null==c?b:null!=a.meridiemHour?a.meridiemHour(b,c):null!=a.isPM?(d=a.isPM(c),d&&12>b&&(b+=12),d||12!==b||(b=0),b):b}function l(){}function m(a,b){b!==!1&&H(a),p(this,a),this._d=new Date(+a._d),uc===!1&&(uc=!0,vb.updateOffset(this),uc=!1)}function n(a){var 
b=A(a),c=b.year||0,d=b.quarter||0,e=b.month||0,f=b.week||0,g=b.day||0,h=b.hour||0,i=b.minute||0,j=b.second||0,k=b.millisecond||0;this._milliseconds=+k+1e3*j+6e4*i+36e5*h,this._days=+g+7*f,this._months=+e+3*d+12*c,this._data={},this._locale=vb.localeData(),this._bubble()}function o(a,b){for(var d in b)c(b,d)&&(a[d]=b[d]);return c(b,"toString")&&(a.toString=b.toString),c(b,"valueOf")&&(a.valueOf=b.valueOf),a}function p(a,b){var c,d,e;if("undefined"!=typeof b._isAMomentObject&&(a._isAMomentObject=b._isAMomentObject),"undefined"!=typeof b._i&&(a._i=b._i),"undefined"!=typeof b._f&&(a._f=b._f),"undefined"!=typeof b._l&&(a._l=b._l),"undefined"!=typeof b._strict&&(a._strict=b._strict),"undefined"!=typeof b._tzm&&(a._tzm=b._tzm),"undefined"!=typeof b._isUTC&&(a._isUTC=b._isUTC),"undefined"!=typeof b._offset&&(a._offset=b._offset),"undefined"!=typeof b._pf&&(a._pf=b._pf),"undefined"!=typeof b._locale&&(a._locale=b._locale),Kb.length>0)for(c in Kb)d=Kb[c],e=b[d],"undefined"!=typeof e&&(a[d]=e);return a}function q(a){return 0>a?Math.ceil(a):Math.floor(a)}function r(a,b,c){for(var d=""+Math.abs(a),e=a>=0;d.lengthd;d++)(c&&a[d]!==b[d]||!c&&C(a[d])!==C(b[d]))&&g++;return g+f}function z(a){if(a){var b=a.toLowerCase().replace(/(.)s$/,"$1");a=lc[a]||mc[b]||b}return a}function A(a){var b,d,e={};for(d in a)c(a,d)&&(b=z(d),b&&(e[b]=a[d]));return e}function B(b){var c,d;if(0===b.indexOf("week"))c=7,d="day";else{if(0!==b.indexOf("month"))return;c=12,d="month"}vb[b]=function(e,f){var g,h,i=vb._locale[b],j=[];if("number"==typeof e&&(f=e,e=a),h=function(a){var b=vb().utc().set(d,a);return i.call(vb._locale,b,e||"")},null!=f)return h(f);for(g=0;c>g;g++)j.push(h(g));return j}}function C(a){var b=+a,c=0;return 0!==b&&isFinite(b)&&(c=b>=0?Math.floor(b):Math.ceil(b)),c}function D(a,b){return new Date(Date.UTC(a,b+1,0)).getUTCDate()}function E(a,b,c){return jb(vb([a,11,31+b-c]),b,c).week}function F(a){return G(a)?366:365}function G(a){return a%4===0&&a%100!==0||a%400===0}function H(a){var 
b;a._a&&-2===a._pf.overflow&&(b=a._a[Db]<0||a._a[Db]>11?Db:a._a[Eb]<1||a._a[Eb]>D(a._a[Cb],a._a[Db])?Eb:a._a[Fb]<0||a._a[Fb]>24||24===a._a[Fb]&&(0!==a._a[Gb]||0!==a._a[Hb]||0!==a._a[Ib])?Fb:a._a[Gb]<0||a._a[Gb]>59?Gb:a._a[Hb]<0||a._a[Hb]>59?Hb:a._a[Ib]<0||a._a[Ib]>999?Ib:-1,a._pf._overflowDayOfYear&&(Cb>b||b>Eb)&&(b=Eb),a._pf.overflow=b)}function I(b){return null==b._isValid&&(b._isValid=!isNaN(b._d.getTime())&&b._pf.overflow<0&&!b._pf.empty&&!b._pf.invalidMonth&&!b._pf.nullInput&&!b._pf.invalidFormat&&!b._pf.userInvalidated,b._strict&&(b._isValid=b._isValid&&0===b._pf.charsLeftOver&&0===b._pf.unusedTokens.length&&b._pf.bigHour===a)),b._isValid}function J(a){return a?a.toLowerCase().replace("_","-"):a}function K(a){for(var b,c,d,e,f=0;f0;){if(d=L(e.slice(0,b).join("-")))return d;if(c&&c.length>=b&&y(e,c,!0)>=b-1)break;b--}f++}return null}function L(a){var b=null;if(!Jb[a]&&Lb)try{b=vb.locale(),require("./locale/"+a),vb.locale(b)}catch(c){}return Jb[a]}function M(a,b){var c,d;return b._isUTC?(c=b.clone(),d=(vb.isMoment(a)||x(a)?+a:+vb(a))-+c,c._d.setTime(+c._d+d),vb.updateOffset(c,!1),c):vb(a).local()}function N(a){return a.match(/\[[\s\S]/)?a.replace(/^\[|\]$/g,""):a.replace(/\\/g,"")}function O(a){var b,c,d=a.match(Pb);for(b=0,c=d.length;c>b;b++)d[b]=rc[d[b]]?rc[d[b]]:N(d[b]);return function(e){var f="";for(b=0;c>b;b++)f+=d[b]instanceof Function?d[b].call(e,a):d[b];return f}}function P(a,b){return a.isValid()?(b=Q(b,a.localeData()),nc[b]||(nc[b]=O(b)),nc[b](a)):a.localeData().invalidDate()}function Q(a,b){function c(a){return b.longDateFormat(a)||a}var d=5;for(Qb.lastIndex=0;d>=0&&Qb.test(a);)a=a.replace(Qb,c),Qb.lastIndex=0,d-=1;return a}function R(a,b){var c,d=b._strict;switch(a){case"Q":return _b;case"DDDD":return bc;case"YYYY":case"GGGG":case"gggg":return d?cc:Tb;case"Y":case"G":case"g":return ec;case"YYYYYY":case"YYYYY":case"GGGGG":case"ggggg":return d?dc:Ub;case"S":if(d)return _b;case"SS":if(d)return ac;case"SSS":if(d)return bc;case"DDD":return 
Sb;case"MMM":case"MMMM":case"dd":case"ddd":case"dddd":return Wb;case"a":case"A":return b._locale._meridiemParse;case"x":return Zb;case"X":return $b;case"Z":case"ZZ":return Xb;case"T":return Yb;case"SSSS":return Vb;case"MM":case"DD":case"YY":case"GG":case"gg":case"HH":case"hh":case"mm":case"ss":case"ww":case"WW":return d?ac:Rb;case"M":case"D":case"d":case"H":case"h":case"m":case"s":case"w":case"W":case"e":case"E":return Rb;case"Do":return d?b._locale._ordinalParse:b._locale._ordinalParseLenient;default:return c=new RegExp($(Z(a.replace("\\","")),"i"))}}function S(a){a=a||"";var b=a.match(Xb)||[],c=b[b.length-1]||[],d=(c+"").match(jc)||["-",0,0],e=+(60*d[1])+C(d[2]);return"+"===d[0]?e:-e}function T(a,b,c){var d,e=c._a;switch(a){case"Q":null!=b&&(e[Db]=3*(C(b)-1));break;case"M":case"MM":null!=b&&(e[Db]=C(b)-1);break;case"MMM":case"MMMM":d=c._locale.monthsParse(b,a,c._strict),null!=d?e[Db]=d:c._pf.invalidMonth=b;break;case"D":case"DD":null!=b&&(e[Eb]=C(b));break;case"Do":null!=b&&(e[Eb]=C(parseInt(b.match(/\d{1,2}/)[0],10)));break;case"DDD":case"DDDD":null!=b&&(c._dayOfYear=C(b));break;case"YY":e[Cb]=vb.parseTwoDigitYear(b);break;case"YYYY":case"YYYYY":case"YYYYYY":e[Cb]=C(b);break;case"a":case"A":c._meridiem=b;break;case"h":case"hh":c._pf.bigHour=!0;case"H":case"HH":e[Fb]=C(b);break;case"m":case"mm":e[Gb]=C(b);break;case"s":case"ss":e[Hb]=C(b);break;case"S":case"SS":case"SSS":case"SSSS":e[Ib]=C(1e3*("0."+b));break;case"x":c._d=new Date(C(b));break;case"X":c._d=new Date(1e3*parseFloat(b));break;case"Z":case"ZZ":c._useUTC=!0,c._tzm=S(b);break;case"dd":case"ddd":case"dddd":d=c._locale.weekdaysParse(b),null!=d?(c._w=c._w||{},c._w.d=d):c._pf.invalidWeekday=b;break;case"w":case"ww":case"W":case"WW":case"d":case"e":case"E":a=a.substr(0,1);case"gggg":case"GGGG":case"GGGGG":a=a.substr(0,2),b&&(c._w=c._w||{},c._w[a]=C(b));break;case"gg":case"GG":c._w=c._w||{},c._w[a]=vb.parseTwoDigitYear(b)}}function U(a){var 
c,d,e,f,g,h,i;c=a._w,null!=c.GG||null!=c.W||null!=c.E?(g=1,h=4,d=b(c.GG,a._a[Cb],jb(vb(),1,4).year),e=b(c.W,1),f=b(c.E,1)):(g=a._locale._week.dow,h=a._locale._week.doy,d=b(c.gg,a._a[Cb],jb(vb(),g,h).year),e=b(c.w,1),null!=c.d?(f=c.d,g>f&&++e):f=null!=c.e?c.e+g:g),i=kb(d,e,f,h,g),a._a[Cb]=i.year,a._dayOfYear=i.dayOfYear}function V(a){var c,d,e,f,g=[];if(!a._d){for(e=X(a),a._w&&null==a._a[Eb]&&null==a._a[Db]&&U(a),a._dayOfYear&&(f=b(a._a[Cb],e[Cb]),a._dayOfYear>F(f)&&(a._pf._overflowDayOfYear=!0),d=fb(f,0,a._dayOfYear),a._a[Db]=d.getUTCMonth(),a._a[Eb]=d.getUTCDate()),c=0;3>c&&null==a._a[c];++c)a._a[c]=g[c]=e[c];for(;7>c;c++)a._a[c]=g[c]=null==a._a[c]?2===c?1:0:a._a[c];24===a._a[Fb]&&0===a._a[Gb]&&0===a._a[Hb]&&0===a._a[Ib]&&(a._nextDay=!0,a._a[Fb]=0),a._d=(a._useUTC?fb:eb).apply(null,g),null!=a._tzm&&a._d.setUTCMinutes(a._d.getUTCMinutes()-a._tzm),a._nextDay&&(a._a[Fb]=24)}}function W(a){var b;a._d||(b=A(a._i),a._a=[b.year,b.month,b.day||b.date,b.hour,b.minute,b.second,b.millisecond],V(a))}function X(a){var b=new Date;return a._useUTC?[b.getUTCFullYear(),b.getUTCMonth(),b.getUTCDate()]:[b.getFullYear(),b.getMonth(),b.getDate()]}function Y(b){if(b._f===vb.ISO_8601)return void ab(b);b._a=[],b._pf.empty=!0;var c,d,e,f,g,h=""+b._i,i=h.length,j=0;for(e=Q(b._f,b._locale).match(Pb)||[],c=0;c0&&b._pf.unusedInput.push(g),h=h.slice(h.indexOf(d)+d.length),j+=d.length),rc[f]?(d?b._pf.empty=!1:b._pf.unusedTokens.push(f),T(f,d,b)):b._strict&&!d&&b._pf.unusedTokens.push(f);b._pf.charsLeftOver=i-j,h.length>0&&b._pf.unusedInput.push(h),b._pf.bigHour===!0&&b._a[Fb]<=12&&(b._pf.bigHour=a),b._a[Fb]=k(b._locale,b._a[Fb],b._meridiem),V(b),H(b)}function Z(a){return a.replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(a,b,c,d,e){return b||c||d||e})}function $(a){return a.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&")}function _(a){var b,c,e,f,g;if(0===a._f.length)return a._pf.invalidFormat=!0,void(a._d=new Date(0/0));for(f=0;fg)&&(e=g,c=b));o(a,c||b)}function ab(a){var 
b,c,d=a._i,e=fc.exec(d);if(e){for(a._pf.iso=!0,b=0,c=hc.length;c>b;b++)if(hc[b][1].exec(d)){a._f=hc[b][0]+(e[6]||" ");break}for(b=0,c=ic.length;c>b;b++)if(ic[b][1].exec(d)){a._f+=ic[b][0];break}d.match(Xb)&&(a._f+="Z"),Y(a)}else a._isValid=!1}function bb(a){ab(a),a._isValid===!1&&(delete a._isValid,vb.createFromInputFallback(a))}function cb(a,b){var c,d=[];for(c=0;ca&&h.setFullYear(a),h}function fb(a){var b=new Date(Date.UTC.apply(null,arguments));return 1970>a&&b.setUTCFullYear(a),b}function gb(a,b){if("string"==typeof a)if(isNaN(a)){if(a=b.weekdaysParse(a),"number"!=typeof a)return null}else a=parseInt(a,10);return a}function hb(a,b,c,d,e){return e.relativeTime(b||1,!!c,a,d)}function ib(a,b,c){var d=vb.duration(a).abs(),e=Ab(d.as("s")),f=Ab(d.as("m")),g=Ab(d.as("h")),h=Ab(d.as("d")),i=Ab(d.as("M")),j=Ab(d.as("y")),k=e0,k[4]=c,hb.apply({},k)}function jb(a,b,c){var d,e=c-b,f=c-a.day();return f>e&&(f-=7),e-7>f&&(f+=7),d=vb(a).add(f,"d"),{week:Math.ceil(d.dayOfYear()/7),year:d.year()}}function kb(a,b,c,d,e){var f,g,h=fb(a,0,1).getUTCDay();return h=0===h?7:h,c=null!=c?c:e,f=e-h+(h>d?7:0)-(e>h?7:0),g=7*(b-1)+(c-e)+f+1,{year:g>0?a:a-1,dayOfYear:g>0?g:F(a-1)+g}}function lb(b){var c,d=b._i,e=b._f;return b._locale=b._locale||vb.localeData(b._l),null===d||e===a&&""===d?vb.invalid({nullInput:!0}):("string"==typeof d&&(b._i=d=b._locale.preparse(d)),vb.isMoment(d)?new m(d,!0):(e?w(e)?_(b):Y(b):db(b),c=new m(b),c._nextDay&&(c.add(1,"d"),c._nextDay=a),c))}function mb(a,b){var c,d;if(1===b.length&&w(b[0])&&(b=b[0]),!b.length)return vb();for(c=b[0],d=1;d=0?"+":"-";return b+r(Math.abs(a),6)},gg:function(){return r(this.weekYear()%100,2)},gggg:function(){return r(this.weekYear(),4)},ggggg:function(){return r(this.weekYear(),5)},GG:function(){return r(this.isoWeekYear()%100,2)},GGGG:function(){return r(this.isoWeekYear(),4)},GGGGG:function(){return r(this.isoWeekYear(),5)},e:function(){return this.weekday()},E:function(){return this.isoWeekday()},a:function(){return 
this.localeData().meridiem(this.hours(),this.minutes(),!0)},A:function(){return this.localeData().meridiem(this.hours(),this.minutes(),!1)},H:function(){return this.hours()},h:function(){return this.hours()%12||12},m:function(){return this.minutes()},s:function(){return this.seconds()},S:function(){return C(this.milliseconds()/100)},SS:function(){return r(C(this.milliseconds()/10),2)},SSS:function(){return r(this.milliseconds(),3)},SSSS:function(){return r(this.milliseconds(),3)},Z:function(){var a=this.utcOffset(),b="+";return 0>a&&(a=-a,b="-"),b+r(C(a/60),2)+":"+r(C(a)%60,2)},ZZ:function(){var a=this.utcOffset(),b="+";return 0>a&&(a=-a,b="-"),b+r(C(a/60),2)+r(C(a)%60,2)},z:function(){return this.zoneAbbr()},zz:function(){return this.zoneName()},x:function(){return this.valueOf()},X:function(){return this.unix()},Q:function(){return this.quarter()}},sc={},tc=["months","monthsShort","weekdays","weekdaysShort","weekdaysMin"],uc=!1;pc.length;)xb=pc.pop(),rc[xb+"o"]=i(rc[xb],xb);for(;qc.length;)xb=qc.pop(),rc[xb+xb]=h(rc[xb],2);rc.DDDD=h(rc.DDD,3),o(l.prototype,{set:function(a){var b,c;for(c in a)b=a[c],"function"==typeof b?this[c]=b:this["_"+c]=b;this._ordinalParseLenient=new RegExp(this._ordinalParse.source+"|"+/\d{1,2}/.source)},_months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),months:function(a){return this._months[a.month()]},_monthsShort:"Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),monthsShort:function(a){return this._monthsShort[a.month()]},monthsParse:function(a,b,c){var d,e,f;for(this._monthsParse||(this._monthsParse=[],this._longMonthsParse=[],this._shortMonthsParse=[]),d=0;12>d;d++){if(e=vb.utc([2e3,d]),c&&!this._longMonthsParse[d]&&(this._longMonthsParse[d]=new RegExp("^"+this.months(e,"").replace(".","")+"$","i"),this._shortMonthsParse[d]=new 
RegExp("^"+this.monthsShort(e,"").replace(".","")+"$","i")),c||this._monthsParse[d]||(f="^"+this.months(e,"")+"|^"+this.monthsShort(e,""),this._monthsParse[d]=new RegExp(f.replace(".",""),"i")),c&&"MMMM"===b&&this._longMonthsParse[d].test(a))return d;if(c&&"MMM"===b&&this._shortMonthsParse[d].test(a))return d;if(!c&&this._monthsParse[d].test(a))return d}},_weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),weekdays:function(a){return this._weekdays[a.day()]},_weekdaysShort:"Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),weekdaysShort:function(a){return this._weekdaysShort[a.day()]},_weekdaysMin:"Su_Mo_Tu_We_Th_Fr_Sa".split("_"),weekdaysMin:function(a){return this._weekdaysMin[a.day()]},weekdaysParse:function(a){var b,c,d;for(this._weekdaysParse||(this._weekdaysParse=[]),b=0;7>b;b++)if(this._weekdaysParse[b]||(c=vb([2e3,1]).day(b),d="^"+this.weekdays(c,"")+"|^"+this.weekdaysShort(c,"")+"|^"+this.weekdaysMin(c,""),this._weekdaysParse[b]=new RegExp(d.replace(".",""),"i")),this._weekdaysParse[b].test(a))return b},_longDateFormat:{LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY LT",LLLL:"dddd, MMMM D, YYYY LT"},longDateFormat:function(a){var b=this._longDateFormat[a];return!b&&this._longDateFormat[a.toUpperCase()]&&(b=this._longDateFormat[a.toUpperCase()].replace(/MMMM|MM|DD|dddd/g,function(a){return a.slice(1)}),this._longDateFormat[a]=b),b},isPM:function(a){return"p"===(a+"").toLowerCase().charAt(0)},_meridiemParse:/[ap]\.?m?\.?/i,meridiem:function(a,b,c){return a>11?c?"pm":"PM":c?"am":"AM"},_calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},calendar:function(a,b,c){var d=this._calendar[a];return"function"==typeof d?d.apply(b,[c]):d},_relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a 
year",yy:"%d years"},relativeTime:function(a,b,c,d){var e=this._relativeTime[c];return"function"==typeof e?e(a,b,c,d):e.replace(/%d/i,a)},pastFuture:function(a,b){var c=this._relativeTime[a>0?"future":"past"];return"function"==typeof c?c(b):c.replace(/%s/i,b)},ordinal:function(a){return this._ordinal.replace("%d",a)},_ordinal:"%d",_ordinalParse:/\d{1,2}/,preparse:function(a){return a},postformat:function(a){return a},week:function(a){return jb(a,this._week.dow,this._week.doy).week},_week:{dow:0,doy:6},firstDayOfWeek:function(){return this._week.dow},firstDayOfYear:function(){return this._week.doy},_invalidDate:"Invalid date",invalidDate:function(){return this._invalidDate}}),vb=function(b,c,e,f){var g;return"boolean"==typeof e&&(f=e,e=a),g={},g._isAMomentObject=!0,g._i=b,g._f=c,g._l=e,g._strict=f,g._isUTC=!1,g._pf=d(),lb(g)},vb.suppressDeprecationWarnings=!1,vb.createFromInputFallback=f("moment construction falls back to js Date. This is discouraged and will be removed in upcoming major release. Please refer to https://github.com/moment/moment/issues/1407 for more info.",function(a){a._d=new Date(a._i+(a._useUTC?" 
UTC":""))}),vb.min=function(){var a=[].slice.call(arguments,0);return mb("isBefore",a)},vb.max=function(){var a=[].slice.call(arguments,0);return mb("isAfter",a)},vb.utc=function(b,c,e,f){var g;return"boolean"==typeof e&&(f=e,e=a),g={},g._isAMomentObject=!0,g._useUTC=!0,g._isUTC=!0,g._l=e,g._i=b,g._f=c,g._strict=f,g._pf=d(),lb(g).utc()},vb.unix=function(a){return vb(1e3*a)},vb.duration=function(a,b){var d,e,f,g,h=a,i=null;return vb.isDuration(a)?h={ms:a._milliseconds,d:a._days,M:a._months}:"number"==typeof a?(h={},b?h[b]=a:h.milliseconds=a):(i=Nb.exec(a))?(d="-"===i[1]?-1:1,h={y:0,d:C(i[Eb])*d,h:C(i[Fb])*d,m:C(i[Gb])*d,s:C(i[Hb])*d,ms:C(i[Ib])*d}):(i=Ob.exec(a))?(d="-"===i[1]?-1:1,f=function(a){var b=a&&parseFloat(a.replace(",","."));return(isNaN(b)?0:b)*d},h={y:f(i[2]),M:f(i[3]),d:f(i[4]),h:f(i[5]),m:f(i[6]),s:f(i[7]),w:f(i[8])}):null==h?h={}:"object"==typeof h&&("from"in h||"to"in h)&&(g=t(vb(h.from),vb(h.to)),h={},h.ms=g.milliseconds,h.M=g.months),e=new n(h),vb.isDuration(a)&&c(a,"_locale")&&(e._locale=a._locale),e},vb.version=yb,vb.defaultFormat=gc,vb.ISO_8601=function(){},vb.momentProperties=Kb,vb.updateOffset=function(){},vb.relativeTimeThreshold=function(b,c){return oc[b]===a?!1:c===a?oc[b]:(oc[b]=c,!0)},vb.lang=f("moment.lang is deprecated. Use moment.locale instead.",function(a,b){return vb.locale(a,b)}),vb.locale=function(a,b){var c;return a&&(c="undefined"!=typeof b?vb.defineLocale(a,b):vb.localeData(a),c&&(vb.duration._locale=vb._locale=c)),vb._locale._abbr},vb.defineLocale=function(a,b){return null!==b?(b.abbr=a,Jb[a]||(Jb[a]=new l),Jb[a].set(b),vb.locale(a),Jb[a]):(delete Jb[a],null)},vb.langData=f("moment.langData is deprecated. 
Use moment.localeData instead.",function(a){return vb.localeData(a)}),vb.localeData=function(a){var b;if(a&&a._locale&&a._locale._abbr&&(a=a._locale._abbr),!a)return vb._locale;if(!w(a)){if(b=L(a))return b;a=[a]}return K(a)},vb.isMoment=function(a){return a instanceof m||null!=a&&c(a,"_isAMomentObject")},vb.isDuration=function(a){return a instanceof n};for(xb=tc.length-1;xb>=0;--xb)B(tc[xb]);vb.normalizeUnits=function(a){return z(a)},vb.invalid=function(a){var b=vb.utc(0/0);return null!=a?o(b._pf,a):b._pf.userInvalidated=!0,b},vb.parseZone=function(){return vb.apply(null,arguments).parseZone()},vb.parseTwoDigitYear=function(a){return C(a)+(C(a)>68?1900:2e3)},vb.isDate=x,o(vb.fn=m.prototype,{clone:function(){return vb(this)},valueOf:function(){return+this._d-6e4*(this._offset||0)},unix:function(){return Math.floor(+this/1e3)},toString:function(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")},toDate:function(){return this._offset?new Date(+this):this._d},toISOString:function(){var a=vb(this).utc();return 00:!1},parsingFlags:function(){return o({},this._pf)},invalidAt:function(){return this._pf.overflow},utc:function(a){return this.utcOffset(0,a)},local:function(a){return this._isUTC&&(this.utcOffset(0,a),this._isUTC=!1,a&&this.subtract(this._dateUtcOffset(),"m")),this},format:function(a){var b=P(this,a||vb.defaultFormat);return this.localeData().postformat(b)},add:u(1,"add"),subtract:u(-1,"subtract"),diff:function(a,b,c){var d,e,f=M(a,this),g=6e4*(f.utcOffset()-this.utcOffset());return b=z(b),"year"===b||"month"===b||"quarter"===b?(e=j(this,f),"quarter"===b?e/=3:"year"===b&&(e/=12)):(d=this-f,e="second"===b?d/1e3:"minute"===b?d/6e4:"hour"===b?d/36e5:"day"===b?(d-g)/864e5:"week"===b?(d-g)/6048e5:d),c?e:q(e)},from:function(a,b){return vb.duration({to:this,from:a}).locale(this.locale()).humanize(!b)},fromNow:function(a){return this.from(vb(),a)},calendar:function(a){var 
b=a||vb(),c=M(b,this).startOf("day"),d=this.diff(c,"days",!0),e=-6>d?"sameElse":-1>d?"lastWeek":0>d?"lastDay":1>d?"sameDay":2>d?"nextDay":7>d?"nextWeek":"sameElse";return this.format(this.localeData().calendar(e,this,vb(b)))},isLeapYear:function(){return G(this.year())},isDST:function(){return this.utcOffset()>this.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()},day:function(a){var b=this._isUTC?this._d.getUTCDay():this._d.getDay();return null!=a?(a=gb(a,this.localeData()),this.add(a-b,"d")):b},month:qb("Month",!0),startOf:function(a){switch(a=z(a)){case"year":this.month(0);case"quarter":case"month":this.date(1);case"week":case"isoWeek":case"day":this.hours(0);case"hour":this.minutes(0);case"minute":this.seconds(0);case"second":this.milliseconds(0)}return"week"===a?this.weekday(0):"isoWeek"===a&&this.isoWeekday(1),"quarter"===a&&this.month(3*Math.floor(this.month()/3)),this},endOf:function(b){return b=z(b),b===a||"millisecond"===b?this:this.startOf(b).add(1,"isoWeek"===b?"week":b).subtract(1,"ms")},isAfter:function(a,b){var c;return b=z("undefined"!=typeof b?b:"millisecond"),"millisecond"===b?(a=vb.isMoment(a)?a:vb(a),+this>+a):(c=vb.isMoment(a)?+a:+vb(a),c<+this.clone().startOf(b))},isBefore:function(a,b){var c;return b=z("undefined"!=typeof b?b:"millisecond"),"millisecond"===b?(a=vb.isMoment(a)?a:vb(a),+a>+this):(c=vb.isMoment(a)?+a:+vb(a),+this.clone().endOf(b)a?this:a}),max:f("moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548",function(a){return a=vb.apply(null,arguments),a>this?this:a}),zone:f("moment().zone is deprecated, use moment().utcOffset instead. 
https://github.com/moment/moment/issues/1779",function(a,b){return null!=a?("string"!=typeof a&&(a=-a),this.utcOffset(a,b),this):-this.utcOffset()}),utcOffset:function(a,b){var c,d=this._offset||0;return null!=a?("string"==typeof a&&(a=S(a)),Math.abs(a)<16&&(a=60*a),!this._isUTC&&b&&(c=this._dateUtcOffset()),this._offset=a,this._isUTC=!0,null!=c&&this.add(c,"m"),d!==a&&(!b||this._changeInProgress?v(this,vb.duration(a-d,"m"),1,!1):this._changeInProgress||(this._changeInProgress=!0,vb.updateOffset(this,!0),this._changeInProgress=null)),this):this._isUTC?d:this._dateUtcOffset()},isLocal:function(){return!this._isUTC},isUtcOffset:function(){return this._isUTC},isUtc:function(){return this._isUTC&&0===this._offset},zoneAbbr:function(){return this._isUTC?"UTC":""},zoneName:function(){return this._isUTC?"Coordinated Universal Time":""},parseZone:function(){return this._tzm?this.utcOffset(this._tzm):"string"==typeof this._i&&this.utcOffset(S(this._i)),this},hasAlignedHourOffset:function(a){return a=a?vb(a).utcOffset():0,(this.utcOffset()-a)%60===0},daysInMonth:function(){return D(this.year(),this.month())},dayOfYear:function(a){var b=Ab((vb(this).startOf("day")-vb(this).startOf("year"))/864e5)+1;return null==a?b:this.add(a-b,"d")},quarter:function(a){return null==a?Math.ceil((this.month()+1)/3):this.month(3*(a-1)+this.month()%3)},weekYear:function(a){var b=jb(this,this.localeData()._week.dow,this.localeData()._week.doy).year;return null==a?b:this.add(a-b,"y")},isoWeekYear:function(a){var b=jb(this,1,4).year;return null==a?b:this.add(a-b,"y")},week:function(a){var b=this.localeData().week(this);return null==a?b:this.add(7*(a-b),"d")},isoWeek:function(a){var b=jb(this,1,4).week;return null==a?b:this.add(7*(a-b),"d")},weekday:function(a){var b=(this.day()+7-this.localeData()._week.dow)%7;return null==a?b:this.add(a-b,"d")},isoWeekday:function(a){return null==a?this.day()||7:this.day(this.day()%7?a:a-7)},isoWeeksInYear:function(){return 
E(this.year(),1,4)},weeksInYear:function(){var a=this.localeData()._week;return E(this.year(),a.dow,a.doy)},get:function(a){return a=z(a),this[a]()},set:function(a,b){var c;if("object"==typeof a)for(c in a)this.set(c,a[c]);else a=z(a),"function"==typeof this[a]&&this[a](b);return this},locale:function(b){var c;return b===a?this._locale._abbr:(c=vb.localeData(b),null!=c&&(this._locale=c),this)},lang:f("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.",function(b){return b===a?this.localeData():this.locale(b)}),localeData:function(){return this._locale},_dateUtcOffset:function(){return 15*-Math.round(this._d.getTimezoneOffset()/15)}}),vb.fn.millisecond=vb.fn.milliseconds=qb("Milliseconds",!1),vb.fn.second=vb.fn.seconds=qb("Seconds",!1),vb.fn.minute=vb.fn.minutes=qb("Minutes",!1),vb.fn.hour=vb.fn.hours=qb("Hours",!0),vb.fn.date=qb("Date",!0),vb.fn.dates=f("dates accessor is deprecated. Use date instead.",qb("Date",!0)),vb.fn.year=qb("FullYear",!0),vb.fn.years=f("years accessor is deprecated. 
Use year instead.",qb("FullYear",!0)),vb.fn.days=vb.fn.day,vb.fn.months=vb.fn.month,vb.fn.weeks=vb.fn.week,vb.fn.isoWeeks=vb.fn.isoWeek,vb.fn.quarters=vb.fn.quarter,vb.fn.toJSON=vb.fn.toISOString,vb.fn.isUTC=vb.fn.isUtc,o(vb.duration.fn=n.prototype,{_bubble:function(){var a,b,c,d=this._milliseconds,e=this._days,f=this._months,g=this._data,h=0;g.milliseconds=d%1e3,a=q(d/1e3),g.seconds=a%60,b=q(a/60),g.minutes=b%60,c=q(b/60),g.hours=c%24,e+=q(c/24),h=q(rb(e)),e-=q(sb(h)),f+=q(e/30),e%=30,h+=q(f/12),f%=12,g.days=e,g.months=f,g.years=h},abs:function(){return this._milliseconds=Math.abs(this._milliseconds),this._days=Math.abs(this._days),this._months=Math.abs(this._months),this._data.milliseconds=Math.abs(this._data.milliseconds),this._data.seconds=Math.abs(this._data.seconds),this._data.minutes=Math.abs(this._data.minutes),this._data.hours=Math.abs(this._data.hours),this._data.months=Math.abs(this._data.months),this._data.years=Math.abs(this._data.years),this},weeks:function(){return q(this.days()/7)},valueOf:function(){return this._milliseconds+864e5*this._days+this._months%12*2592e6+31536e6*C(this._months/12) -},humanize:function(a){var b=ib(this,!a,this.localeData());return a&&(b=this.localeData().pastFuture(+this,b)),this.localeData().postformat(b)},add:function(a,b){var c=vb.duration(a,b);return this._milliseconds+=c._milliseconds,this._days+=c._days,this._months+=c._months,this._bubble(),this},subtract:function(a,b){var c=vb.duration(a,b);return this._milliseconds-=c._milliseconds,this._days-=c._days,this._months-=c._months,this._bubble(),this},get:function(a){return a=z(a),this[a.toLowerCase()+"s"]()},as:function(a){var b,c;if(a=z(a),"month"===a||"year"===a)return b=this._days+this._milliseconds/864e5,c=this._months+12*rb(b),"month"===a?c:c/12;switch(b=this._days+Math.round(sb(this._months/12)),a){case"week":return b/7+this._milliseconds/6048e5;case"day":return b+this._milliseconds/864e5;case"hour":return 24*b+this._milliseconds/36e5;case"minute":return 
24*b*60+this._milliseconds/6e4;case"second":return 24*b*60*60+this._milliseconds/1e3;case"millisecond":return Math.floor(24*b*60*60*1e3)+this._milliseconds;default:throw new Error("Unknown unit "+a)}},lang:vb.fn.lang,locale:vb.fn.locale,toIsoString:f("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",function(){return this.toISOString()}),toISOString:function(){var a=Math.abs(this.years()),b=Math.abs(this.months()),c=Math.abs(this.days()),d=Math.abs(this.hours()),e=Math.abs(this.minutes()),f=Math.abs(this.seconds()+this.milliseconds()/1e3);return this.asSeconds()?(this.asSeconds()<0?"-":"")+"P"+(a?a+"Y":"")+(b?b+"M":"")+(c?c+"D":"")+(d||e||f?"T":"")+(d?d+"H":"")+(e?e+"M":"")+(f?f+"S":""):"P0D"},localeData:function(){return this._locale},toJSON:function(){return this.toISOString()}}),vb.duration.fn.toString=vb.duration.fn.toISOString;for(xb in kc)c(kc,xb)&&tb(xb.toLowerCase());vb.duration.fn.asMilliseconds=function(){return this.as("ms")},vb.duration.fn.asSeconds=function(){return this.as("s")},vb.duration.fn.asMinutes=function(){return this.as("m")},vb.duration.fn.asHours=function(){return this.as("h")},vb.duration.fn.asDays=function(){return this.as("d")},vb.duration.fn.asWeeks=function(){return this.as("weeks")},vb.duration.fn.asMonths=function(){return this.as("M")},vb.duration.fn.asYears=function(){return this.as("y")},vb.locale("en",{ordinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(a){var b=a%10,c=1===C(a%100/10)?"th":1===b?"st":2===b?"nd":3===b?"rd":"th";return a+c}}),Lb?module.exports=vb:"function"==typeof define&&define.amd?(define(function(a,b,c){return c.config&&c.config()&&c.config().noGlobal===!0&&(zb.moment=wb),vb}),ub(!0)):ub()}).call(this); \ No newline at end of file +},humanize:function(a){var b=ib(this,!a,this.localeData());return a&&(b=this.localeData().pastFuture(+this,b)),this.localeData().postformat(b)},add:function(a,b){var c=vb.duration(a,b);return 
this._milliseconds+=c._milliseconds,this._days+=c._days,this._months+=c._months,this._bubble(),this},subtract:function(a,b){var c=vb.duration(a,b);return this._milliseconds-=c._milliseconds,this._days-=c._days,this._months-=c._months,this._bubble(),this},get:function(a){return a=z(a),this[a.toLowerCase()+"s"]()},as:function(a){var b,c;if(a=z(a),"month"===a||"year"===a)return b=this._days+this._milliseconds/864e5,c=this._months+12*rb(b),"month"===a?c:c/12;switch(b=this._days+Math.round(sb(this._months/12)),a){case"week":return b/7+this._milliseconds/6048e5;case"day":return b+this._milliseconds/864e5;case"hour":return 24*b+this._milliseconds/36e5;case"minute":return 24*b*60+this._milliseconds/6e4;case"second":return 24*b*60*60+this._milliseconds/1e3;case"millisecond":return Math.floor(24*b*60*60*1e3)+this._milliseconds;default:throw new Error("Unknown unit "+a)}},lang:vb.fn.lang,locale:vb.fn.locale,toIsoString:f("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",function(){return this.toISOString()}),toISOString:function(){var a=Math.abs(this.years()),b=Math.abs(this.months()),c=Math.abs(this.days()),d=Math.abs(this.hours()),e=Math.abs(this.minutes()),f=Math.abs(this.seconds()+this.milliseconds()/1e3);return this.asSeconds()?(this.asSeconds()<0?"-":"")+"P"+(a?a+"Y":"")+(b?b+"M":"")+(c?c+"D":"")+(d||e||f?"T":"")+(d?d+"H":"")+(e?e+"M":"")+(f?f+"S":""):"P0D"},localeData:function(){return this._locale},toJSON:function(){return this.toISOString()}}),vb.duration.fn.toString=vb.duration.fn.toISOString;for(xb in kc)c(kc,xb)&&tb(xb.toLowerCase());vb.duration.fn.asMilliseconds=function(){return this.as("ms")},vb.duration.fn.asSeconds=function(){return this.as("s")},vb.duration.fn.asMinutes=function(){return this.as("m")},vb.duration.fn.asHours=function(){return this.as("h")},vb.duration.fn.asDays=function(){return this.as("d")},vb.duration.fn.asWeeks=function(){return this.as("weeks")},vb.duration.fn.asMonths=function(){return 
this.as("M")},vb.duration.fn.asYears=function(){return this.as("y")},vb.locale("en",{ordinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(a){var b=a%10,c=1===C(a%100/10)?"th":1===b?"st":2===b?"nd":3===b?"rd":"th";return a+c}}),Lb?module.exports=vb:"function"==typeof define&&define.amd?(define(function(a,b,c){return c.config&&c.config()&&c.config().noGlobal===!0&&(zb.moment=wb),vb}),ub(!0)):ub()}).call(this); diff --git a/ui/login.html b/ui/login.html index c2f66869..fe9167aa 100644 --- a/ui/login.html +++ b/ui/login.html @@ -60,7 +60,7 @@ - + @@ -82,7 +82,7 @@ })(jQuery); - \ No newline at end of file + diff --git a/ui/organisations.html b/ui/organisations.html index 22813eec..a8c51d73 100644 --- a/ui/organisations.html +++ b/ui/organisations.html @@ -6,7 +6,7 @@ - + @@ -81,8 +81,8 @@ - - + + @@ -91,4 +91,4 @@ - \ No newline at end of file + diff --git a/ui/relationships.html b/ui/relationships.html index cd81b1a9..a01f8baf 100644 --- a/ui/relationships.html +++ b/ui/relationships.html @@ -79,8 +79,8 @@ - - + + @@ -89,4 +89,4 @@ - \ No newline at end of file +