diff --git a/Dockerfile b/Dockerfile index c1860dd..9626435 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,4 +25,4 @@ RUN \ && rm -rf /tmp/* /var/cache/apk/* USER kernelcollector -ENTRYPOINT ["python", "-m", "kernelcollector.Main"] +ENTRYPOINT ["/bin/sh", "/srv/entrypoint.sh"] diff --git a/README.md b/README.md index f752420..f83b752 100644 --- a/README.md +++ b/README.md @@ -5,15 +5,15 @@ KernelCollector is a small Python script that handles the upkeep of a Linux kern It keeps track of header, image and module packages for the `amd64`, `i386`, `armhf`, `arm64`, `ppc64el` and `s390x` architectures. There are three kind of kernel images that KernelCollector collects: - * `linux-current`: The newest stable version of the Linux kernel, for example: `v5.0.7` - * `linux-beta`: The newest release candidate of the Linux kernel, for example: `v5.1-rc5` - * `linux-devel`: The newest trunk build of the Linux kernel, for example: `v2019-04-16` + * `linux-current`: The newest stable version of the Linux kernel, for example: `v5.8.10` + * `linux-beta`: The newest release candidate of the Linux kernel, for example: `v5.9-rc5` + * `linux-devel`: The newest trunk build of the Linux kernel, for example: `v2019-09-17` Using a cronjob, KernelCollector can always keep these packages updated in the Debian package repository. This is useful because it allows users to automatically upgrade their Linux kernels to the latest version from the update channel, without any user input. For example, you will not receive beta or devel versions while on the current release channel. -Older kernel versions will disappear once the newest kernel is installed. If kernel version `5.0.8` is released, everybody using the KernelCollector repository will automatically be upgraded to version `5.0.8`, straight from `5.0.7` - and so on. +Older kernel versions will disappear once the newest kernel is installed. 
If kernel version `5.8.10` is released, everybody using the KernelCollector repository will automatically be upgraded to version `5.8.10`, straight from `5.8.9` - and so on. This kind of setup might not be useful (or too risky) for some people, in that case, you are welcome to handle your own kernel installations. @@ -71,7 +71,7 @@ Next, edit the `settings.json` file to your liking: * `distribution`: Defaults to `sid`. This really doesn't matter, as the packages require a newer version of Debian or Ubuntu, and this is just a matter of preference. * `gpgKey`: Defaults to `ABCD`. Obviously, this isn't a real GPG key. Repositories maintained by KernelCollector are GPG signed. You will have to create your own GPG key, which can be password protected if needed. * `gpgPassword`: Defaults to `none`. If you don't have a GPG password, please set the password to `none`. If you have one, specify it here. -* `repoPath`: Defaults to `/var/www/debian`. This is the filesystem path of your repository, where the artifacts will be published to. +* `repoPath`: Defaults to `/srv/packages`. This is the filesystem path of your repository, where the artifacts will be published to. You might notice that you need a GPG key to sign the kernel packages. This is out of scope for this tutorial, Google is your friend in this regard, though `gpg --full-generate-key` might be a good point to start. 
diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 0000000..40403d4 --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +# Import our GPG key +gpg --import gpg.key + +# Run the actual program +python -m kernelcollector.Main diff --git a/kernelcollector/Main.py b/kernelcollector/Main.py index ccca969..f1189a8 100644 --- a/kernelcollector/Main.py +++ b/kernelcollector/Main.py @@ -1,18 +1,17 @@ from .PackageCollector import PackageCollector from .PackageList import PackageList from .PackageDistribution import PackageDistribution -import traceback, json, socket, os, sys, time +from .WebhookEmitter import WebhookEmitter +import traceback, json, logging, os, sys class Main(object): def __init__(self): - try: + if os.path.exists('settings.json'): with open('settings.json', 'r') as file: self.settings = json.load(file) - except: - self.settings = {} - defaultValues = {'repoPath': '/var/www/debian', 'gpgKey': 'ABCDEF', 'gpgPassword': 'none', 'distribution': 'sid', 'description': 'Package repository for newest Linux kernels', 'architectures': ['amd64']} + defaultValues = {'repoPath': '/srv/packages', 'gpgKey': 'ABCDEF', 'gpgPassword': 'none', 'distribution': 'sid', 'description': 'Package repository for newest Linux kernels', 'architectures': ['amd64'], 'webhook': None} edited = False for key, value in defaultValues.items(): @@ -25,34 +24,32 @@ def __init__(self): self.saveSettings() sys.exit() - self.packageList = PackageList(self.settings['repoPath'].rstrip('/'), self.settings['gpgKey'], self.settings['gpgPassword'], verbose=True) - self.packageDist = PackageDistribution(self.settings['distribution'], self.settings['architectures'], self.settings['description'], verbose=True) + self.logger = WebhookEmitter(self.settings['webhook']) + + self.packageList = PackageList(self.logger, self.settings['repoPath'].rstrip('/'), self.settings['gpgKey'], self.settings['gpgPassword']) + self.packageDist = PackageDistribution(self.logger, 
self.settings['distribution'], self.settings['architectures'], self.settings['description']) self.packageList.addDistribution(self.packageDist) - self.packageCollector = PackageCollector(self.settings['architectures'], self.packageList, verbose=True) - self.logFolder = os.path.join(os.getcwd(), 'logs') + self.packageCollector = PackageCollector(self.logger, self.settings['architectures'], self.packageList) def runAllBuilds(self): # Attempt to run all builds. - # If something goes wrong, a log file will be created with the error. + # If something goes wrong, a webhook message will be sent. try: self.packageCollector.runAllBuilds() except: - log = traceback.format_exc() - - if not os.path.exists(self.logFolder): - os.makedirs(self.logFolder) - - logFilename = os.path.join(self.logFolder, 'crash-{0}.log'.format(int(time.time()))) - - with open(logFilename, 'w') as file: - file.write(log) + self.logger.add('Something went wrong while building packages!', alert=True) + self.logger.add(traceback.format_exc(), pre=True) + self.logger.send_all() def saveSettings(self): with open('settings.json', 'w') as file: json.dump(self.settings, file, sort_keys=True, indent=4, separators=(',', ': ')) if __name__ == '__main__': + logging.basicConfig(format='[%(asctime)s] %(message)s', datefmt='%Y/%m/%d %I:%M:%S %p') + logging.root.setLevel(logging.INFO) + main = Main() main.runAllBuilds() diff --git a/kernelcollector/PackageCollector.py b/kernelcollector/PackageCollector.py index 45e3fe8..848a307 100644 --- a/kernelcollector/PackageCollector.py +++ b/kernelcollector/PackageCollector.py @@ -1,6 +1,7 @@ from bs4 import BeautifulSoup from . 
import Utils -import json, requests, tempfile, shutil, os, time, uuid +import json, logging, tempfile, shutil, os, time, uuid +import requests FIND_IMAGE_RM = 'rm -f /lib/modules/$version/.fresh-install' NEW_FIND_IMAGE_RM = 'rm -rf /lib/modules/$version' @@ -8,22 +9,18 @@ class PackageCollector(object): - def __init__(self, architectures, pkgList, verbose=True): + def __init__(self, logger, architectures, pkgList): + self.logger = logger self.architectures = architectures self.pkgList = pkgList self.tmpDir = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex) - self.verbose = verbose self.currentDir = os.getcwd() self.reloadCache() - def log(self, message): - if self.verbose: - print(message) - def runAllBuilds(self): # Get all releases and prereleases - self.log('Current directory is {0}'.format(self.currentDir)) - self.log('Checking latest versions of the kernel...') + logging.info(f'Current directory is {self.currentDir}') + logging.info('Checking latest versions of the kernel...') releases, prereleases = self.getAllReleases() # The newest release is always the last in the list @@ -38,9 +35,9 @@ def runAllBuilds(self): dailyRelease = self.getNewestDailyRelease() downloaded = False - self.log('Current release: {0}'.format(release)) - self.log('Current release candidate: {0}'.format(prerelease)) - self.log('Current daily build: v{0}'.format(dailyRelease)) + logging.info(f'Current release: {release}') + logging.info(f'Current release candidate: {prerelease}') + logging.info(f'Current daily build: v{dailyRelease}') # Create the temporary folder if os.path.exists(self.tmpDir): @@ -57,7 +54,7 @@ def runAllBuilds(self): downloaded = True # Redownload devel build if necessary - if self.downloadAndRepackAll('daily/{0}'.format(dailyRelease), dailyRelease, 'linux-devel'): + if self.downloadAndRepackAll(f'daily/{dailyRelease}', dailyRelease, 'linux-devel'): downloaded = True # Update cache and publish repository @@ -138,7 +135,7 @@ def getNewestDailyRelease(self): return 
max(versions) def getFiles(self, releaseLink, releaseType): - with requests.get('https://kernel.ubuntu.com/~kernel-ppa/mainline/{0}'.format(releaseLink)) as site: + with requests.get(f'https://kernel.ubuntu.com/~kernel-ppa/mainline/{releaseLink}') as site: data = site.content files = {} @@ -171,17 +168,17 @@ def getFiles(self, releaseLink, releaseType): # and they can be either generic, low latency or snapdragon (the processor) # The only package that doesn't have a sub type is headers-all, which is archless for type in ('image', 'modules', 'headers'): - if '-{0}-'.format(type) not in text: + if f'-{type}-' not in text: continue for subType in ('generic', 'lowlatency', 'snapdragon'): - if '-{0}'.format(subType) in text: - files['{0}-{1}-{2}-{3}'.format(releaseType, type, subType, arch)] = text + if f'-{subType}' in text: + files[f'{releaseType}-{type}-{subType}-{arch}'] = text foundCurrent = True break if (not foundCurrent) and '-headers-' in text: - files['{0}-headers-all'.format(releaseType)] = text + files[f'{releaseType}-headers-all'] = text return files @@ -190,7 +187,7 @@ def downloadAndRepack(self, releaseLink, releaseName, releaseType, pkgName, file extractFolder = os.path.join(self.tmpDir, uuid.uuid4().hex) controlFilename = os.path.join(extractFolder, 'DEBIAN', 'control') postrmFilename = os.path.join(extractFolder, 'DEBIAN', 'postrm') - link = 'https://kernel.ubuntu.com/~kernel-ppa/mainline/{0}/{1}'.format(releaseLink, filename) + link = f'https://kernel.ubuntu.com/~kernel-ppa/mainline/{releaseLink}/{filename}' # Create a temporary folder for the repackaging if os.path.exists(extractFolder): @@ -211,15 +208,23 @@ def downloadAndRepack(self, releaseLink, releaseName, releaseType, pkgName, file releaseName = '-'.join(names) # Download the .deb - self.log('Downloading package {0} (release v{1})'.format(pkgName, releaseName)) + logging.info(f'Downloading package {pkgName} (release v{releaseName})') Utils.downloadFile(link, debFilename) # Extract the .deb
file - os.system('dpkg-deb -R {0} {1}'.format(debFilename, extractFolder)) + result = Utils.run_process(['dpkg-deb', '-R', debFilename, extractFolder]) + + if result.failed: + self.logger.add(f'Could not extract {os.path.basename(debFilename)} (error code {result.exit_code})!', alert=True) + self.logger.add(result.get_output(), pre=True) + self.logger.send_all() + return + os.remove(debFilename) if not os.path.exists(controlFilename): - self.log('No control file for {0}...'.format(pkgName)) + self.logger.add(f'No control file for {pkgName}...', alert=True) + self.logger.send_all() return # Rewrite the control file @@ -232,9 +237,9 @@ def downloadAndRepack(self, releaseLink, releaseName, releaseType, pkgName, file # For example, generic packages will conflict with lowlatency and snapdragon packages for i, line in enumerate(controlLines): if line.startswith('Package:'): - controlLines[i] = 'Package: {0}'.format(pkgName) + controlLines[i] = f'Package: {pkgName}' elif line.startswith('Version:'): - controlLines[i] = 'Version: {0}'.format(releaseName) + controlLines[i] = f'Version: {releaseName}' elif line.startswith('Depends: '): dependencies = [dep for dep in line[len('Depends: '):].split(', ') if not dep.startswith('linux-')] @@ -276,7 +281,14 @@ def downloadAndRepack(self, releaseLink, releaseName, releaseType, pkgName, file f.write('\n'.join(postrmLines)) # Repack the .deb file - os.system('dpkg-deb -b {0} {1}'.format(extractFolder, debFilename)) + result = Utils.run_process(['dpkg-deb', '-b', extractFolder, debFilename]) + + if result.failed: + self.logger.add(f'Could not pack {os.path.basename(debFilename)} (error code {result.exit_code})!', alert=True) + self.logger.add(result.get_output(), pre=True) + self.logger.send_all() + return + self.pkgList.addDebToPool(debFilename) # Remove the temporary extract folder @@ -285,7 +297,7 @@ def downloadAndRepack(self, releaseLink, releaseName, releaseType, pkgName, file def downloadAndRepackAll(self, releaseLink, 
releaseName, releaseType): # Download the file list for this release - self.log('Downloading release: {0}'.format(releaseType)) + logging.info(f'Downloading release: {releaseType}') files = self.getFiles(releaseLink, releaseType) requiredTypes = ['image', 'modules', 'headers'] @@ -303,7 +315,8 @@ def downloadAndRepackAll(self, releaseLink, releaseName, releaseType): currentTypes.append(type) if len(currentTypes) != len(requiredTypes): - self.log('Release is not yet ready: {0}'.format(releaseType)) + self.logger.add(f'Release is not yet ready: {releaseType}') + self.logger.send_all() return False downloaded = False @@ -312,7 +325,7 @@ def downloadAndRepackAll(self, releaseLink, releaseName, releaseType): for pkgName, filename in files.items(): # Check our cache if self.fileCache.get(pkgName, None) == filename: - self.log('Skipping package {0}.'.format(pkgName)) + logging.info(f'Skipping package {pkgName}.') continue # Download and repack @@ -347,3 +360,4 @@ def updateCache(self): def publishRepository(self): # If temporary directory doesn't exist, nothing matters self.pkgList.saveAllDistributions(['l', 'custom']) + self.pkgList.sendEmbeddedReport() diff --git a/kernelcollector/PackageDistribution.py b/kernelcollector/PackageDistribution.py index 3283a6a..af41f7f 100644 --- a/kernelcollector/PackageDistribution.py +++ b/kernelcollector/PackageDistribution.py @@ -1,19 +1,19 @@ from deb_pkg_tools.control import unparse_control_fields from datetime import datetime from . 
import Utils +import traceback, logging, gzip, os import gnupg -import gzip, os gpg = gnupg.GPG() gpg.encoding = 'utf-8' class PackageDistribution(object): - def __init__(self, name, architectures, description, verbose=True): + def __init__(self, logger, name, architectures, description): + self.logger = logger self.name = name self.architectures = architectures self.description = description - self.verbose = verbose def getName(self): return self.name @@ -48,17 +48,22 @@ def setPackageList(self, pkgList): os.makedirs(self.folder) def getArchDir(self, arch): - return os.path.join(self.folder, 'main', 'binary-{0}'.format(arch)) + return os.path.join(self.folder, 'main', f'binary-{arch}') - def log(self, message): - if self.verbose: - print(message) + def signFile(self, filename, content, detach=False): + with open(filename, 'w') as file: + try: + file.write(str(gpg.sign(content, detach=detach, keyid=self.pkgList.gpgKey, passphrase=self.pkgList.gpgPassword))) + except: + self.logger.add(f'Could not sign {filename}! Please check your GPG keys!', alert=True) + self.logger.add(traceback.format_exc(), pre=True) + self.logger.send_all() def save(self, releases): mainDir = os.path.join(self.folder, 'main') archToPackages = {arch: [] for arch in self.architectures} - self.log("Writing package list to disk...") + logging.info('Writing package list to disk...') # Associate our packages with architectures. 
for release in releases: @@ -82,7 +87,7 @@ def save(self, releases): with open(os.path.join(archDir, 'Release'), 'w') as file: file.write('\n'.join([ 'Component: main', 'Origin: linux-kernel', 'Label: linux-kernel', - 'Architecture: {0}'.format(arch), 'Description: {0}'.format(self.description) + f'Architecture: {arch}', f'Description: {self.description}' ])) packages = '\n'.join(archToPackages[arch]) @@ -107,28 +112,19 @@ def save(self, releases): md5, sha1, sha256 = Utils.getAllHashes(fullPath) size = str(os.path.getsize(fullPath)) - md5s.append(' {0} {1} {2}'.format(md5, size, displayPath)) - sha1s.append(' {0} {1} {2}'.format(sha1, size, displayPath)) - sha256s.append(' {0} {1} {2}'.format(sha256, size, displayPath)) + md5s.append(f' {md5} {size} {displayPath}') + sha1s.append(f' {sha1} {size} {displayPath}') + sha256s.append(f' {sha256} {size} {displayPath}') # Save the final package list, signing release = '\n'.join([ - 'Origin: linux-kernel', 'Label: linux-kernel', 'Suite: {0}'.format(self.name), 'Codename: {0}'.format(self.name), 'Date: {0}'.format(date), - 'Architectures: {0}'.format(' '.join(self.architectures)), 'Components: main', 'Description: {0}'.format(self.description), + 'Origin: linux-kernel', 'Label: linux-kernel', f'Suite: {self.name}', f'Codename: {self.name}', f'Date: {date}', + 'Architectures: {0}'.format(' '.join(self.architectures)), 'Components: main', f'Description: {self.description}', 'MD5Sum:\n{0}'.format('\n'.join(md5s)), 'SHA1:\n{0}'.format('\n'.join(sha1s)), 'SHA256:\n{0}'.format('\n'.join(sha256s)) ]) with open(os.path.join(self.folder, 'Release'), 'w') as file: file.write(release) - with open(os.path.join(self.folder, 'InRelease'), 'w') as file: - try: - file.write(str(gpg.sign(release, keyid=self.pkgList.gpgKey, passphrase=self.pkgList.gpgPassword))) - except: - self.log("Couldn't sign InRelease :( Check your GPG keys!") - - with open(os.path.join(self.folder, 'Release.gpg'), 'w') as file: - try: - 
file.write(str(gpg.sign(release, detach=True, keyid=self.pkgList.gpgKey, passphrase=self.pkgList.gpgPassword))) - except: - self.log("Couldn't sign Release.gpg :( Check your GPG keys!") + self.signFile(os.path.join(self.folder, 'InRelease'), release, detach=False) + self.signFile(os.path.join(self.folder, 'Release.gpg'), release, detach=True) diff --git a/kernelcollector/PackageList.py b/kernelcollector/PackageList.py index f429612..64643f1 100644 --- a/kernelcollector/PackageList.py +++ b/kernelcollector/PackageList.py @@ -2,15 +2,16 @@ from distutils.version import LooseVersion from datetime import datetime from . import Utils -import shutil, os +import shutil, logging, time, os class PackageList(object): - def __init__(self, repoPath, gpgKey, gpgPassword, verbose=True): + def __init__(self, logger, repoPath, gpgKey, gpgPassword): + self.logger = logger self.gpgKey = gpgKey self.gpgPassword = gpgPassword - self.verbose = verbose self.distributions = {} + self.recentlyAdded = {} self.setRepoPath(repoPath) def getRepoPath(self): @@ -40,13 +41,9 @@ def addDistribution(self, distribution): def getDistribution(self, name): return self.distributions[name] - def log(self, message): - if self.verbose: - print(message) - def addDebToPool(self, filename): basename = os.path.basename(filename) - self.log('Adding {0} to pool...'.format(basename)) + logging.info(f'Adding {basename} to pool...') # Create the pool folder if necessary poolFolder = os.path.join(self.poolFolder, basename[0]) @@ -55,17 +52,18 @@ def addDebToPool(self, filename): os.makedirs(poolFolder) # Remove any old deb package, and move from original location to pool - basename, ext = os.path.splitext(os.path.basename(filename)) - poolFilename = os.path.join(poolFolder, '{0}_tmp{1}'.format(basename, ext)) + noext, ext = os.path.splitext(basename) + poolFilename = os.path.join(poolFolder, f'{noext}_tmp{ext}') if os.path.exists(poolFilename): os.remove(poolFilename) shutil.move(filename, poolFilename) + 
self.recentlyAdded[basename] = None # Version to be filled out in getAllReleasesInPool def saveAllDistributions(self, letters): # Save all distributions - self.log('Saving package list...') + logging.info('Saving package list...') releases = [] for letter in letters: @@ -74,6 +72,33 @@ def saveAllDistributions(self, letters): for distribution in self.distributions.values(): distribution.save(releases) + def sendEmbeddedReport(self): + description = [f'**{filename}** has been updated to **v{version}**!' for filename, version in self.recentlyAdded.items() if version is not None] + + if not description: + return + + description = '\n'.join(description) + current_date = time.strftime('%Y-%m-%d %H:%M:%S') + content = { + 'embeds': [{ + 'title': 'Your package list has been updated!', + 'description': description, + 'color': 7526106, + 'author': { + 'name': 'Kernel Collector', + 'url': 'https://github.com/darktohka/kernelcollector', + 'icon_url': 'https://i.imgur.com/y6g563D.png' + }, + 'footer': { + 'text': f'This report has been generated on {current_date}.' 
+ } + }] + } + + self.logger.add_embed(content) + self.logger.send_all() + def getAllReleasesInPool(self, letter): poolFolder = os.path.join(self.poolFolder, letter) @@ -87,7 +112,7 @@ def getAllReleasesInPool(self, letter): continue fullPath = os.path.join(poolFolder, file) - newFile = '{0}.deb'.format(fullPath[:-len('_tmp.deb')]) + newFile = fullPath[:-len('_tmp.deb')] + '.deb' if os.path.exists(newFile): os.remove(newFile) @@ -105,7 +130,7 @@ def getAllReleasesInPool(self, letter): continue basename = os.path.basename(fullPath) - self.log('Inspecting {0}...'.format(basename)) + logging.info(f'Inspecting {basename}...') try: data = inspect_package_fields(fullPath) @@ -118,10 +143,14 @@ def getAllReleasesInPool(self, letter): pkg = pkgToVersions.get(pkgName, {}) if version in pkg: - self.log('Removing duplicate version {0} from package {1}...'.format(version, pkgName)) + self.logger.add(f'Removing duplicate version {version} from package {pkgName}...') + self.logger.send_all() os.remove(fullPath) continue + if basename in self.recentlyAdded: + self.recentlyAdded[basename] = version + poolFilename = os.path.join(poolFolder, basename)[len(self.repoPath):].lstrip('/') md5, sha1, sha256 = Utils.getAllHashes(fullPath) data['Filename'] = poolFilename @@ -132,6 +161,7 @@ def getAllReleasesInPool(self, letter): pkg[version] = [fullPath, data] pkgToVersions[pkgName] = pkg + releases = [] # We need to gather the current releases now @@ -157,7 +187,8 @@ def getAllReleasesInPool(self, letter): continue filename = pkgList[0] - self.log('Removing old file {0}...'.format(os.path.basename(filename))) + self.logger.add(f'Removing old file {os.path.basename(filename)}...') + self.logger.send_all() os.remove(filename) releases.append([fullPath, data]) diff --git a/kernelcollector/Utils.py b/kernelcollector/Utils.py index afd20d3..ec6fb0c 100644 --- a/kernelcollector/Utils.py +++ b/kernelcollector/Utils.py @@ -1,5 +1,44 @@ -import requests, re -import hashlib +import hashlib, 
subprocess, re +import requests + +class ProcessOutput(object): + + def __init__(self, lines, exit_code): + self.lines = lines + self.exit_code = exit_code + + def get_lines(self): + return self.lines + + def get_output(self): + return ''.join(self.lines) + + @property + def success(self): + return self.exit_code == 0 + + @property + def failed(self): + return self.exit_code != 0 + +def run_process(process): + if isinstance(process, str): + process = process.split() + + try: + process = subprocess.Popen(process, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except: + return ProcessOutput([], -1) + + lines = [] + + with process.stdout: + for line in iter(process.stdout.readline, b''): + if line: + lines.append(line.decode('utf-8')) + + process.wait() + return ProcessOutput(lines, process.returncode) def releaseToTuple(name): return tuple(int(x) for x in re.split('\\-rc|\\.', name, 0)) diff --git a/kernelcollector/WebhookEmitter.py b/kernelcollector/WebhookEmitter.py new file mode 100644 index 0000000..7590fab --- /dev/null +++ b/kernelcollector/WebhookEmitter.py @@ -0,0 +1,76 @@ +import requests +import time + +HEADERS = {'User-Agent': 'KernelCollector'} + +class WebhookEmitter(object): + + def __init__(self, webhook): + self.webhook = webhook + self.queue = [] + self.embeds = [] + self.next_webhook = 0 + + def set_webhook(self, webhook): + self.webhook = webhook + + def add(self, message, alert=False, pre=False): + if pre: + message = f'```{message}```' + if alert: + message = '@everyone\n' + message + + for msg in [message[x:x+19980] for x in range(0, len(message), 19980)]: + self.queue.append(msg) + + def add_embed(self, embed): + self.embeds.append(embed) + + def try_post(self, *args, **kwargs): + try: + req = requests.post(*args, **kwargs) + + try: + req = req.json() + except: + return True + + if 'retry_after' in req: + time.sleep((req['retry_after'] / 1000) + 0.1) + return self.try_post(*args, **kwargs) + except: + print('Could not send request... 
trying again.') + time.sleep(1) + return self.try_post(*args, **kwargs) + + def send_webhook(self, data): + current_time = time.time() + + if self.next_webhook > current_time: + time.sleep(self.next_webhook - current_time) + + result = self.try_post(self.webhook, headers=HEADERS, json=data) + self.next_webhook = time.time() + 2 + return result + + def send_all(self): + if not self.webhook: + return + + while self.queue: + current_item = self.queue.pop(0) + + while self.queue: + item = self.queue.pop(0) + + if (len(item) + len(current_item)) > 19980: + self.send_webhook({'content': current_item}) + current_item = item + else: + current_item += '\n' + current_item += item + + self.send_webhook({'content': current_item}) + + while self.embeds: + self.send_webhook(self.embeds.pop(0)) diff --git a/supplementary/nginx_config b/supplementary/nginx_config index 21badfb..025bb66 100644 --- a/supplementary/nginx_config +++ b/supplementary/nginx_config @@ -6,7 +6,7 @@ server { server_tokens off; location / { - root /var/www/debian; + root /srv/packages; index index.html; autoindex on; }