19 changes: 18 additions & 1 deletion pybossa/cloud_store_api/base_conn.py
@@ -163,6 +163,23 @@ def get_bucket(
): # pylint: disable=W0613
return BaseClientBucketAdapter(self, bucket_name)

def get_path(self, path='/', **kwargs):
"""
Get the full path by prepending host_suffix if it exists.
This method provides compatibility with the legacy boto2-style interface.
"""
host_suffix = getattr(self, 'host_suffix', '')
if host_suffix:
if not host_suffix.startswith('/'):
host_suffix = '/' + host_suffix
if not host_suffix.endswith('/') and path.startswith('/'):
return host_suffix + path
elif host_suffix.endswith('/') and path.startswith('/'):
return host_suffix + path[1:]
else:
return host_suffix + path
return path
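
For reviewers, a minimal sketch of the behaviour this adds, assuming the method lands on the `BaseConnection` class imported elsewhere in this PR; `object.__new__` is used only to skip the S3 client setup in `__init__`, and all suffixes/paths are illustrative:

```python
# Illustrative sketch only -- not part of the diff; values are hypothetical.
from pybossa.cloud_store_api.base_conn import BaseConnection

# Bypass __init__ so no real S3 client or credentials are needed for this check.
conn = object.__new__(BaseConnection)

conn.host_suffix = "object-store"
assert conn.get_path("/bucket/key") == "/object-store/bucket/key"

conn.host_suffix = "/object-store/"
assert conn.get_path("/bucket/key") == "/object-store/bucket/key"

conn.host_suffix = ""
assert conn.get_path("/bucket/key") == "/bucket/key"   # no suffix: path unchanged
```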

Review comment: Can this be removed? It seems this method is not used anywhere except in tests.

def new_key(self, bucket, path):
try:
self.client.put_object(Bucket=bucket, Key=path)
@@ -195,7 +212,7 @@ def copy_key(self, bucket, source_key, target_key, **kwargs):
"%s: %s, key %s. http status %d",
self.__class__.__name__,
str(e),
err_resp.get("Key", path),
err_resp.get("Key", target_key),
http_status,
)
raise
175 changes: 25 additions & 150 deletions pybossa/cloud_store_api/connection.py
@@ -1,24 +1,15 @@
from copy import deepcopy
import ssl
import sys
import time

from flask import current_app
from boto.auth_handler import AuthHandler
import boto.auth

from boto.exception import S3ResponseError
from boto.s3.key import Key
from boto.s3.bucket import Bucket
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
from boto.provider import Provider
import jwt
from botocore.config import Config
import boto3
from werkzeug.exceptions import BadRequest
from boto3.session import Session
from botocore.client import Config
from pybossa.cloud_store_api.base_conn import BaseConnection
from os import environ

from pybossa.cloud_store_api.proxied_s3_client import ProxiedS3Client
from pybossa.cloud_store_api.s3_client_wrapper import S3ClientWrapper


def check_store(store):
if not store:
@@ -59,60 +50,30 @@ def create_connection(**kwargs):
cert=kwargs.get("cert", False),
proxy_url=kwargs.get("proxy_url")
)
# Map legacy boto2 parameters to boto3 parameters
if 'host' in kwargs and 'endpoint_url' not in kwargs:
host = kwargs.pop('host')
port = kwargs.pop('port', None)
if port:
kwargs['endpoint_url'] = f"https://{host}:{port}"
else:
# Default to port 443 for HTTPS to maintain compatibility with old behavior
kwargs['endpoint_url'] = f"https://{host}:443"

if 'object_service' in kwargs:
current_app.logger.info("Calling ProxiedConnection")
conn = ProxiedConnection(**kwargs)
current_app.logger.info("Calling ProxiedS3Client")
# Map auth_headers to extra_headers for ProxiedS3Client compatibility
if 'auth_headers' in kwargs:
auth_headers = kwargs.pop('auth_headers')
# Convert auth_headers list of tuples to dict for extra_headers
if auth_headers:
kwargs['extra_headers'] = dict(auth_headers)
conn = ProxiedS3Client(**kwargs)
else:
current_app.logger.info("Calling CustomConnection")
conn = CustomConnection(**kwargs)
current_app.logger.info("Calling S3ClientWrapper")
conn = S3ClientWrapper(**kwargs)
return conn
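
For clarity, a small walk-through of the two legacy-to-boto3 conversions performed above, with hypothetical values (the host, port, and header tuple are illustrative, not taken from a real deployment):

```python
# Illustrative sketch only -- mirrors the mapping done inside create_connection above.
kwargs = {
    "host": "s3.internal.example.com",             # hypothetical boto2-style host/port
    "port": 8443,
    "auth_headers": [("x-custom-auth", "token")],  # hypothetical list of (header, value) tuples
}

# host/port -> endpoint_url (falls back to :443 when no port is supplied)
host = kwargs.pop("host")
port = kwargs.pop("port", None)
kwargs["endpoint_url"] = f"https://{host}:{port}" if port else f"https://{host}:443"

# auth_headers (list of tuples) -> extra_headers (dict)
kwargs["extra_headers"] = dict(kwargs.pop("auth_headers"))

print(kwargs["endpoint_url"])   # https://s3.internal.example.com:8443
print(kwargs["extra_headers"])  # {'x-custom-auth': 'token'}
```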


class CustomProvider(Provider):
"""Extend Provider to carry information about the end service provider, in
case the service is being proxied.
"""

def __init__(self, name, access_key=None, secret_key=None,
security_token=None, profile_name=None, object_service=None,
auth_headers=None):
self.object_service = object_service or name
self.auth_headers = auth_headers
super(CustomProvider, self).__init__(name, access_key, secret_key,
security_token, profile_name)


class CustomConnection(S3Connection):

def __init__(self, *args, **kwargs):
if not kwargs.get('calling_format'):
kwargs['calling_format'] = OrdinaryCallingFormat()

kwargs['provider'] = CustomProvider('aws',
kwargs.get('aws_access_key_id'),
kwargs.get('aws_secret_access_key'),
kwargs.get('security_token'),
kwargs.get('profile_name'),
kwargs.pop('object_service', None),
kwargs.pop('auth_headers', None))

kwargs['bucket_class'] = CustomBucket

ssl_no_verify = kwargs.pop('s3_ssl_no_verify', False)
self.host_suffix = kwargs.pop('host_suffix', '')

super(CustomConnection, self).__init__(*args, **kwargs)

if kwargs.get('is_secure', True) and ssl_no_verify:
self.https_validate_certificates = False
context = ssl._create_unverified_context()
self.http_connection_kwargs['context'] = context

def get_path(self, path='/', *args, **kwargs):
ret = super(CustomConnection, self).get_path(path, *args, **kwargs)
return self.host_suffix + ret


class CustomConnectionV2(BaseConnection):
def __init__(
self,
@@ -133,89 +94,3 @@ def __init__(
proxies={"https": proxy_url, "http": proxy_url},
),
)


class CustomBucket(Bucket):
"""Handle both 200 and 204 as response code"""

def delete_key(self, *args, **kwargs):
try:
super(CustomBucket, self).delete_key(*args, **kwargs)
except S3ResponseError as e:
if e.status != 200:
raise


class ProxiedKey(Key):

def should_retry(self, response, chunked_transfer=False):
if 200 <= response.status <= 299:
return True
return super(ProxiedKey, self).should_retry(response, chunked_transfer)


class ProxiedBucket(CustomBucket):

def __init__(self, *args, **kwargs):
super(ProxiedBucket, self).__init__(*args, **kwargs)
self.set_key_class(ProxiedKey)


class ProxiedConnection(CustomConnection):
"""Object Store connection through proxy API. Sets the proper headers and
creates the jwt; use the appropriate Bucket and Key classes.
"""

def __init__(self, client_id, client_secret, object_service, *args, **kwargs):
self.client_id = client_id
self.client_secret = client_secret
kwargs['object_service'] = object_service
super(ProxiedConnection, self).__init__(*args, **kwargs)
self.set_bucket_class(ProxiedBucket)

def make_request(self, method, bucket='', key='', headers=None, data='',
query_args=None, sender=None, override_num_retries=None,
retry_handler=None):
headers = headers or {}
headers['jwt'] = self.create_jwt(method, self.host, bucket, key)
headers['x-objectservice-id'] = self.provider.object_service.upper()
current_app.logger.info("Calling ProxiedConnection.make_request. headers %s", str(headers))
return super(ProxiedConnection, self).make_request(method, bucket, key,
headers, data, query_args, sender, override_num_retries,
retry_handler)

def create_jwt(self, method, host, bucket, key):
now = int(time.time())
path = self.get_path(self.calling_format.build_path_base(bucket, key))
current_app.logger.info("create_jwt called. method %s, host %s, bucket %s, key %s, path %s", method, host, str(bucket), str(key), str(path))
payload = {
'iat': now,
'nbf': now,
'exp': now + 300,
'method': method,
'iss': self.client_id,
'host': host,
'path': path,
'region': 'ny'
}
return jwt.encode(payload, self.client_secret, algorithm='HS256')
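
A side note on the removed code: the token produced here is a standard HS256 JWT, so the receiving proxy can validate it with PyJWT's `decode`. A hedged, self-contained sketch (issuer, secret, and claim values are hypothetical):

```python
# Illustrative counterpart to create_jwt above; all values are hypothetical.
import time
import jwt  # PyJWT

client_id, client_secret = "my-client", "my-secret"
now = int(time.time())
token = jwt.encode(
    {"iat": now, "nbf": now, "exp": now + 300, "method": "GET",
     "iss": client_id, "host": "s3.example.com", "path": "/bucket/key", "region": "ny"},
    client_secret, algorithm="HS256",
)

# The proxy side can check the signature, expiry window, and issuer like this:
claims = jwt.decode(token, client_secret, algorithms=["HS256"], issuer=client_id)
assert claims["path"] == "/bucket/key"
```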


class CustomAuthHandler(AuthHandler):
"""Implements sending of custom auth headers"""

capability = ['s3']

def __init__(self, host, config, provider):
if not provider.auth_headers:
raise boto.auth_handler.NotReadyToAuthenticate()
self._provider = provider
super(CustomAuthHandler, self).__init__(host, config, provider)

def add_auth(self, http_request, **kwargs):
headers = http_request.headers
for header, attr in self._provider.auth_headers:
headers[header] = getattr(self._provider, attr)

def sign_string(self, *args, **kwargs):
return ''