-
Notifications
You must be signed in to change notification settings - Fork 12
/
Copy pathsetup.py
38 lines (36 loc) · 1.32 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
from setuptools import setup
from pathlib import Path
# Load the package's long description for PyPI from the README that sits
# next to this setup.py, regardless of the current working directory.
readme_path = Path(__file__).absolute().parent.joinpath('README.md')
long_description = readme_path.read_text(encoding='utf-8')

setup(name='scrapy-s3pipeline',
      version='0.6.0',
      # Fixed typo: "Clous" -> "Cloud" in the PyPI short description.
      description='Scrapy pipeline to store chunked items into Amazon S3 or Google Cloud Storage bucket',
      long_description=long_description,
      long_description_content_type='text/markdown',
      classifiers=[
          'Development Status :: 4 - Beta',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.6',
          'Programming Language :: Python :: 3.7',
          'Programming Language :: Python :: 3.8',
          'Programming Language :: Python :: 3.9',
          'Framework :: Scrapy',
      ],
      keywords='scrapy pipeline aws s3 gcs serverless',
      url='https://github.com/orangain/scrapy-s3pipeline',
      author='orangain',
      author_email='[email protected]',
      license='MIT',
      packages=[
          's3pipeline',
          's3pipeline.strategies',
      ],
      install_requires=['Scrapy>=1.1'],
      # Storage backends are optional: install with e.g.
      # `pip install scrapy-s3pipeline[s3]` or `[gcs]`.
      extras_require={
          's3': ['boto3'],
          'gcs': ['google-cloud-storage'],
      },
      test_suite='nose.collector',
      tests_require=['nose'],
      zip_safe=False)