# setup.cfg (forked from deepset-ai/haystack)
[metadata]
name = farm-haystack
version = file: VERSION.txt
url = https://github.com/deepset-ai/haystack
project_urls =
    Docs: RTD = https://haystack.deepset.ai/overview/intro
    CI: GitHub = https://github.com/deepset-ai/haystack/actions
    GitHub: issues = https://github.com/deepset-ai/haystack/issues
    GitHub: repo = https://github.com/deepset-ai/haystack
description = Neural Question Answering & Semantic Search at Scale. Use modern transformer-based models like BERT to find answers in large document collections
long_description = file: README.md
long_description_content_type = text/markdown
keywords =
    QA
    Question-Answering
    Reader
    Retriever
    semantic-search
    search
    BERT
    roberta
    albert
    squad
    mrc
    transfer-learning
    language-model
    transformer
author = deepset.ai
author_email = [email protected]
license = Apache License 2.0
license_file = LICENSE
platforms = any
classifiers =
    Development Status :: 5 - Production/Stable
    Intended Audience :: Science/Research
    License :: Freely Distributable
    License :: OSI Approved :: Apache Software License
    Topic :: Scientific/Engineering :: Artificial Intelligence
    Operating System :: OS Independent
    Programming Language :: Python
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.7
    Programming Language :: Python :: 3.8
    Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
[options]
use_scm_version = True
python_requires = >=3.7, <4
packages = find:
setup_requires =
    setuptools
    wheel
install_requires =
    importlib-metadata; python_version < '3.8'
    torch>1.9,<1.11
    requests
    pydantic
    transformers==4.13.0
    nltk
    pandas
    # Utils
    dill # pickle extension for (de-)serialization
    tqdm # progress bars in model download and training scripts
    networkx # graphs library
    mmh3 # fast hashing function (murmurhash3)
    quantulum3 # quantities extraction from text
    azure-ai-formrecognizer==3.2.0b2 # forms reader
    # Preprocessing
    more_itertools # for windowing
    python-docx
    langdetect # language detection during file conversion
    tika # Apache Tika (text & metadata extractor)
    # See haystack/nodes/retriever/_embedding_encoder.py, _SentenceTransformersEmbeddingEncoder
    sentence-transformers>=0.4.0
    # for stats in run_classifier
    scipy>=1.3.2
    scikit-learn>=1.0.0
    # Metrics and logging
    seqeval
    mlflow<=1.13.1
    # Elasticsearch
    elasticsearch>=7.7,<=7.10
    elastic-apm
    # Not found in repo, to review:
    #uvloop==0.14; sys_platform != 'win32' and sys_platform != 'cygwin'
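# Illustrative note: a plain `pip install farm-haystack` installs only the core
# dependencies listed above; document stores and other optional tooling are pulled
# in through the extras defined in [options.extras_require] below.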
[options.packages.find]
exclude =
    rest_api*
    test*
    tutorials*
    ui*
[options.extras_require]
sql =
    sqlalchemy>=1.4.2
    sqlalchemy_utils
    psycopg2-binary; sys_platform != 'win32' and sys_platform != 'cygwin'
only-faiss =
    faiss-cpu>=1.6.3
faiss =
    farm-haystack[sql,only-faiss]
only-faiss-gpu =
    faiss-gpu>=1.6.3
faiss-gpu =
    farm-haystack[sql,only-faiss-gpu]
only-milvus1 =
    pymilvus<2.0.0 # See the Milvus version support matrix at https://github.com/milvus-io/pymilvus#install-pymilvus
milvus1 =
    farm-haystack[sql,only-milvus1]
only-milvus =
    pymilvus>=2.0.0 # See the Milvus version support matrix at https://github.com/milvus-io/pymilvus#install-pymilvus
milvus =
    farm-haystack[sql,only-milvus]
weaviate =
    weaviate-client==2.5.0
graphdb =
    SPARQLWrapper
docstores =
    farm-haystack[faiss,milvus1,weaviate,graphdb]
docstores-gpu =
    farm-haystack[faiss-gpu,milvus1,weaviate,graphdb]
crawler =
    selenium
    webdriver-manager
preprocessing =
    beautifulsoup4
    markdown
ocr =
    pytesseract==0.3.7
    pillow
    pdf2image==1.14.0
onnx =
    onnxruntime
    onnxruntime_tools
onnx-gpu =
    onnxruntime-gpu
    onnxruntime_tools
ray =
    ray>=1.9.1
colab =
    grpcio==1.43.0
dev =
    # Type check
    mypy
    types-Markdown
    types-requests
    types-PyYAML
    # Test
    pytest
    responses
    tox
    coverage
    python-multipart
    psutil
    # Linting
    pylint
    # Code formatting
    black[jupyter]
    # Documentation
    pydoc-markdown>=4,<5
    mkdocs
    jupytercontrib
    watchdog #==1.0.2
test =
    farm-haystack[docstores,crawler,preprocessing,ocr,ray,dev]
all =
    farm-haystack[docstores,crawler,preprocessing,ocr,ray,dev,onnx]
all-gpu =
    farm-haystack[docstores-gpu,crawler,preprocessing,ocr,ray,dev,onnx-gpu]
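# Illustrative usage: the extras above compose on top of the base install, e.g.
#   pip install "farm-haystack[faiss]"   # base package plus the sql and only-faiss extras
#   pip install "farm-haystack[all]"     # CPU docstores, crawler, preprocessing, ocr, ray, dev and onnx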
[tool:pytest]
testpaths =
    test
    rest_api/test
    ui/test
python_files =
    test_*.py
addopts =
    -vv
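# Illustrative: with this section a bare `pytest` invocation from the repository root
# collects test_*.py files from test/, rest_api/test and ui/test and applies -vv
# (extra-verbose output) by default.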
[mypy]
warn_return_any = false
warn_unused_configs = true
ignore_missing_imports = true
plugins = pydantic.mypy
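# Illustrative: `mypy haystack` picks this section up from setup.cfg; imports without
# type stubs are ignored rather than reported, and the pydantic.mypy plugin improves
# type checking of pydantic models.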
[tox]
requires = tox-venv
    setuptools >= 30.0.0
envlist = py36,py37
[testenv]
changedir = test
deps =
    coverage
    pytest
    pandas
setenv =
    COVERAGE_FILE = test-reports/.coverage
    PYTEST_ADDOPTS = --junitxml=test-reports/{envname}/junit.xml -vv
commands =
    coverage run --source haystack --parallel-mode -m pytest {posargs}
    coverage combine
    coverage report -m
    coverage html -d test-reports/coverage-html
    coverage xml -o test-reports/coverage.xml
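# Illustrative: running `tox -e py37` creates an isolated environment, switches into
# test/ and runs pytest under coverage, writing the JUnit XML and coverage reports
# into test-reports/ as configured above.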