chore: script that discovers minimum tested versions #9418

Closed
wants to merge 11 commits into from
185 changes: 185 additions & 0 deletions min_compatible_versions.csv
@@ -0,0 +1,185 @@
This file was generated by scripts/min_compatible_versions.py
pkg_name,min_version
Flask-Cache,~=0.13.1
Jinja2,~=2.11.0
SQLAlchemy,==2.0.22
WebTest,0
Werkzeug,<1.0
ai21,0
aiobotocore,~=1.4.2
aiofiles,0
aiohttp,~=3.7
aiohttp_jinja2,~=1.5.0
aiomysql,~=0.1.0
aiopg,~=0.16.0
aiosqlite,0
algoliasearch,~=2.5
anthropic,~=0.28
anyio,>=3.4.0
aredis,0
asgiref,~=3.0
astunparse,0
async_generator,~=1.10
asyncpg,~=0.22.0
asynctest,==0.13.0
attrs,>=20
blinker,0
boto3,0
botocore,~=1.13
bottle,>=0.12
bytecode,>=0.13.0
cassandra-driver,~=3.24.0
cattrs,<23.1.1
celery,~=4.4
cfn-lint,~=0.53.1
channels,~=3.0
cherrypy,>=17
click,==7.1.2
cohere,==4.57
confluent-kafka,~=1.9.2
coverage,0
cryptography,<39
daphne,0
databases,0
datadog-lambda,>=4.66.0
ddsketch,>=3.0.0
django,>=2.2
django-pylibmc,>=0.6
django-q,0
django-redis,>=4.5
django_hosts,~=4.0
djangorestframework,>=3.11
docker,0
dogpile.cache,~=0.9
dramatiq,0
elasticsearch,~=7.13.0
elasticsearch1,~=1.10.0
elasticsearch2,~=2.5.0
elasticsearch5,~=5.5.0
elasticsearch6,~=6.8.0
elasticsearch7,~=7.13.0
elasticsearch7[async],0
elasticsearch8,~=8.0.1
elasticsearch[async],0
envier,~=0.5
exceptiongroup,0
falcon,~=3.0
fastapi,~=0.64.0
flask,~=0.12.0
flask-caching,~=1.10.0
flask-login,~=0.6.2
gevent,~=20.12.0
googleapis-common-protos,0
graphene,~=3.0.0
graphql-core,~=3.2.0
graphql-relay,0
greenlet,~=1.0.0
grpcio,~=1.34.0
gunicorn,==20.0.4
gunicorn[gevent],0
httpretty,<1.1
httpx,~=0.17.0
huggingface-hub,0
hypothesis,<6.45.1
importlib-metadata,0
importlib_metadata,<5.0
itsdangerous,<2.0
jinja2,~=2.11.0
kombu,~=4.4
langchain,==0.0.192
langchain-aws,0
langchain-community,==0.0.14
langchain-core,==0.1.52
langchain-openai,==0.1.6
langchain-pinecone,==0.1.0
langchain_experimental,==0.0.47
langsmith,==0.1.58
logbook,~=1.0.0
loguru,~=0.4.0
mako,~=1.1.0
mariadb,~=1.0
markupsafe,<2.0
mock,0
molten,>=1.0
mongoengine,~=0.23
more_itertools,<8.11.0
moto,>=1.0
moto[all],<5.0
msgpack,~=1.0.0
mysql-connector-python,==8.0.5
mysqlclient,~=2.0
numexpr,0
openai,==0.26.5
openai[datalib],==1.30.1
"openai[embeddings,datalib]",==0.27.2
opensearch-py,0
opensearch-py[async],0
opensearch-py[requests],~=1.1.0
opentelemetry-api,>=1
opentelemetry-instrumentation-flask,<=0.37b0
opentracing,>=2.0.0
peewee,0
pillow,0
pinecone-client,==2.2.4
pony,0
protobuf,>=3
psutil,0
psycopg,~=3.0.18
psycopg2-binary,~=2.8.0
py-cpuinfo,~=8.0.0
pycryptodome,0
pyfakefs,0
pylibmc,~=1.6.2
pymemcache,~=3.4.2
pymongo,~=3.11
pymysql,~=0.10
pynamodb,~=5.0
pyodbc,~=4.0.31
pyramid,~=1.10
pysqlite3-binary,0
pytest,~=4.0
pytest-aiohttp,0
pytest-asyncio,~=0.21.1
pytest-bdd,>=4.0
pytest-benchmark,>=3.1.0
pytest-cov,==2.9.0
pytest-django,==3.10.0
pytest-mock,==2.0.0
pytest-randomly,0
pytest-sanic,~=1.6.2
python-consul,>=1.1
python-json-logger,==2.0.7
python-memcached,0
redis,~=2.0
redis-py-cluster,>=2.0
requests,~=2.20.0
requests-mock,>=1.4
responses,~=0.16.0
rq,~=1.8.0
sanic,~=20.12
sanic-testing,~=0.8.3
scikit-learn,==1.0.2
setuptools,<=67.6.0
simplejson,0
six,==1.12.0
snowflake-connector-python,~=2.3.0
sqlalchemy,~=1.2.18
starlette,~=0.14.0
structlog,~=20.2.0
tests/contrib/pyramid/pserve_app,0
tiktoken,0
tornado,~=4.5.0
tortoise-orm,0
typing-extensions,0
typing_extensions,0
urllib3,~=1.0
uwsgi,0
vcrpy,==4.2.1
vertica-python,>=0.6.0
websockets,<11.0
webtest,0
werkzeug,<1.0
wheel,0
xmltodict,>=0.12
yaaredis,~=2.0.0
yarl,~=1.0
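The generated file is a plain two-column CSV with a one-line provenance banner above the header row. As an illustration only (not part of this PR), a consumer could read it back into a mapping like this, assuming the file sits in the current working directory and follows the layout written by scripts/min_compatible_versions.py:

import csv

# Read the generated file back into a {package: min_version_spec} mapping.
with open("min_compatible_versions.csv", newline="") as f:
    reader = csv.reader(f)
    next(reader)  # skip the "This file was generated by ..." banner row
    next(reader)  # skip the "pkg_name,min_version" header row
    min_versions = {pkg: spec for pkg, spec in reader}

print(min_versions["flask"])  # "~=0.12.0"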
115 changes: 115 additions & 0 deletions scripts/min_compatible_versions.py
@@ -0,0 +1,115 @@
import csv
import pathlib
import sys
from typing import Dict
from typing import List
from typing import Set
from typing import Tuple

from packaging.version import parse as parse_version


sys.path.append(str(pathlib.Path(__file__).parent.parent.resolve()))
import riotfile # noqa:E402


OUT_FILENAME = "min_compatible_versions.csv"


def _format_version_specifiers(spec: Set[str]) -> Set[str]:
    # Split comma-joined specifiers apart and drop "!=" exclusions, which never define a minimum.
    return set([part for v in [v.split(",") for v in spec if v] for part in v if "!=" not in part])


def tree_pkgs_from_riot() -> Dict[str, Set[str]]:
    tree_pkgs = _tree_pkgs_from_riot(riotfile.venv)
    _merge_tree_pkgs(tree_pkgs, get_project_dependencies())
    return tree_pkgs


def _pkgs_with_relevant_compatibility(pkgs: Dict[str, List]) -> Dict[str, List]:
    return {pkg: versions for pkg, versions in pkgs.items() if "github.com" not in pkg}


def _node_has_relevant_compatibility(node: riotfile.Venv) -> bool:
    return node.command is None or "pytest" in node.command


def _split_pkg_and_spec(pkg_and_spec: str) -> Tuple[str, str]:
    earliest_separator_index = len(pkg_and_spec)
    for spec_char in ("<", "=", ">", "~"):
        if spec_char in pkg_and_spec:
            earliest_separator_index = min(earliest_separator_index, pkg_and_spec.index(spec_char))
    return pkg_and_spec[:earliest_separator_index], pkg_and_spec[earliest_separator_index:]


def get_project_dependencies() -> Dict[str, Set]:
    pyproject_path = (pathlib.Path(__file__).parent.parent / "pyproject.toml").resolve()
    with open(pyproject_path, "r") as f:
        pyproject_contents = f.readlines()
    in_deps = False
    project_dependencies = {}
    for line in pyproject_contents:
        if in_deps:
            if line.startswith("]"):
                in_deps = False
            else:
                # Drop environment markers (after ";") and surrounding quotes/commas before splitting.
                pkg, spec = _split_pkg_and_spec(line.strip().split(";")[0].strip('",'))
                project_dependencies[pkg] = set([spec] if spec else [])
        if line.startswith("dependencies ="):
            in_deps = True
    return project_dependencies


def _merge_tree_pkgs(tree_pkgs: Dict[str, Set], to_add: Dict[str, Set]):
    for pkg_name, versions in to_add.items():
        if pkg_name in tree_pkgs:
            tree_pkgs[pkg_name] = tree_pkgs[pkg_name].union(versions)
        else:
            tree_pkgs[pkg_name] = versions


def _tree_pkgs_from_riot(node: riotfile.Venv) -> Dict[str, Set]:
    if not _node_has_relevant_compatibility(node):
        return {}
    result = {
        pkg: _format_version_specifiers(set(versions))
        for pkg, versions in _pkgs_with_relevant_compatibility(node.pkgs).items()
    }
    for child_venv in node.venvs:
        _merge_tree_pkgs(result, _tree_pkgs_from_riot(child_venv))
    return result


def min_version_spec(version_specs: List[str]) -> str:
    # Compare only the numeric part of each specifier; operators such as "~=", "==", "<" are stripped first.
    min_numeric = ""
    min_spec = ""
    for spec in version_specs:
        numeric = parse_version(spec.strip("~==<>"))
        if not min_numeric or numeric < min_numeric:
            min_numeric = numeric
            min_spec = spec
    return min_spec


def write_out(all_pkgs: Dict[str, Set[str]]) -> None:
    with open(OUT_FILENAME, "w") as csvfile:
        csv_writer = csv.writer(csvfile, delimiter=",")
        csv_writer.writerow(["This file was generated by scripts/min_compatible_versions.py"])
        csv_writer.writerow(["pkg_name", "min_version"])
        for pkg, versions in sorted(all_pkgs.items()):
            # Packages with no version specifier at all fall back to "0" (any version).
            min_version = "0"
            if versions:
                min_version = str(min_version_spec(versions)).strip()
            print("%s\n\tTested versions: %s\n\tMinimum: %s" % (pkg, sorted(list(versions)), min_version))
            csv_writer.writerow([pkg, min_version])


def main():
"""Discover the minimum version of every package referenced in the riotfile

Writes to stdout and min_versions.csv
"""
write_out(tree_pkgs_from_riot())


if __name__ == "__main__":
    main()
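The script is intended to be run from the repository root (it appends the repo root to sys.path in order to import riotfile) and writes min_compatible_versions.csv into the current working directory. As a rough illustration of how the minimum is chosen, not part of the PR itself: min_version_spec compares only the numeric portion of each specifier, so an equivalent selection looks like the sketch below, assuming the packaging dependency is installed.

from packaging.version import parse as parse_version

# Operators such as "~=", "==", ">=", "<" are stripped before comparison,
# so only the numeric versions are ordered.
specs = ["~=2.11.0", ">=3.0", "==2.0.22"]
lowest = min(specs, key=lambda s: parse_version(s.strip("~==<>")))
print(lowest)  # ==2.0.22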