
Commit

Merge branch 'langchain-ai:master' into feature/document_translation
Sheepsta300 authored Oct 11, 2024
2 parents 4ba0b27 + 2a1029c commit 7051bca
Showing 2,018 changed files with 77,165 additions and 98,471 deletions.
24 changes: 10 additions & 14 deletions .github/ISSUE_TEMPLATE/bug-report.yml
@@ -96,25 +96,21 @@ body:
attributes:
label: System Info
description: |
Please share your system info with us.
Please share your system info with us. Do NOT skip this step and please don't trim
the output. Most users don't include enough information here and it makes it harder
for us to help you.
"pip freeze | grep langchain"
platform (windows / linux / mac)
python version
OR if you're on a recent version of langchain-core you can paste the output of:
Run the following command in your terminal and paste the output here:
python -m langchain_core.sys_info
placeholder: |
"pip freeze | grep langchain"
platform
python version
Alternatively, if you're on a recent version of langchain-core you can paste the output of:
or if you have an existing python interpreter running:
python -m langchain_core.sys_info
from langchain_core import sys_info
sys_info.print_sys_info()
These will only surface LangChain packages, don't forget to include any other relevant
packages you're using (if you're not sure what's relevant, you can paste the entire output of `pip freeze`).
alternatively, put the entire output of `pip freeze` here.
placeholder: |
python -m langchain_core.sys_info
validations:
required: true
2 changes: 1 addition & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
@@ -1,7 +1,7 @@
Thank you for contributing to LangChain!

- [ ] **PR title**: "package: description"
- Where "package" is whichever of langchain, community, core, experimental, etc. is being modified. Use "docs: ..." for purely docs changes, "templates: ..." for template changes, "infra: ..." for CI changes.
- Where "package" is whichever of langchain, community, core, etc. is being modified. Use "docs: ..." for purely docs changes, "templates: ..." for template changes, "infra: ..." for CI changes.
- Example: "community: add foobar LLM"


111 changes: 90 additions & 21 deletions .github/scripts/check_diff.py
@@ -2,20 +2,27 @@
import json
import os
import sys
import tomllib
from collections import defaultdict
from typing import Dict, List, Set
from pathlib import Path
import tomllib

from get_min_versions import get_min_version_from_toml


LANGCHAIN_DIRS = [
"libs/core",
"libs/text-splitters",
"libs/langchain",
"libs/community",
"libs/experimental",
]

# when set to True, we are ignoring core dependents
# in order to be able to get CI to pass for each individual
# package that depends on core
# e.g. if you touch core, we don't then add textsplitters/etc to CI
IGNORE_CORE_DEPENDENTS = False

# ignored partners are removed from dependents
# but still run if directly edited
IGNORED_PARTNERS = [
@@ -99,44 +106,101 @@ def add_dependents(dirs_to_eval: Set[str], dependents: dict) -> List[str]:


def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
if dir_ == "libs/core":
return [
{"working-directory": dir_, "python-version": f"3.{v}"}
for v in range(8, 13)
]
min_python = "3.8"
max_python = "3.12"
if job == "test-pydantic":
return _get_pydantic_test_configs(dir_)

if dir_ == "libs/core":
py_versions = ["3.9", "3.10", "3.11", "3.12"]
# custom logic for specific directories
if dir_ == "libs/partners/milvus":
elif dir_ == "libs/partners/milvus":
# milvus poetry doesn't allow 3.12 because they
# declare deps in funny way
max_python = "3.11"
py_versions = ["3.9", "3.11"]

if dir_ in ["libs/community", "libs/langchain"] and job == "extended-tests":
elif dir_ in ["libs/community", "libs/langchain"] and job == "extended-tests":
# community extended test resolution in 3.12 is slow
# even in uv
max_python = "3.11"
py_versions = ["3.9", "3.11"]

if dir_ == "libs/community" and job == "compile-integration-tests":
elif dir_ == "libs/community" and job == "compile-integration-tests":
# community integration deps are slow in 3.12
max_python = "3.11"
py_versions = ["3.9", "3.11"]
else:
py_versions = ["3.9", "3.12"]

return [
{"working-directory": dir_, "python-version": min_python},
{"working-directory": dir_, "python-version": max_python},
return [{"working-directory": dir_, "python-version": py_v} for py_v in py_versions]


def _get_pydantic_test_configs(
dir_: str, *, python_version: str = "3.11"
) -> List[Dict[str, str]]:
with open("./libs/core/poetry.lock", "rb") as f:
core_poetry_lock_data = tomllib.load(f)
for package in core_poetry_lock_data["package"]:
if package["name"] == "pydantic":
core_max_pydantic_minor = package["version"].split(".")[1]
break

with open(f"./{dir_}/poetry.lock", "rb") as f:
dir_poetry_lock_data = tomllib.load(f)

for package in dir_poetry_lock_data["package"]:
if package["name"] == "pydantic":
dir_max_pydantic_minor = package["version"].split(".")[1]
break

core_min_pydantic_version = get_min_version_from_toml(
"./libs/core/pyproject.toml", "release", python_version, include=["pydantic"]
)["pydantic"]
core_min_pydantic_minor = (
core_min_pydantic_version.split(".")[1]
if "." in core_min_pydantic_version
else "0"
)
dir_min_pydantic_version = get_min_version_from_toml(
f"./{dir_}/pyproject.toml", "release", python_version, include=["pydantic"]
).get("pydantic", "0.0.0")
dir_min_pydantic_minor = (
dir_min_pydantic_version.split(".")[1]
if "." in dir_min_pydantic_version
else "0"
)

custom_mins = {
# depends on pydantic-settings 2.4 which requires pydantic 2.7
"libs/community": 7,
}

max_pydantic_minor = min(
int(dir_max_pydantic_minor),
int(core_max_pydantic_minor),
)
min_pydantic_minor = max(
int(dir_min_pydantic_minor),
int(core_min_pydantic_minor),
custom_mins.get(dir_, 0),
)

configs = [
{
"working-directory": dir_,
"pydantic-version": f"2.{v}.0",
"python-version": python_version,
}
for v in range(min_pydantic_minor, max_pydantic_minor + 1)
]
return configs


def _get_configs_for_multi_dirs(
job: str, dirs_to_run: List[str], dependents: dict
job: str, dirs_to_run: Dict[str, Set[str]], dependents: dict
) -> List[Dict[str, str]]:
if job == "lint":
dirs = add_dependents(
dirs_to_run["lint"] | dirs_to_run["test"] | dirs_to_run["extended-test"],
dependents,
)
elif job in ["test", "compile-integration-tests", "dependencies"]:
elif job in ["test", "compile-integration-tests", "dependencies", "test-pydantic"]:
dirs = add_dependents(
dirs_to_run["test"] | dirs_to_run["extended-test"], dependents
)
@@ -165,6 +229,7 @@ def _get_configs_for_multi_dirs(
dirs_to_run["lint"] = all_package_dirs()
dirs_to_run["test"] = all_package_dirs()
dirs_to_run["extended-test"] = set(LANGCHAIN_DIRS)

for file in files:
if any(
file.startswith(dir_)
@@ -182,8 +247,12 @@ def _get_configs_for_multi_dirs(
if any(file.startswith(dir_) for dir_ in LANGCHAIN_DIRS):
# add that dir and all dirs after in LANGCHAIN_DIRS
# for extended testing

found = False
for dir_ in LANGCHAIN_DIRS:
if dir_ == "libs/core" and IGNORE_CORE_DEPENDENTS:
dirs_to_run["extended-test"].add(dir_)
continue
if file.startswith(dir_):
found = True
if found:
@@ -224,7 +293,6 @@ def _get_configs_for_multi_dirs(

# we now have dirs_by_job
# todo: clean this up

map_job_to_configs = {
job: _get_configs_for_multi_dirs(job, dirs_to_run, dependents)
for job in [
Expand All @@ -233,6 +301,7 @@ def _get_configs_for_multi_dirs(
"extended-tests",
"compile-integration-tests",
"dependencies",
"test-pydantic",
]
}
map_job_to_configs["test-doc-imports"] = (
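Taken together, these helpers translate each changed directory into matrix entries for the workflow. A rough sketch of the shape of that output is below; the directory name and version numbers are illustrative assumptions, since the real ranges are resolved from poetry.lock and pyproject.toml at runtime.

# Sketch of the matrix entries _get_configs_for_single_dir produces for a
# typical directory (default case: test on the min and max supported Pythons).
example_test_configs = [
    {"working-directory": "libs/langchain", "python-version": "3.9"},
    {"working-directory": "libs/langchain", "python-version": "3.12"},
]

# Sketch of _get_pydantic_test_configs output: one entry per pydantic 2.x minor
# between the resolved minimum and maximum (illustrative range shown).
example_pydantic_configs = [
    {"working-directory": "libs/langchain", "pydantic-version": "2.7.0", "python-version": "3.11"},
    {"working-directory": "libs/langchain", "pydantic-version": "2.8.0", "python-version": "3.11"},
    {"working-directory": "libs/langchain", "pydantic-version": "2.9.0", "python-version": "3.11"},
]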
2 changes: 1 addition & 1 deletion .github/scripts/check_prerelease_dependencies.py
@@ -11,7 +11,7 @@

# see if we're releasing an rc
version = toml_data["tool"]["poetry"]["version"]
releasing_rc = "rc" in version
releasing_rc = "rc" in version or "dev" in version

# if not, iterate through dependencies and make sure none allow prereleases
if not releasing_rc:
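The widened check now treats dev builds as prereleases as well as release candidates. A minimal sketch of how the new condition classifies version strings (the sample values are assumptions, not taken from the repository):

# Sketch: which version strings the new check treats as a prerelease.
for version in ["0.3.0", "0.3.0rc1", "0.3.0.dev2"]:
    releasing_rc = "rc" in version or "dev" in version
    print(version, "->", releasing_rc)
# 0.3.0 -> False, 0.3.0rc1 -> True, 0.3.0.dev2 -> True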
53 changes: 48 additions & 5 deletions .github/scripts/get_min_versions.py
@@ -1,4 +1,5 @@
import sys
from typing import Optional

if sys.version_info >= (3, 11):
import tomllib
@@ -7,6 +8,9 @@
import tomli as tomllib

from packaging.version import parse as parse_version
from packaging.specifiers import SpecifierSet
from packaging.version import Version

import re

MIN_VERSION_LIBS = [
@@ -17,7 +21,14 @@
"SQLAlchemy",
]

SKIP_IF_PULL_REQUEST = ["langchain-core"]
# some libs only get checked on release because of simultaneous changes in
# multiple libs
SKIP_IF_PULL_REQUEST = [
"langchain-core",
"langchain-text-splitters",
"langchain",
"langchain-community",
]


def get_min_version(version: str) -> str:
@@ -45,7 +56,13 @@ def get_min_version(version: str) -> str:
raise ValueError(f"Unrecognized version format: {version}")


def get_min_version_from_toml(toml_path: str, versions_for: str):
def get_min_version_from_toml(
toml_path: str,
versions_for: str,
python_version: str,
*,
include: Optional[list] = None,
):
# Parse the TOML file
with open(toml_path, "rb") as file:
toml_data = tomllib.load(file)
@@ -57,18 +74,26 @@ def get_min_version_from_toml(toml_path: str, versions_for: str):
min_versions = {}

# Iterate over the libs in MIN_VERSION_LIBS
for lib in MIN_VERSION_LIBS:
for lib in set(MIN_VERSION_LIBS + (include or [])):
if versions_for == "pull_request" and lib in SKIP_IF_PULL_REQUEST:
# some libs only get checked on release because of simultaneous
# changes
# changes in multiple libs
continue
# Check if the lib is present in the dependencies
if lib in dependencies:
if include and lib not in include:
continue
# Get the version string
version_string = dependencies[lib]

if isinstance(version_string, dict):
version_string = version_string["version"]
if isinstance(version_string, list):
version_string = [
vs
for vs in version_string
if check_python_version(python_version, vs["python"])
][0]["version"]

# Use parse_version to get the minimum supported version from version_string
min_version = get_min_version(version_string)
@@ -79,13 +104,31 @@ def get_min_version_from_toml(toml_path: str, versions_for: str):
return min_versions


def check_python_version(version_string, constraint_string):
"""
Check if the given Python version matches the given constraints.
:param version_string: A string representing the Python version (e.g. "3.8.5").
:param constraint_string: A string representing the package's Python version constraints (e.g. ">=3.6, <4.0").
:return: True if the version matches the constraints, False otherwise.
"""
try:
version = Version(version_string)
constraints = SpecifierSet(constraint_string)
return version in constraints
except Exception as e:
print(f"Error: {e}")
return False


if __name__ == "__main__":
# Get the TOML file path from the command line argument
toml_file = sys.argv[1]
versions_for = sys.argv[2]
python_version = sys.argv[3]
assert versions_for in ["release", "pull_request"]

# Call the function to get the minimum versions
min_versions = get_min_version_from_toml(toml_file, versions_for)
min_versions = get_min_version_from_toml(toml_file, versions_for, python_version)

print(" ".join([f"{lib}=={version}" for lib, version in min_versions.items()]))
