Merge branch 'master' into feature/azure_face
efriis authored Dec 16, 2024
2 parents 2a53e4a + fb44e74 commit f369267
Showing 2,800 changed files with 92,439 additions and 85,527 deletions.
2 changes: 2 additions & 0 deletions .github/CODEOWNERS
@@ -0,0 +1,2 @@
/.github/ @efriis @baskaryan @ccurme
/libs/packages.yml @efriis
2 changes: 1 addition & 1 deletion .github/DISCUSSION_TEMPLATE/q-a.yml
@@ -22,7 +22,7 @@ body:
if there's another way to solve your problem:
[LangChain documentation with the integrated search](https://python.langchain.com/docs/get_started/introduction),
[API Reference](https://api.python.langchain.com/en/stable/),
[API Reference](https://python.langchain.com/api_reference/),
[GitHub search](https://github.com/langchain-ai/langchain),
[LangChain Github Discussions](https://github.com/langchain-ai/langchain/discussions),
[LangChain Github Issues](https://github.com/langchain-ai/langchain/issues?q=is%3Aissue),
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug-report.yml
@@ -16,7 +16,7 @@ body:
if there's another way to solve your problem:
[LangChain documentation with the integrated search](https://python.langchain.com/docs/get_started/introduction),
[API Reference](https://api.python.langchain.com/en/stable/),
[API Reference](https://python.langchain.com/api_reference/),
[GitHub search](https://github.com/langchain-ai/langchain),
[LangChain Github Discussions](https://github.com/langchain-ai/langchain/discussions),
[LangChain Github Issues](https://github.com/langchain-ai/langchain/issues?q=is%3Aissue),
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/documentation.yml
@@ -21,7 +21,7 @@ body:
place to ask your question:
[LangChain documentation with the integrated search](https://python.langchain.com/docs/get_started/introduction),
[API Reference](https://api.python.langchain.com/en/stable/),
[API Reference](https://python.langchain.com/api_reference/),
[GitHub search](https://github.com/langchain-ai/langchain),
[LangChain Github Discussions](https://github.com/langchain-ai/langchain/discussions),
[LangChain Github Issues](https://github.com/langchain-ai/langchain/issues?q=is%3Aissue),
2 changes: 1 addition & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
@@ -1,7 +1,7 @@
Thank you for contributing to LangChain!

- [ ] **PR title**: "package: description"
- Where "package" is whichever of langchain, community, core, etc. is being modified. Use "docs: ..." for purely docs changes, "templates: ..." for template changes, "infra: ..." for CI changes.
- Where "package" is whichever of langchain, community, core, etc. is being modified. Use "docs: ..." for purely docs changes, "infra: ..." for CI changes.
- Example: "community: add foobar LLM"


35 changes: 26 additions & 9 deletions .github/scripts/check_diff.py
@@ -32,6 +32,10 @@
"huggingface",
]

PY_312_MAX_PACKAGES = [
"libs/partners/huggingface", # https://github.com/pytorch/pytorch/issues/130249
]


def all_package_dirs() -> Set[str]:
return {
@@ -110,23 +114,30 @@ def _get_configs_for_single_dir(job: str, dir_: str) -> List[Dict[str, str]]:
return _get_pydantic_test_configs(dir_)

if dir_ == "libs/core":
py_versions = ["3.9", "3.10", "3.11", "3.12"]
py_versions = ["3.9", "3.10", "3.11", "3.12", "3.13"]
# custom logic for specific directories
elif dir_ == "libs/partners/milvus":
# milvus poetry doesn't allow 3.12 because they
# declare deps in funny way
py_versions = ["3.9", "3.11"]

elif dir_ in ["libs/community", "libs/langchain"] and job == "extended-tests":
# community extended test resolution in 3.12 is slow
# even in uv
py_versions = ["3.9", "3.11"]
elif dir_ in PY_312_MAX_PACKAGES:
py_versions = ["3.9", "3.12"]

elif dir_ == "libs/langchain" and job == "extended-tests":
py_versions = ["3.9", "3.13"]

elif dir_ == "libs/community" and job == "extended-tests":
py_versions = ["3.9", "3.12"]

elif dir_ == "libs/community" and job == "compile-integration-tests":
# community integration deps are slow in 3.12
py_versions = ["3.9", "3.11"]
else:
elif dir_ == ".":
# unable to install with 3.13 because tokenizers doesn't support 3.13 yet
py_versions = ["3.9", "3.12"]
else:
py_versions = ["3.9", "3.13"]

return [{"working-directory": dir_, "python-version": py_v} for py_v in py_versions]

@@ -261,15 +272,19 @@ def _get_configs_for_multi_dirs(
# TODO: update to include all packages that rely on standard-tests (all partner packages)
# note: won't run on external repo partners
dirs_to_run["lint"].add("libs/standard-tests")
dirs_to_run["test"].add("libs/standard-tests")
dirs_to_run["lint"].add("libs/cli")
dirs_to_run["test"].add("libs/cli")
dirs_to_run["test"].add("libs/partners/mistralai")
dirs_to_run["test"].add("libs/partners/openai")
dirs_to_run["test"].add("libs/partners/anthropic")
dirs_to_run["test"].add("libs/partners/fireworks")
dirs_to_run["test"].add("libs/partners/groq")

elif file.startswith("libs/cli"):
# todo: add cli makefile
pass
dirs_to_run["lint"].add("libs/cli")
dirs_to_run["test"].add("libs/cli")

elif file.startswith("libs/partners"):
partner_dir = file.split("/")[2]
if os.path.isdir(f"libs/partners/{partner_dir}") and [
@@ -279,12 +294,14 @@
] != ["README.md"]:
dirs_to_run["test"].add(f"libs/partners/{partner_dir}")
# Skip if the directory was deleted or is just a tombstone readme
elif file == "libs/packages.yml":
continue
elif file.startswith("libs/"):
raise ValueError(
f"Unknown lib: {file}. check_diff.py likely needs "
"an update for this new library!"
)
elif any(file.startswith(p) for p in ["docs/", "templates/", "cookbook/"]):
elif any(file.startswith(p) for p in ["docs/", "cookbook/"]):
if file.startswith("docs/"):
docs_edited = True
dirs_to_run["lint"].add(".")
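The net effect of the check_diff.py changes above is a wider Python test matrix: most packages now run on 3.9 and 3.13, with explicit carve-outs for directories capped at 3.12. Below is a minimal sketch of the matrix entries that logic produces; it is a simplified stand-in for illustration only, omitting the pydantic, Poetry, and root-directory special cases handled by the real script.

```python
# Simplified stand-in for the updated _get_configs_for_single_dir logic
# (illustration only; the real script handles more cases).
from typing import Dict, List

PY_312_MAX_PACKAGES = ["libs/partners/huggingface"]


def configs_for_dir(job: str, dir_: str) -> List[Dict[str, str]]:
    if dir_ == "libs/core":
        py_versions = ["3.9", "3.10", "3.11", "3.12", "3.13"]
    elif dir_ in PY_312_MAX_PACKAGES:
        py_versions = ["3.9", "3.12"]
    elif dir_ == "libs/langchain" and job == "extended-tests":
        py_versions = ["3.9", "3.13"]
    elif dir_ == "libs/community" and job == "extended-tests":
        py_versions = ["3.9", "3.12"]
    else:
        py_versions = ["3.9", "3.13"]
    return [{"working-directory": dir_, "python-version": v} for v in py_versions]


print(configs_for_dir("test", "libs/core"))
# five entries, one per Python version from 3.9 through 3.13
```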
96 changes: 74 additions & 22 deletions .github/scripts/get_min_versions.py
@@ -7,17 +7,23 @@
# for python 3.10 and below, which doesn't have stdlib tomllib
import tomli as tomllib

from packaging.version import parse as parse_version
from packaging.specifiers import SpecifierSet
from packaging.version import Version


import requests
from packaging.version import parse
from typing import List

import re


MIN_VERSION_LIBS = [
"langchain-core",
"langchain-community",
"langchain",
"langchain-text-splitters",
"numpy",
"SQLAlchemy",
]

@@ -31,29 +37,61 @@
]


def get_min_version(version: str) -> str:
# base regex for x.x.x with cases for rc/post/etc
# valid strings: https://peps.python.org/pep-0440/#public-version-identifiers
vstring = r"\d+(?:\.\d+){0,2}(?:(?:a|b|rc|\.post|\.dev)\d+)?"
# case ^x.x.x
_match = re.match(f"^\\^({vstring})$", version)
if _match:
return _match.group(1)
def get_pypi_versions(package_name: str) -> List[str]:
"""
Fetch all available versions for a package from PyPI.
Args:
package_name (str): Name of the package
Returns:
List[str]: List of all available versions
# case >=x.x.x,<y.y.y
_match = re.match(f"^>=({vstring}),<({vstring})$", version)
if _match:
_min = _match.group(1)
_max = _match.group(2)
assert parse_version(_min) < parse_version(_max)
return _min
Raises:
requests.exceptions.RequestException: If PyPI API request fails
KeyError: If package not found or response format unexpected
"""
pypi_url = f"https://pypi.org/pypi/{package_name}/json"
response = requests.get(pypi_url)
response.raise_for_status()
return list(response.json()["releases"].keys())

# case x.x.x
_match = re.match(f"^({vstring})$", version)
if _match:
return _match.group(1)

raise ValueError(f"Unrecognized version format: {version}")
def get_minimum_version(package_name: str, spec_string: str) -> Optional[str]:
"""
Find the minimum published version that satisfies the given constraints.
Args:
package_name (str): Name of the package
spec_string (str): Version specification string (e.g., ">=0.2.43,<0.4.0,!=0.3.0")
Returns:
Optional[str]: Minimum compatible version or None if no compatible version found
"""
# rewrite occurrences of ^0.0.z to 0.0.z (can be anywhere in constraint string)
spec_string = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", spec_string)
# rewrite occurrences of ^0.y.z to >=0.y.z,<0.y+1 (can be anywhere in constraint string)
for y in range(1, 10):
spec_string = re.sub(rf"\^0\.{y}\.(\d+)", rf">=0.{y}.\1,<0.{y+1}", spec_string)
# rewrite occurrences of ^x.y.z to >=x.y.z,<x+1.0.0 (can be anywhere in constraint string)
for x in range(1, 10):
spec_string = re.sub(
rf"\^{x}\.(\d+)\.(\d+)", rf">={x}.\1.\2,<{x+1}", spec_string
)

spec_set = SpecifierSet(spec_string)
all_versions = get_pypi_versions(package_name)

valid_versions = []
for version_str in all_versions:
try:
version = parse(version_str)
if spec_set.contains(version):
valid_versions.append(version)
except ValueError:
continue

return str(min(valid_versions)) if valid_versions else None


def get_min_version_from_toml(
@@ -96,7 +134,7 @@ def get_min_version_from_toml(
][0]["version"]

# Use parse_version to get the minimum supported version from version_string
min_version = get_min_version(version_string)
min_version = get_minimum_version(lib, version_string)

# Store the minimum version in the min_versions dictionary
min_versions[lib] = min_version
@@ -112,6 +150,20 @@ def check_python_version(version_string, constraint_string):
:param constraint_string: A string representing the package's Python version constraints (e.g. ">=3.6, <4.0").
:return: True if the version matches the constraints, False otherwise.
"""

# rewrite occurrences of ^0.0.z to 0.0.z (can be anywhere in constraint string)
constraint_string = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", constraint_string)
# rewrite occurrences of ^0.y.z to >=0.y.z,<0.y+1.0 (can be anywhere in constraint string)
for y in range(1, 10):
constraint_string = re.sub(
rf"\^0\.{y}\.(\d+)", rf">=0.{y}.\1,<0.{y+1}.0", constraint_string
)
# rewrite occurrences of ^x.y.z to >=x.y.z,<x+1.0.0 (can be anywhere in constraint string)
for x in range(1, 10):
constraint_string = re.sub(
rf"\^{x}\.0\.(\d+)", rf">={x}.0.\1,<{x+1}.0.0", constraint_string
)

try:
version = Version(version_string)
constraints = SpecifierSet(constraint_string)
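Taken together, the get_min_versions.py changes replace the regex-only minimum-version parser with a PyPI-backed resolver: caret constraints are first rewritten into PEP 440 ranges, then every published release is filtered through a SpecifierSet and the oldest surviving version wins. The sketch below condenses that flow; it assumes network access to PyPI, the helper names are illustrative, and the per-digit rewrite loops above are collapsed into generalized regexes.

```python
# Condensed sketch of the new minimum-version resolution (illustrative names).
import re
from typing import Optional

import requests
from packaging.specifiers import SpecifierSet
from packaging.version import InvalidVersion, parse


def caret_to_pep440(spec: str) -> str:
    # ^0.0.z -> 0.0.z ; ^0.y.z -> >=0.y.z,<0.(y+1) ; ^x.y.z -> >=x.y.z,<(x+1)
    spec = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", spec)
    spec = re.sub(
        r"\^0\.(\d+)\.(\d+)", lambda m: f">=0.{m[1]}.{m[2]},<0.{int(m[1]) + 1}", spec
    )
    return re.sub(
        r"\^([1-9]\d*)\.(\d+)\.(\d+)",
        lambda m: f">={m[1]}.{m[2]}.{m[3]},<{int(m[1]) + 1}",
        spec,
    )


def minimum_published_version(package: str, spec: str) -> Optional[str]:
    spec_set = SpecifierSet(caret_to_pep440(spec))
    releases = requests.get(f"https://pypi.org/pypi/{package}/json").json()["releases"]
    valid = []
    for version_str in releases:
        try:
            version = parse(version_str)
        except InvalidVersion:
            continue
        if spec_set.contains(version):
            valid.append(version)
    return str(min(valid)) if valid else None


# e.g. minimum_published_version("langchain-core", "^0.3.15") rewrites the caret
# to ">=0.3.15,<0.4" and returns the oldest matching release published on PyPI.
```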
89 changes: 89 additions & 0 deletions .github/scripts/prep_api_docs_build.py
@@ -0,0 +1,89 @@
#!/usr/bin/env python
"""Script to sync libraries from various repositories into the main langchain repository."""

import os
import shutil
import yaml
from pathlib import Path
from typing import Dict, Any


def load_packages_yaml() -> Dict[str, Any]:
"""Load and parse the packages.yml file."""
with open("langchain/libs/packages.yml", "r") as f:
return yaml.safe_load(f)


def get_target_dir(package_name: str) -> Path:
"""Get the target directory for a given package."""
package_name_short = package_name.replace("langchain-", "")
base_path = Path("langchain/libs")
if package_name_short == "experimental":
return base_path / "experimental"
return base_path / "partners" / package_name_short


def clean_target_directories(packages: list) -> None:
"""Remove old directories that will be replaced."""
for package in packages:

target_dir = get_target_dir(package["name"])
if target_dir.exists():
print(f"Removing {target_dir}")
shutil.rmtree(target_dir)


def move_libraries(packages: list) -> None:
"""Move libraries from their source locations to the target directories."""
for package in packages:

repo_name = package["repo"].split("/")[1]
source_path = package["path"]
target_dir = get_target_dir(package["name"])

# Handle root path case
if source_path == ".":
source_dir = repo_name
else:
source_dir = f"{repo_name}/{source_path}"

print(f"Moving {source_dir} to {target_dir}")

# Ensure target directory exists
os.makedirs(os.path.dirname(target_dir), exist_ok=True)

try:
# Move the directory
shutil.move(source_dir, target_dir)
except Exception as e:
print(f"Error moving {source_dir} to {target_dir}: {e}")


def main():
"""Main function to orchestrate the library sync process."""
try:
# Load packages configuration
package_yaml = load_packages_yaml()
packages = [
p
for p in package_yaml["packages"]
if not p.get("disabled", False)
and p["repo"].startswith("langchain-ai/")
and p["repo"] != "langchain-ai/langchain"
]

# Clean target directories
clean_target_directories(packages)

# Move libraries to their new locations
move_libraries(packages)

print("Library sync completed successfully!")

except Exception as e:
print(f"Error during library sync: {e}")
raise


if __name__ == "__main__":
main()
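The sync script above assumes libs/packages.yml is a registry of entries with name, repo, path, and an optional disabled flag. The sketch below illustrates that assumed shape and the filter applied in main(); the entries are hypothetical, not the real registry contents.

```python
# Hypothetical packages.yml entries, shown as the dicts yaml.safe_load would
# produce; only the fields the script reads are included.
packages = [
    {"name": "langchain-openai", "repo": "langchain-ai/langchain", "path": "libs/partners/openai"},
    {"name": "langchain-foo", "repo": "langchain-ai/langchain-foo", "path": "libs/foo"},
    {"name": "langchain-bar", "repo": "someone-else/langchain-bar", "path": ".", "disabled": True},
]

# Same filter as main(): keep langchain-ai packages hosted outside the monorepo
# that are not disabled.
selected = [
    p
    for p in packages
    if not p.get("disabled", False)
    and p["repo"].startswith("langchain-ai/")
    and p["repo"] != "langchain-ai/langchain"
]
print([p["name"] for p in selected])  # ['langchain-foo']
```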
3 changes: 2 additions & 1 deletion .github/workflows/_compile_integration_test.yml
@@ -13,14 +13,15 @@ on:
description: "Python version to use"

env:
POETRY_VERSION: "1.7.1"
POETRY_VERSION: "1.8.4"

jobs:
build:
defaults:
run:
working-directory: ${{ inputs.working-directory }}
runs-on: ubuntu-latest
timeout-minutes: 20
name: "poetry run pytest -m compile tests/integration_tests #${{ inputs.python-version }}"
steps:
- uses: actions/checkout@v4