diff --git a/README.md b/README.md index 1c587e02e6eb6..07878e8fe74d4 100644 --- a/README.md +++ b/README.md @@ -1 +1,56 @@ -# LangChain \ No newline at end of file +# 🦜️🔗 LangChain + +⚡ Building applications with LLMs through composability ⚡ + +[![lint](https://github.com/hwchase17/langchain/actions/workflows/lint.yml/badge.svg)](https://github.com/hwchase17/langchain/actions/workflows/lint.yml) [![test](https://github.com/hwchase17/langchain/actions/workflows/test.yml/badge.svg)](https://github.com/hwchase17/langchain/actions/workflows/test.yml) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) + + + +## Quick Install + +`pip install langchain` + +## 🤔 What is this? + +Large language models (LLMs) are emerging as a transformative technology, enabling +developers to build applications that they previously could not. +But using these LLMs in isolation is often not enough to +create a truly powerful app - the real power comes when you are able to +combine them with other sources of computation or knowledge. + +This library is aimed at assisting in the development of those types of applications. +It aims to create: +1. a comprehensive collection of pieces you would ever want to combine +2. a flexible interface for combining pieces into a single comprehensive "chain" +3. a schema for easily saving and sharing those chains + +## 🚀 What can I do with this? + +This project was largely inspired by a few projects seen on Twitter for which we thought it would make sense to have more explicit tooling. A lot of the initial functionality was done in an attempt to recreate those. Those are: + +**[Self-ask-with-search](https://ofir.io/self-ask.pdf)** + +To recreate this paper, use the following code snippet or check out the [example notebook](examples/self_ask_with_search.ipynb).
+ +``` +from langchain import SelfAskWithSearchChain, OpenAI, SerpAPIChain + +llm = OpenAI(temperature=0) +search = SerpAPIChain() + +self_ask_with_search = SelfAskWithSearchChain(llm=llm, search_chain=search) + +self_ask_with_search.run("What is the hometown of the reigning men's U.S. Open champion?") +``` + +**[LLM Math](https://twitter.com/amasad/status/1568824744367259648?s=20&t=-7wxpXBJinPgDuyHLouP1w)** +To recreate this example, use the following code snippet or check out the [example notebook](examples/llm_math.ipynb). + +``` +from langchain import OpenAI, LLMMathChain + +llm = OpenAI(temperature=0) +llm_math = LLMMathChain(llm=llm) + +llm_math.run("How many of the integers between 0 and 99 inclusive are divisible by 8?") +``` \ No newline at end of file diff --git a/examples/llm_math.ipynb b/examples/llm_math.ipynb new file mode 100644 index 0000000000000..f07479f55fd26 --- /dev/null +++ b/examples/llm_math.ipynb @@ -0,0 +1,59 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "44e9ba31", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'Answer: 13\\n'" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain import OpenAI, LLMMathChain\n", + "\n", + "llm = OpenAI(temperature=0)\n", + "llm_math = LLMMathChain(llm=llm)\n", + "\n", + "llm_math.run(\"How many of the integers between 0 and 99 inclusive are divisible by 8?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f62f0c75", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + 
"nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/self_ask_with_search.ipynb b/examples/self_ask_with_search.ipynb new file mode 100644 index 0000000000000..b059bfc89eeff --- /dev/null +++ b/examples/self_ask_with_search.ipynb @@ -0,0 +1,74 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "7e3b513e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "What is the hometown of the reigning men's U.S. Open champion?\n", + "Are follow up questions needed here:\u001b[102m Yes.\n", + "Follow up: Who is the reigning men's U.S. Open champion?\u001b[0m\n", + "Intermediate answer: \u001b[106mCarlos Alcaraz\u001b[0m.\u001b[102m\n", + "Follow up: Where is Carlos Alcaraz from?\u001b[0m\n", + "Intermediate answer: \u001b[106mEl Palmar, Murcia, Spain\u001b[0m.\u001b[102m\n", + "So the final answer is: El Palmar, Murcia, Spain\u001b[0m" + ] + }, + { + "data": { + "text/plain": [ + "\"What is the hometown of the reigning men's U.S. Open champion?\\nAre follow up questions needed here: Yes.\\nFollow up: Who is the reigning men's U.S. Open champion?\\nIntermediate answer: Carlos Alcaraz.\\nFollow up: Where is Carlos Alcaraz from?\\nIntermediate answer: El Palmar, Murcia, Spain.\\nSo the final answer is: El Palmar, Murcia, Spain\"" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain import SelfAskWithSearchChain, OpenAI, SerpAPIChain\n", + "\n", + "llm = OpenAI(temperature=0)\n", + "search = SerpAPIChain()\n", + "\n", + "self_ask_with_search = SelfAskWithSearchChain(llm=llm, search_chain=search)\n", + "\n", + "self_ask_with_search.run(\"What is the hometown of the reigning men's U.S. 
Open champion?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6195fc82", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/langchain/__init__.py b/langchain/__init__.py index 7f43cb32ba075..7cf590520e452 100644 --- a/langchain/__init__.py +++ b/langchain/__init__.py @@ -1 +1,21 @@ """Main entrypoint into package.""" +from langchain.chains import ( + LLMChain, + LLMMathChain, + PythonChain, + SelfAskWithSearchChain, + SerpAPIChain, +) +from langchain.llms import Cohere, OpenAI +from langchain.prompt import Prompt + +__all__ = [ + "LLMChain", + "LLMMathChain", + "PythonChain", + "SelfAskWithSearchChain", + "SerpAPIChain", + "Cohere", + "OpenAI", + "Prompt", +] diff --git a/langchain/chains/__init__.py b/langchain/chains/__init__.py index 139a4bc8fa50d..c77010b95bb65 100644 --- a/langchain/chains/__init__.py +++ b/langchain/chains/__init__.py @@ -1 +1,14 @@ """Chains are easily reusable components which can be linked together.""" +from langchain.chains.llm import LLMChain +from langchain.chains.llm_math.base import LLMMathChain +from langchain.chains.python import PythonChain +from langchain.chains.self_ask_with_search.base import SelfAskWithSearchChain +from langchain.chains.serpapi import SerpAPIChain + +__all__ = [ + "LLMChain", + "LLMMathChain", + "PythonChain", + "SelfAskWithSearchChain", + "SerpAPIChain", +] diff --git a/langchain/chains/serpapi.py b/langchain/chains/serpapi.py index 6a9f4a17e5ad4..2c7c32dab7684 100644 --- a/langchain/chains/serpapi.py +++ b/langchain/chains/serpapi.py @@ -3,6 
+3,7 @@ Heavily borrowed from https://github.com/ofirpress/self-ask """ import os +import sys from typing import Any, Dict, List from pydantic import BaseModel, Extra, root_validator @@ -10,6 +11,20 @@ from langchain.chains.base import Chain +class HiddenPrints: + """Context manager to hide prints.""" + + def __enter__(self) -> None: + """Open file to pipe stdout to.""" + self._original_stdout = sys.stdout + sys.stdout = open(os.devnull, "w") + + def __exit__(self, *_: Any) -> None: + """Close file that stdout was piped to.""" + sys.stdout.close() + sys.stdout = self._original_stdout + + class SerpAPIChain(Chain, BaseModel): """Chain that calls SerpAPI.""" @@ -60,9 +75,9 @@ def _run(self, inputs: Dict[str, Any]) -> Dict[str, str]: "gl": "us", "hl": "en", } - - search = self.search_engine(params) - res = search.get_dict() + with HiddenPrints(): + search = self.search_engine(params) + res = search.get_dict() if "answer_box" in res.keys() and "answer" in res["answer_box"].keys(): toret = res["answer_box"]["answer"] diff --git a/langchain/llms/__init__.py b/langchain/llms/__init__.py index cb699ac1d9f38..2e1720f3cf8ad 100644 --- a/langchain/llms/__init__.py +++ b/langchain/llms/__init__.py @@ -1 +1,5 @@ """Wrappers on top of large language models.""" +from langchain.llms.cohere import Cohere +from langchain.llms.openai import OpenAI + +__all__ = ["Cohere", "OpenAI"] diff --git a/setup.py b/setup.py index 83611874923f5..207840b30a7e8 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ name="langchain", version_format="0.0.0", packages=find_packages(), - description="Building LLM empowered applications", + description="Building applications with LLMs through composability", install_requires=["pydantic"], long_description=long_description, license="MIT",