Skip to content

Commit

Permalink
chat(): add models "llama-3-70b" and "mixtral-8x7b" (#226)
Browse files Browse the repository at this point in the history
* chat(): add models "llama-3-70b" and "mixtral-8x7b"

* Update tests

* CI: update pip

* CI: fail-fast: false
  • Loading branch information
deedy5 authored Jun 12, 2024
1 parent 658319b commit cef67ed
Show file tree
Hide file tree
Showing 7 changed files with 42 additions and 10 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ jobs:
runs-on: ${{ matrix.os }}

strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.12"]
os: [ubuntu-latest, macos-latest, windows-latest]
Expand All @@ -27,6 +28,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install -U pip
python -m pip install .[lxml,dev]
- name: Ruff
run: |
Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,8 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str:
Args:
keywords (str): The initial message or question to send to the AI.
model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5".
model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b".
Defaults to "gpt-3.5".
Returns:
str: The response from the AI.
Expand Down
2 changes: 1 addition & 1 deletion duckduckgo_search/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def version():
def chat(save, proxy):
"""CLI function to perform an interactive AI chat using DuckDuckGo API."""
cache_file = "ddgs_chat_conversation.json"
models = ["gpt-3.5", "claude-3-haiku"]
models = ["gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b"]
client = DDGS(proxy=proxy)

print("DuckDuckGo AI chat. Available models:")
Expand Down
10 changes: 8 additions & 2 deletions duckduckgo_search/duckduckgo_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,12 +125,18 @@ def chat(self, keywords: str, model: str = "gpt-3.5") -> str:
Args:
keywords (str): The initial message or question to send to the AI.
model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5".
model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b".
Defaults to "gpt-3.5".
Returns:
str: The response from the AI.
"""
models = {"claude-3-haiku": "claude-3-haiku-20240307", "gpt-3.5": "gpt-3.5-turbo-0125"}
models = {
"claude-3-haiku": "claude-3-haiku-20240307",
"gpt-3.5": "gpt-3.5-turbo-0125",
"llama-3-70b": "meta-llama/Llama-3-70b-chat-hf",
"mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
}
# vqd
if not self._chat_vqd:
resp = self.client.get("https://duckduckgo.com/duckchat/v1/status", headers={"x-vqd-accept": "1"})
Expand Down
3 changes: 2 additions & 1 deletion duckduckgo_search/duckduckgo_search_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ async def achat(self, keywords: str, model: str = "gpt-3.5") -> str:
Args:
keywords (str): The initial message or question to send to the AI.
model (str): The model to use: "gpt-3.5", "claude-3-haiku". Defaults to "gpt-3.5".
model (str): The model to use: "gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b".
Defaults to "gpt-3.5".
Returns:
str: The response from the AI.
Expand Down
7 changes: 5 additions & 2 deletions tests/test_duckduckgo_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,13 @@ def test_context_manager():
results = ddgs.news("cars", max_results=30)
assert 20 <= len(results) <= 30

def test_chat():
results = DDGS().chat("cat")

@pytest.mark.parametrize("model", ["gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b"])
def test_chat(model):
results = DDGS().chat("cat", model=model)
assert len(results) >= 1


def test_text():
results = DDGS().text("cat", safesearch="off", timelimit="m", max_results=30)
assert 27 <= len(results) <= 30
Expand Down
25 changes: 22 additions & 3 deletions tests/test_duckduckgo_search_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,60 +3,79 @@

from duckduckgo_search import AsyncDDGS


@pytest.fixture(autouse=True)
def pause_between_tests():
time.sleep(0.5)

@pytest.fixture(autouse=True)
async def test_chat():
results = await AsyncDDGS().chat("cat")
@pytest.mark.asyncio
async def test_context_manager():
async with AsyncDDGS() as addgs:
results = await addgs.anews("cars", max_results=30)
assert 20 <= len(results) <= 30


@pytest.mark.asyncio
@pytest.mark.parametrize("model", ["gpt-3.5", "claude-3-haiku", "llama-3-70b", "mixtral-8x7b"])
async def test_chat(model):
results = await AsyncDDGS().achat("cat", model=model)
assert len(results) >= 1


@pytest.mark.asyncio
async def test_text():
results = await AsyncDDGS().atext("sky", safesearch="off", timelimit="m", max_results=30)
assert 27 <= len(results) <= 30


@pytest.mark.asyncio
async def test_text_html():
results = await AsyncDDGS().atext("eagle", backend="html", max_results=30)
assert 27 <= len(results) <= 30


@pytest.mark.asyncio
async def test_text_lite():
results = await AsyncDDGS().atext("dog", backend="lite", max_results=30)
assert 27 <= len(results) <= 30


@pytest.mark.asyncio
async def test_async_images():
results = await AsyncDDGS().aimages("flower", max_results=200)
assert 95 <= len(results) <= 200


@pytest.mark.asyncio
async def test_async_videos():
results = await AsyncDDGS().avideos("sea", max_results=40)
assert 37 <= len(results) <= 40


@pytest.mark.asyncio
async def test_async_news():
results = await AsyncDDGS().anews("tesla", max_results=30)
assert 20 <= len(results) <= 30


@pytest.mark.asyncio
async def test_async_maps():
results = await AsyncDDGS().amaps("school", place="London", max_results=30)
assert 27 <= len(results) <= 30


@pytest.mark.asyncio
async def test_answers():
results = await AsyncDDGS().aanswers("sun")
assert len(results) >= 1


@pytest.mark.asyncio
async def test_suggestions():
results = await AsyncDDGS().asuggestions("moon")
assert len(results) >= 1


@pytest.mark.asyncio
async def test_async_translate():
results = await AsyncDDGS().atranslate(["school", "tomatoes"], to="de")
Expand Down

0 comments on commit cef67ed

Please sign in to comment.