From 6041be5349db951fe4ac9ca033c210813bc5f87e Mon Sep 17 00:00:00 2001
From: Stainless Bot
Date: Wed, 12 Jul 2023 15:16:08 +0000
Subject: [PATCH] docs(examples): bump model to claude-2 in example scripts

---
 examples/demo_async.py | 2 +-
 examples/demo_sync.py  | 2 +-
 examples/streaming.py  | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/demo_async.py b/examples/demo_async.py
index d78d3dc5..718ddd2a 100644
--- a/examples/demo_async.py
+++ b/examples/demo_async.py
@@ -10,7 +10,7 @@ async def main() -> None:
     client = AsyncAnthropic()
 
     res = await client.completions.create(
-        model="claude-v1",
+        model="claude-2",
         prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? {anthropic.AI_PROMPT}",
         max_tokens_to_sample=1000,
     )
diff --git a/examples/demo_sync.py b/examples/demo_sync.py
index 7da2c7f5..a60846e0 100644
--- a/examples/demo_sync.py
+++ b/examples/demo_sync.py
@@ -8,7 +8,7 @@ def main() -> None:
     client = Anthropic()
 
     res = client.completions.create(
-        model="claude-v1",
+        model="claude-2",
         prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? {anthropic.AI_PROMPT}",
         max_tokens_to_sample=1000,
     )
diff --git a/examples/streaming.py b/examples/streaming.py
index 16e1230f..1d91b531 100644
--- a/examples/streaming.py
+++ b/examples/streaming.py
@@ -15,7 +15,7 @@ def sync_stream() -> None:
     stream = client.completions.create(
         prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}",
-        model="claude-v1",
+        model="claude-2",
         stream=True,
         max_tokens_to_sample=300,
     )
@@ -29,7 +29,7 @@ def sync_stream() -> None:
 async def async_stream() -> None:
     stream = await async_client.completions.create(
         prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}",
-        model="claude-v1",
+        model="claude-2",
         stream=True,
         max_tokens_to_sample=300,
     )