diff --git a/examples/demo_async.py b/examples/demo_async.py
index d78d3dc5..718ddd2a 100644
--- a/examples/demo_async.py
+++ b/examples/demo_async.py
@@ -10,7 +10,7 @@ async def main() -> None:
     client = AsyncAnthropic()
 
     res = await client.completions.create(
-        model="claude-v1",
+        model="claude-2",
         prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? {anthropic.AI_PROMPT}",
         max_tokens_to_sample=1000,
     )
diff --git a/examples/demo_sync.py b/examples/demo_sync.py
index 7da2c7f5..a60846e0 100644
--- a/examples/demo_sync.py
+++ b/examples/demo_sync.py
@@ -8,7 +8,7 @@ def main() -> None:
     client = Anthropic()
 
     res = client.completions.create(
-        model="claude-v1",
+        model="claude-2",
         prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? {anthropic.AI_PROMPT}",
         max_tokens_to_sample=1000,
     )
diff --git a/examples/streaming.py b/examples/streaming.py
index 16e1230f..1d91b531 100644
--- a/examples/streaming.py
+++ b/examples/streaming.py
@@ -15,7 +15,7 @@
 def sync_stream() -> None:
     stream = client.completions.create(
         prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}",
-        model="claude-v1",
+        model="claude-2",
         stream=True,
         max_tokens_to_sample=300,
     )
@@ -29,7 +29,7 @@
 async def async_stream() -> None:
     stream = await async_client.completions.create(
         prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}",
-        model="claude-v1",
+        model="claude-2",
         stream=True,
         max_tokens_to_sample=300,
     )