Installation
- Python
- TypeScript
Copy
Ask AI
pip install anthropic
Copy
Ask AI
npm install @anthropic-ai/sdk
Basic Usage
- Python
- TypeScript
Copy
Ask AI
import os
from anthropic import Anthropic

# Point the Anthropic client at the Edgee gateway
client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
)

# Create a message and print the reply
response = client.messages.create(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    messages=[
        {"role": "user", "content": "What is the capital of France?"}
    ]
)
print(response.content)

# Report how many tokens the request consumed
print(f"Input tokens: {response.usage.input_tokens}")
print(f"Output tokens: {response.usage.output_tokens}")
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// Point the Anthropic client at the Edgee gateway
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

// Create a message and print the reply
const response = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [
    { role: 'user', content: 'What is the capital of France?' }
  ]
});
console.log(response.content);

// Report how many tokens the request consumed
console.log(`Input tokens: ${response.usage.input_tokens}`);
console.log(`Output tokens: ${response.usage.output_tokens}`);
Streaming Responses
Stream responses for real-time token delivery:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
)

# Stream messages: print each text chunk as it arrives
with client.messages.stream(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    messages=[
        {"role": "user", "content": "Write a short poem about coding"}
    ]
) as stream:
    for text in stream.text_stream:
        print(text, end="", flush=True)
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// Client routed through the Edgee gateway
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

// Request a streaming response
const events = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [
    { role: 'user', content: 'Write a short poem about coding' }
  ],
  stream: true,
});

// Print each text delta as it arrives
for await (const event of events) {
  if (event.type === 'content_block_delta'
      && event.delta.type === 'text_delta') {
    process.stdout.write(event.delta.text);
  }
}
Token Usage Tracking
Access standard Anthropic token usage metrics in every response:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
)

message = client.messages.create(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Analyze this long document..."}]
)

# usage carries the standard Anthropic token accounting
print(message.content)
print(f"Input tokens: {message.usage.input_tokens}")
print(f"Output tokens: {message.usage.output_tokens}")
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// Client routed through the Edgee gateway
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

const response = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Analyze this long document...' }]
});

// usage carries the standard Anthropic token accounting
console.log(response.content);
console.log(`Input tokens: ${response.usage.input_tokens}`);
console.log(`Output tokens: ${response.usage.output_tokens}`);
When compression is enabled, input_tokens reflects the compressed token count. View detailed compression metrics in the Edgee dashboard.
Compression & Tags via Headers
When using the Anthropic SDK with Edgee, you can control token compression and add tags using HTTP headers:
Enabling Compression
The headers below control Agentic Token Compression.
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

# default_headers apply to every request made with this client
client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
    default_headers={
        "x-edgee-enable-compression": "true",
        "x-edgee-compression-rate": "0.8",  # Target 80% compression (0.0-1.0)
    }
)

# All requests will use compression with 80% target rate
message = client.messages.create(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Analyze this document..."}]
)
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// defaultHeaders apply to every request made with this client
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
  defaultHeaders: {
    'x-edgee-enable-compression': 'true',
    'x-edgee-compression-rate': '0.8', // Target 80% compression (0.0-1.0)
  }
});

// Every request from this client is compressed
const response = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Analyze this document...' }]
});
Adding Tags for Analytics
Combine compression with tags to track requests in your dashboard:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

# Compression plus analytics tags, applied to every request
client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
    default_headers={
        "x-edgee-enable-compression": "true",
        "x-edgee-compression-rate": "0.8",
        "x-edgee-tags": "production,anthropic-sdk,user-123"
    }
)
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// Compression plus analytics tags, applied to every request
const edgeeHeaders = {
  'x-edgee-enable-compression': 'true',
  'x-edgee-compression-rate': '0.8',
  'x-edgee-tags': 'production,anthropic-sdk,user-123'
};

const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
  defaultHeaders: edgeeHeaders
});
| Header | Type | Description |
|---|---|---|
| `x-edgee-enable-compression` | `"true"` or `"false"` | Enable token compression for requests (overrides console settings) |
| `x-edgee-compression-rate` | string | Target compression rate (0.0-1.0, default 0.75) |
| `x-edgee-tags` | string | Comma-separated tags for analytics and filtering |
You can also enable compression per API key in the Edgee console. Headers override console settings for specific requests.
Multi-Provider Access
With Edgee, you can access models from multiple providers using the same Anthropic SDK client and compare costs across providers:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

# One client, multiple providers: Edgee routes by model name
client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
)

# Use Claude
claude_response = client.messages.create(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)

# Use GPT-5.2 through the same client (comment fixed: model is gpt-5.2, not GPT-4)
gpt_response = client.messages.create(
    model="gpt-5.2",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)

# Use Mistral
mistral_response = client.messages.create(
    model="mistral-large",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// One client, multiple providers: Edgee routes by model name.
// baseURL normalized (trailing slash removed) to match every other example.
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

// Use Claude
const claudeResponse = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello!' }]
});

// Use GPT-5.2 through the same client (comment fixed: model is gpt-5.2, not GPT-4)
const gptResponse = await client.messages.create({
  model: 'gpt-5.2',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello!' }]
});

// Use Mistral
const mistralResponse = await client.messages.create({
  model: 'mistral-large',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello!' }]
});
Function Calling (Tools)
Use Claude’s tool calling with Edgee:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic

client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
)

# Define a tool: a single weather-lookup function with a JSON Schema input
tools = [
    {
        "name": "get_weather",
        "description": "Get the current weather in a given location",
        "input_schema": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA"
                }
            },
            "required": ["location"]
        }
    }
]

# Send message with tools; the model may respond with a tool_use block
message = client.messages.create(
    model="claude-sonnet-4.5",
    max_tokens=1024,
    tools=tools,
    messages=[
        {"role": "user", "content": "What's the weather like in Paris?"}
    ]
)
print(message.content)
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

// Define a tool: a single weather-lookup function with a JSON Schema input
const weatherTool = {
  name: 'get_weather',
  description: 'Get the current weather in a given location',
  input_schema: {
    type: 'object',
    properties: {
      location: {
        type: 'string',
        description: 'The city and state, e.g. San Francisco, CA'
      }
    },
    required: ['location']
  }
};

// Send message with tools; the model may respond with a tool_use block
const response = await client.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  tools: [weatherTool],
  messages: [
    { role: 'user', content: "What's the weather like in Paris?" }
  ]
});
console.log(response.content);
Error Handling and Retries
The Anthropic SDK includes built-in retry logic, which works seamlessly with Edgee’s automatic failover:
- Python
- TypeScript
Copy
Ask AI
import os  # was missing: os.environ is used below and would raise NameError
from anthropic import Anthropic, APIError

client = Anthropic(
    base_url="https://api.edgee.ai",
    api_key=os.environ.get("EDGEE_API_KEY"),
    max_retries=3,  # SDK will retry up to 3 times
)

try:
    message = client.messages.create(
        model="claude-sonnet-4.5",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello!"}]
    )
    print(message.content)
except APIError as e:
    # APIError is the SDK's base class for API-level failures
    print(f"API Error: {e}")
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';

// maxRetries handles transient failures at the SDK layer;
// Edgee adds provider failover on top of it
const client = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
  maxRetries: 3,
});

try {
  const response = await client.messages.create({
    model: 'claude-sonnet-4.5',
    max_tokens: 1024,
    messages: [{ role: 'user', content: 'Hello!' }]
  });
  console.log(response.content);
} catch (error) {
  console.error('API Error:', error);
}
Complete Example
Here’s a complete application example:
- Python
- TypeScript
Copy
Ask AI
#!/usr/bin/env python3
import os
from anthropic import Anthropic


def main():
    """Interactive chat loop that streams Claude replies through Edgee."""
    client = Anthropic(
        base_url="https://api.edgee.ai",
        api_key=os.environ.get("EDGEE_API_KEY"),
        default_headers={
            "x-edgee-tags": "production,chat-app"
        }
    )

    # Full message history, re-sent on every turn
    history = []
    print("Chat with Claude (type 'quit' to exit)")

    while True:
        prompt = input("\nYou: ")
        if prompt.lower() == 'quit':
            break

        history.append({
            "role": "user",
            "content": prompt
        })

        # Stream the reply chunk-by-chunk, collecting it for the next turn
        print("\nClaude: ", end="", flush=True)
        reply = ""
        with client.messages.stream(
            model="claude-sonnet-4.5",
            max_tokens=1024,
            messages=history
        ) as stream:
            for chunk in stream.text_stream:
                print(chunk, end="", flush=True)
                reply += chunk

        history.append({
            "role": "assistant",
            "content": reply
        })


if __name__ == "__main__":
    main()
Copy
Ask AI
import Anthropic from '@anthropic-ai/sdk';
import * as readline from 'readline';

async function main() {
  // Initialize client routed through Edgee, tagging requests for analytics
  const client = new Anthropic({
    baseURL: 'https://api.edgee.ai',
    apiKey: process.env.EDGEE_API_KEY,
    defaultHeaders: {
      'x-edgee-tags': 'production,chat-app'
    }
  });

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });

  // Literal role union so entries are assignable to the SDK's MessageParam;
  // a plain `string` role fails the type check on messages.create.
  const conversation: Array<{ role: 'user' | 'assistant'; content: string }> = [];

  console.log("Chat with Claude (type 'quit' to exit)");

  const chat = () => {
    rl.question('\nYou: ', async (input) => {
      if (input.toLowerCase() === 'quit') {
        rl.close();
        return;
      }
      conversation.push({
        role: 'user',
        content: input
      });

      process.stdout.write('\nClaude: ');
      const stream = await client.messages.create({
        model: 'claude-sonnet-4.5',
        max_tokens: 1024,
        messages: conversation,
        stream: true,
      });

      // Accumulate the streamed text so the assistant turn is kept in history
      let assistantMessage = '';
      for await (const event of stream) {
        if (event.type === 'content_block_delta'
            && event.delta.type === 'text_delta') {
          process.stdout.write(event.delta.text);
          assistantMessage += event.delta.text;
        }
      }
      conversation.push({
        role: 'assistant',
        content: assistantMessage
      });

      chat(); // next turn
    });
  };

  chat();
}

main();