Amazon Bedrock Integration

Secure your Amazon Bedrock foundation models with TalonAI by screening prompts before they reach the model.
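
Both examples assume the TalonAI and AWS SDK clients are installed. The package names below are taken from the imports in the examples; the PyPI package name for the TalonAI client is an assumption:

npm install @talonai/sdk @aws-sdk/client-bedrock-runtime
pip install talonai boto3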

SDK Integration

import { TalonAI } from '@talonai/sdk';
import { BedrockRuntimeClient, InvokeModelCommand } from '@aws-sdk/client-bedrock-runtime';

const talon = new TalonAI();
const bedrock = new BedrockRuntimeClient({ region: 'us-east-1' });

async function chat(userMessage: string) {
  // Screen the prompt with TalonAI before it reaches the model
  const analysis = await talon.analyze({ content: userMessage });

  if (!analysis.isSafe) {
    throw new Error('Prompt blocked by TalonAI analysis');
  }

  // Claude 3 on Bedrock expects the Anthropic Messages API request format
  const command = new InvokeModelCommand({
    modelId: 'anthropic.claude-3-sonnet-20240229-v1:0',
    body: JSON.stringify({
      anthropic_version: 'bedrock-2023-05-31',
      max_tokens: 1024,
      messages: [{ role: 'user', content: userMessage }],
    }),
    contentType: 'application/json',
  });

  // The response body is a byte payload; decode and parse the JSON
  const response = await bedrock.send(command);
  const body = JSON.parse(new TextDecoder().decode(response.body));

  return body.content[0].text;
}
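
A minimal usage sketch for the function above; a prompt rejected by TalonAI surfaces as the error thrown in chat():

async function main() {
  try {
    const reply = await chat('Summarize our refund policy in two sentences.');
    console.log(reply);
  } catch (err) {
    // Blocked prompts and Bedrock failures both land here; inspect the message to tell them apart
    console.error('Request failed:', (err as Error).message);
  }
}

main();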

Python Example

import boto3
import json
from talonai import TalonAI

talon = TalonAI()
bedrock = boto3.client('bedrock-runtime', region_name='us-east-1')

def chat(user_message: str) -> str:
    # Screen the prompt with TalonAI before it reaches the model
    result = talon.analyze(content=user_message)

    if not result.is_safe:
        raise ValueError("Prompt blocked by TalonAI analysis")

    # Claude 3 on Bedrock expects the Anthropic Messages API request format
    response = bedrock.invoke_model(
        modelId='anthropic.claude-3-sonnet-20240229-v1:0',
        contentType='application/json',
        body=json.dumps({
            'anthropic_version': 'bedrock-2023-05-31',
            'max_tokens': 1024,
            'messages': [{'role': 'user', 'content': user_message}]
        })
    )

    # The response body is a streaming payload; read and parse the JSON
    body = json.loads(response['body'].read())
    return body['content'][0]['text']

Supported Models

  • Anthropic Claude 3 (all variants)
  • Amazon Titan
  • Meta Llama 2
  • Cohere Command
  • AI21 Jurassic
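
The TalonAI check is model-agnostic: across the families listed above, only the modelId and the provider-specific request and response formats change. A sketch for Amazon Titan, reusing the TypeScript setup above and assuming the amazon.titan-text-express-v1 model ID is enabled in your region (Titan text models use an inputText/textGenerationConfig request shape rather than Anthropic messages):

async function chatTitan(userMessage: string) {
  // Same guardrail as before: screen the prompt before invoking the model
  const analysis = await talon.analyze({ content: userMessage });
  if (!analysis.isSafe) {
    throw new Error('Prompt blocked by TalonAI analysis');
  }

  const command = new InvokeModelCommand({
    modelId: 'amazon.titan-text-express-v1',
    contentType: 'application/json',
    body: JSON.stringify({
      inputText: userMessage,
      textGenerationConfig: { maxTokenCount: 1024 },
    }),
  });

  const response = await bedrock.send(command);
  const body = JSON.parse(new TextDecoder().decode(response.body));

  // Titan text responses return generations under results[].outputText
  return body.results[0].outputText;
}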