API Integration Best Practices: The Professional Way to Connect

Integrating APIs the right way not only improves application performance but also reduces costs and increases reliability. This guide shares integration patterns and techniques that have proven themselves in practice.

Integration Architecture Design

Layered Architecture Pattern

// 1. API Abstraction Layer
interface LLMProvider {
  chat(messages: Message[]): Promise<Response>;
  complete(prompt: string): Promise<string>;
  embed(text: string): Promise<number[]>;
}

// 2. Provider Implementation
class OpenAIProvider implements LLMProvider {
  private client: OpenAI;
  private config: ProviderConfig;
  
  constructor(config: ProviderConfig) {
    this.config = config;
    this.client = new OpenAI({
      apiKey: config.apiKey,
      maxRetries: config.maxRetries || 3,
    });
  }
  
  async chat(messages: Message[]): Promise<Response> {
    try {
      const response = await this.client.chat.completions.create({
        model: this.config.model || 'gpt-3.5-turbo',
        messages: messages,
        temperature: this.config.temperature || 0.7,
        stream: this.config.stream || false,
      });
      
      return this.formatResponse(response);
    } catch (error) {
      throw this.handleError(error);
    }
  }
  
  private handleError(error: any): Error {
    if (error.status === 429) {
      return new RateLimitError('Rate limit exceeded', error);
    }
    if (error.status === 401) {
      return new AuthenticationError('Invalid API key', error);
    }
    return new APIError('API request failed', error);
  }
}

// 3. Service Layer
class LLMService {
  private providers: Map<string, LLMProvider>;
  private cache: CacheManager;
  private metrics: MetricsCollector;
  
  constructor() {
    this.providers = new Map();
    this.cache = new CacheManager();
    this.metrics = new MetricsCollector();
  }
  
  addProvider(name: string, provider: LLMProvider) {
    this.providers.set(name, provider);
  }
  
  async chat(
    providerName: string, 
    messages: Message[], 
    options?: ChatOptions
  ): Promise<Response> {
    const provider = this.providers.get(providerName);
    if (!provider) {
      throw new Error(`Provider ${providerName} not found`);
    }
    
    // Check cache (only consult it when caching is enabled)
    const cacheKey = this.generateCacheKey(messages);
    if (options?.useCache) {
      const cached = await this.cache.get(cacheKey);
      if (cached) {
        this.metrics.recordCacheHit();
        return cached;
      }
    }
    
    // Call API
    const startTime = Date.now();
    try {
      const response = await provider.chat(messages);
      
      // Record metrics
      this.metrics.recordLatency(Date.now() - startTime);
      this.metrics.recordTokenUsage(response.usage);
      
      // Cache result
      if (options?.useCache) {
        await this.cache.set(cacheKey, response, options.cacheTTL);
      }
      
      return response;
    } catch (error) {
      this.metrics.recordError(error);
      throw error;
    }
  }
}
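
The snippets above lean on a few types and helper classes that are never declared (Message, Response, ProviderConfig, ChatOptions, the custom error classes; CacheManager and MetricsCollector remain assumed). A minimal sketch of those missing pieces and of how the service might be wired up could look like the following; the shapes are illustrative assumptions, not part of any SDK, and the example assumes top-level await (ES modules):

// Supporting types and error classes assumed by the layered example above (illustrative only)
interface Message {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

interface Usage {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number;
}

interface Response {
  content: string;
  usage?: Usage;
}

interface ProviderConfig {
  apiKey: string;
  model?: string;
  temperature?: number;
  stream?: boolean;
  maxRetries?: number;
}

interface ChatOptions {
  useCache?: boolean;
  cacheTTL?: number;
}

class APIError extends Error {
  constructor(message: string, public cause?: unknown) {
    super(message);
  }
}
class RateLimitError extends APIError {}
class AuthenticationError extends APIError {}

// Wiring a provider into the service
const service = new LLMService();
service.addProvider('openai', new OpenAIProvider({
  apiKey: process.env.OPENAI_API_KEY ?? '',
  model: 'gpt-3.5-turbo',
}));

const reply = await service.chat('openai', [
  { role: 'user', content: 'Summarize this ticket in two sentences.' },
], { useCache: true, cacheTTL: 60_000 });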

Error Handling Strategy

Robust Error Handling

Error Classification and Handling

class ErrorHandler {
  static handle(error: any): ErrorResponse {
    // 1. API Errors
    if (error.response) {
      switch (error.response.status) {
        case 429:
          return {
            retry: true,
            delay: this.getRetryDelay(error),
            message: 'Rate limited'
          };
        case 401:
          return {
            retry: false,
            message: 'Authentication failed'
          };
        case 500:
        case 502:
        case 503:
          return {
            retry: true,
            delay: 1000,
            message: 'Server error'
          };
      }
    }
    
    // 2. Network Errors
    if (error.code === 'ECONNREFUSED') {
      return {
        retry: true,
        delay: 5000,
        message: 'Connection failed'
      };
    }
    
    // 3. Timeout Errors
    if (error.code === 'ETIMEDOUT') {
      return {
        retry: true,
        delay: 2000,
        message: 'Request timeout'
      };
    }
    
    // Default handling
    return {
      retry: false,
      message: error.message
    };
  }
  
  static getRetryDelay(error: any): number {
    // Get retry delay from response header
    const retryAfter = 
      error.response?.headers['retry-after'];
    
    if (retryAfter) {
      return parseInt(retryAfter, 10) * 1000;
    }
    
    // Exponential backoff
    const attempt = error.attempt || 1;
    return Math.min(
      Math.pow(2, attempt) * 1000,
      30000
    );
  }
}

Smart Retry Mechanism

class RetryManager {
  async execute<T>(
    operation: () => Promise<T>,
    options: RetryOptions = {}
  ): Promise<T> {
    const {
      maxAttempts = 3,
      initialDelay = 1000,
      maxDelay = 30000,
      factor = 2,
      jitter = true
    } = options;
    
    let lastError: any;
    
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return await operation();
      } catch (error: any) {
        lastError = error;
        error.attempt = attempt;
        
        const errorResponse = 
          ErrorHandler.handle(error);
        
        if (!errorResponse.retry || 
            attempt === maxAttempts) {
          throw error;
        }
        
        // Calculate delay
        let delay = errorResponse.delay || 
          Math.min(
            initialDelay * Math.pow(factor, attempt - 1),
            maxDelay
          );
        
        // Add jitter
        if (jitter) {
          delay *= 0.5 + Math.random();
        }
        
        console.log(
          `Retry ${attempt}/${maxAttempts} after ${delay}ms`
        );
        
        await this.sleep(delay);
      }
    }
    
    throw lastError;
  }
  
  private sleep(ms: number): Promise<void> {
    return new Promise(resolve => 
      setTimeout(resolve, ms)
    );
  }
}
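
ErrorHandler and RetryManager assume an ErrorResponse shape and a RetryOptions bag that are never declared. One possible layout, plus wrapping the service call from the earlier wiring example in retries (the option values are illustrative):

// Shapes assumed by ErrorHandler and RetryManager (illustrative)
interface ErrorResponse {
  retry: boolean;
  delay?: number;
  message: string;
}

interface RetryOptions {
  maxAttempts?: number;
  initialDelay?: number;
  maxDelay?: number;
  factor?: number;
  jitter?: boolean;
}

// Wrapping a provider call with automatic retries
const retryManager = new RetryManager();

const answer = await retryManager.execute(
  () => service.chat('openai', [{ role: 'user', content: 'Hello!' }]),
  { maxAttempts: 5, initialDelay: 500 }
);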

Streaming Response Handling

Efficient Stream Processing

// Streaming Response Handler
class StreamHandler {
  async *handleStream(
    stream: ReadableStream,
    options: StreamOptions = {}
  ): AsyncGenerator<StreamChunk> {
    const reader = stream.getReader();
    const decoder = new TextDecoder();
    let buffer = '';
    
    try {
      while (true) {
        const { done, value } = await reader.read();
        
        if (done) {
          // Flush any remaining buffered line
          const remaining = buffer.trim();
          if (remaining.startsWith('data: ') && remaining !== 'data: [DONE]') {
            try {
              yield this.processChunk(JSON.parse(remaining.slice(6)));
            } catch {
              // Ignore a trailing partial chunk
            }
          }
          break;
        }
        
        // Decode data
        buffer += decoder.decode(value, { stream: true });
        
        // Split by line
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';
        
        for (const line of lines) {
          if (line.trim() === '') continue;
          
          if (line.startsWith('data: ')) {
            const data = line.slice(6);
            
            if (data === '[DONE]') {
              return;
            }
            
            try {
              const chunk = JSON.parse(data);
              
              // Process data
              const processed = await this.processChunk(chunk);
              
              // Send processed data
              yield processed;
              
              // Callback handling
              if (options.onChunk) {
                options.onChunk(processed);
              }
            } catch (error) {
              console.error('Failed to parse chunk:', error);
              
              if (options.onError) {
                options.onError(error);
              }
            }
          }
        }
      }
    } finally {
      reader.releaseLock();
    }
  }
  
  private processChunk(chunk: any): StreamChunk {
    return {
      id: chunk.id,
      content: chunk.choices?.[0]?.delta?.content || '',
      role: chunk.choices?.[0]?.delta?.role,
      finishReason: chunk.choices?.[0]?.finish_reason,
      usage: chunk.usage
    };
  }
  
  // Stream aggregator
  async aggregateStream(
    stream: AsyncGenerator<StreamChunk>
  ): Promise<CompleteResponse> {
    let content = '';
    let role = '';
    let usage = null;
    
    for await (const chunk of stream) {
      if (chunk.content) {
        content += chunk.content;
      }
      if (chunk.role) {
        role = chunk.role;
      }
      if (chunk.usage) {
        usage = chunk.usage;
      }
    }
    
    return {
      content,
      role,
      usage
    };
  }
}

// Usage example
const streamHandler = new StreamHandler();

// Usage in a React component (requires: import { useState } from 'react')
function ChatComponent() {
  const [response, setResponse] = useState('');
  
  const handleStream = async (prompt: string) => {
    const stream = await api.createChatStream(prompt);
    
    for await (const chunk of streamHandler.handleStream(stream, {
      onChunk: (chunk) => {
        setResponse(prev => prev + chunk.content);
      },
      onError: (error) => {
        console.error('Stream error:', error);
      }
    })) {
      // Additional processing can be done here
    }
  };
}
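
StreamHandler also references a few types that are never declared. One possible shape for them, matching how the fields are read above (Usage comes from the earlier sketch):

// Types assumed by StreamHandler (illustrative)
interface StreamChunk {
  id?: string;
  content: string;
  role?: string;
  finishReason?: string | null;
  usage?: Usage | null;
}

interface StreamOptions {
  onChunk?: (chunk: StreamChunk) => void;
  onError?: (error: unknown) => void;
}

interface CompleteResponse {
  content: string;
  role: string;
  usage: Usage | null;
}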

Performance Optimization Techniques

Improving API Call Performance

🚀 Connection Pool Optimization

// HTTP Agent Configuration
const https = require('https');
const http = require('http');
const axios = require('axios');

const httpsAgent = new https.Agent({
  keepAlive: true,
  keepAliveMsecs: 1000,
  maxSockets: 50,
  maxFreeSockets: 10,
  timeout: 60000,
  scheduling: 'lifo'
});

const httpAgent = new http.Agent({
  keepAlive: true,
  keepAliveMsecs: 1000,
  maxSockets: 50,
  maxFreeSockets: 10,
  timeout: 60000
});

// Axios Configuration
const apiClient = axios.create({
  httpsAgent,
  httpAgent,
  timeout: 30000,
  maxRedirects: 0,
  decompress: true,
  responseType: 'stream' // suits streaming endpoints; override with 'json' per request for ordinary calls
});

// Request Interceptor
apiClient.interceptors.request.use(
  config => {
    config.headers['Connection'] = 'keep-alive';
    config.headers['Accept-Encoding'] = 'gzip, deflate';
    return config;
  }
);
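
The pooled client can then back non-streaming calls as well. The endpoint, payload, and environment variable below are placeholders, and responseType is overridden per request so ordinary calls come back as parsed JSON:

// Reusing the pooled axios client for a non-streaming chat call (illustrative endpoint and payload)
async function callChatAPI(messages: Message[]): Promise<Response> {
  const res = await apiClient.post(
    'https://api.openai.com/v1/chat/completions',
    { model: 'gpt-3.5-turbo', messages },
    {
      headers: { Authorization: `Bearer ${process.env.OPENAI_API_KEY}` },
      responseType: 'json', // override the client default for non-streaming calls
    }
  );

  return {
    content: res.data.choices[0].message.content,
    usage: {
      promptTokens: res.data.usage.prompt_tokens,
      completionTokens: res.data.usage.completion_tokens,
      totalTokens: res.data.usage.total_tokens,
    },
  };
}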

💾 Smart Caching Strategy

class SmartCache {
  private cache: Map<string, CacheEntry>;
  private lru: string[];
  private maxSize: number;
  
  constructor(maxSize = 1000) {
    this.cache = new Map();
    this.lru = [];
    this.maxSize = maxSize;
  }
  
  async get(
    key: string,
    factory: () => Promise<any>,
    options: CacheOptions = {}
  ): Promise<any> {
    // Check cache
    const entry = this.cache.get(key);
    
    if (entry && !this.isExpired(entry)) {
      this.updateLRU(key);
      return entry.value;
    }
    
    // Generate new value
    const value = await factory();
    
    // Store in cache
    this.set(key, value, options.ttl || 3600000);
    
    return value;
  }
  
  private set(key: string, value: any, ttl: number) {
    // Check capacity
    if (this.cache.size >= this.maxSize) {
      this.evict();
    }
    
    this.cache.set(key, {
      value,
      expiry: Date.now() + ttl
    });
    
    this.updateLRU(key);
  }
  
  private evict() {
    // LRU eviction
    const leastUsed = this.lru.shift();
    if (leastUsed) {
      this.cache.delete(leastUsed);
    }
  }
  
  private updateLRU(key: string) {
    const index = this.lru.indexOf(key);
    if (index > -1) {
      this.lru.splice(index, 1);
    }
    this.lru.push(key);
  }
  
  private isExpired(entry: CacheEntry): boolean {
    return Date.now() > entry.expiry;
  }
}
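
SmartCache assumes CacheEntry and CacheOptions shapes. A sketch of those, plus one way to memoize chat calls by hashing the message list (the key scheme is a simple option, not a requirement):

import { createHash } from 'crypto';

// Shapes assumed by SmartCache (illustrative)
interface CacheEntry {
  value: any;
  expiry: number;
}

interface CacheOptions {
  ttl?: number;
}

// Memoizing chat calls keyed by a hash of the messages
const responseCache = new SmartCache(500);

async function cachedChat(messages: Message[]): Promise<Response> {
  const key = createHash('sha256')
    .update(JSON.stringify(messages))
    .digest('hex');

  return responseCache.get(
    key,
    () => service.chat('openai', messages),
    { ttl: 10 * 60 * 1000 } // 10 minutes
  );
}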

Cost Control Strategy

Fine-grained Cost Management

Token Optimization

class TokenOptimizer {
  // Compress Prompt
  compressPrompt(text: string): string {
    return text
      // Remove extra whitespace
      .replace(/\s+/g, ' ')
      // Remove duplicate punctuation
      .replace(/([.!?])\1+/g, '$1')
      // Trim formatting
      .trim();
  }
  
  // Smart truncation
  truncateToLimit(
    text: string, 
    maxTokens: number,
    model: string = 'gpt-3.5-turbo'
  ): string {
    const encoder = this.getEncoder(model);
    const tokens = encoder.encode(text);
    
    if (tokens.length <= maxTokens) {
      return text;
    }
    
    // Preserve important parts
    const importance = this.calculateImportance(text);
    const truncated = this.smartTruncate(
      text, 
      maxTokens, 
      importance
    );
    
    return truncated;
  }
  
  // Batch optimization
  optimizeBatch(messages: Message[]): Message[] {
    // Deduplicate
    const unique = this.deduplicateMessages(messages);
    
    // Merge similar
    const merged = this.mergeSimilar(unique);
    
    // Compress content
    return merged.map(msg => ({
      ...msg,
      content: this.compressPrompt(msg.content)
    }));
  }
}
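
Before reaching for a full tokenizer, a rough rule of thumb (about four characters per token for English text) is often enough to gate request sizes. Treat it as an approximation, not an exact count:

// Rough token estimate (~4 characters per token for English); approximation only
function estimateTokens(text: string): number {
  return Math.ceil(text.length / 4);
}

function fitsInContext(messages: Message[], maxTokens: number): boolean {
  const total = messages.reduce(
    (sum, msg) => sum + estimateTokens(msg.content),
    0
  );
  return total <= maxTokens;
}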

Usage Monitoring

class UsageMonitor {
  private usage: Map<string, UserUsage> = new Map();
  private limits: Map<string, UsageLimit> = new Map();
  
  async trackUsage(
    userId: string,
    tokens: number,
    cost: number
  ): Promise<void> {
    const usage = this.getOrCreateUsage(userId);
    
    // Update usage
    usage.tokens += tokens;
    usage.cost += cost;
    usage.requests += 1;
    
    // Check limits
    await this.checkLimits(userId, usage);
    
    // Send alert
    if (usage.cost > usage.budget * 0.8) {
      await this.sendBudgetAlert(userId, usage);
    }
  }
  
  async checkLimits(
    userId: string, 
    usage: UserUsage
  ): Promise<void> {
    const limits = this.limits.get(userId);
    if (!limits) return;
    
    if (usage.tokens > limits.maxTokens) {
      throw new Error('Token limit exceeded');
    }
    
    if (usage.cost > limits.maxCost) {
      throw new Error('Cost limit exceeded');
    }
    
    if (usage.requests > limits.maxRequests) {
      throw new Error('Request limit exceeded');
    }
  }
  
  generateReport(userId: string): UsageReport | null {
    const usage = this.usage.get(userId);
    if (!usage) return null;
    
    return {
      period: this.getCurrentPeriod(),
      totalTokens: usage.tokens,
      totalCost: usage.cost,
      totalRequests: usage.requests,
      avgTokensPerRequest: usage.tokens / usage.requests,
      avgCostPerRequest: usage.cost / usage.requests,
      topModels: this.getTopModels(usage),
      recommendations: this.getRecommendations(usage)
    };
  }
}
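
UsageMonitor leans on a few record shapes and a per-token price. The interfaces below are one possible layout, the price is a placeholder rather than a real rate, and `reply` refers to the response from the earlier wiring example:

// Shapes assumed by UsageMonitor (illustrative)
interface UserUsage {
  tokens: number;
  cost: number;
  requests: number;
  budget: number;
}

interface UsageLimit {
  maxTokens: number;
  maxCost: number;
  maxRequests: number;
}

// Recording usage after a chat call (placeholder pricing; check your provider's rates)
const PRICE_PER_1K_TOKENS = 0.002;

const monitor = new UsageMonitor();
const usage = reply.usage;
if (usage) {
  await monitor.trackUsage(
    'user-123',
    usage.totalTokens,
    (usage.totalTokens / 1000) * PRICE_PER_1K_TOKENS
  );
}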

Security Best Practices

API Security Protection

🔐 Key Management

// Secure Key Management
class SecureKeyManager {
  private keys: Map<string, EncryptedKey>;
  private vault: KeyVault;
  
  constructor() {
    this.keys = new Map();
    this.vault = new KeyVault(process.env.VAULT_URL);
  }
  
  async getKey(keyName: string): Promise<string> {
    // 1. Check in-memory cache
    const cached = this.keys.get(keyName);
    if (cached && !this.isExpired(cached)) {
      return this.decrypt(cached);
    }
    
    // 2. Get from key vault
    const key = await this.vault.getSecret(keyName);
    
    // 3. Cache encrypted key
    this.keys.set(keyName, {
      value: this.encrypt(key),
      expiry: Date.now() + 3600000 // 1 hour
    });
    
    return key;
  }
  
  // Key rotation
  async rotateKey(keyName: string): Promise<void> {
    // Generate new key
    const newKey = this.generateKey();
    
    // Update key vault
    await this.vault.updateSecret(keyName, newKey);
    
    // Clear cache
    this.keys.delete(keyName);
    
    // Notify relevant services
    await this.notifyKeyRotation(keyName);
  }
  
  // Audit log
  async logKeyAccess(keyName: string, userId: string) {
    await this.vault.audit({
      action: 'KEY_ACCESS',
      keyName,
      userId,
      timestamp: new Date(),
      ip: this.getClientIP()
    });
  }
}
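
Resolving the provider key through the vault at startup keeps raw keys out of configuration files. A short sketch, with an illustrative secret name:

// Fetching the API key from the vault before constructing a provider (illustrative secret name)
const keyManager = new SecureKeyManager();
const openaiKey = await keyManager.getKey('openai-api-key');

service.addProvider('openai-secure', new OpenAIProvider({ apiKey: openaiKey }));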

🛡️ Input Validation

class InputValidator {
  static validateChatInput(input: ChatInput): void {
    // 1. Length check
    if (input.message.length > 4000) {
      throw new ValidationError('Message too long');
    }
    
    // 2. Content filtering
    if (this.containsMalicious(input.message)) {
      throw new ValidationError('Malicious content detected');
    }
    
    // 3. Injection detection
    if (this.detectInjection(input.message)) {
      throw new ValidationError('Injection attempt detected');
    }
    
    // 4. Encoding check
    if (!this.isValidUTF8(input.message)) {
      throw new ValidationError('Invalid encoding');
    }
  }
  
  private static containsMalicious(text: string): boolean {
    const patterns = [
      /system\s*:/i,
      /ignore\s+previous/i,
      /<script[^>]*>/i,
      /\x00/
    ];
    
    return patterns.some(pattern => pattern.test(text));
  }
  
  private static detectInjection(text: string): boolean {
    // Prompt injection detection
    const injectionPatterns = [
      'forget all previous',
      'disregard instructions',
      'new system prompt'
    ];
    
    const lowercased = text.toLowerCase();
    return injectionPatterns.some(pattern => 
      lowercased.includes(pattern)
    );
  }
}
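
Validation is cheapest right at the boundary, before a request ever reaches a provider. ChatInput and ValidationError are assumed shapes sketched here for illustration:

// Assumed input shape and error class (illustrative)
interface ChatInput {
  message: string;
  userId?: string;
}

class ValidationError extends Error {}

// Validate user input before it reaches the model
async function handleUserMessage(input: ChatInput): Promise<Response> {
  InputValidator.validateChatInput(input); // throws ValidationError on bad input
  return service.chat('openai', [{ role: 'user', content: input.message }]);
}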

Monitoring and Observability

Comprehensive Monitoring System

// Integrated Monitoring System
class ObservabilitySystem {
  private metrics: MetricsClient;
  private traces: TracingClient;
  private logs: LoggingClient;
  
  constructor() {
    this.metrics = new PrometheusClient();
    this.traces = new JaegerClient();
    this.logs = new ElasticsearchClient();
  }
  
  // Request tracing
  async traceRequest(
    operation: string,
    fn: () => Promise<any>
  ): Promise<any> {
    const span = this.traces.startSpan(operation);
    const requestId = generateRequestId();
    
    try {
      // Set tracing context
      span.setTag('request.id', requestId);
      span.setTag('user.id', getCurrentUserId());
      
      // Log start
      this.logs.info('Request started', {
        requestId,
        operation,
        timestamp: new Date()
      });
      
      // Execute operation
      const startTime = Date.now();
      const result = await fn();
      const duration = Date.now() - startTime;
      
      // Record metrics
      this.metrics.histogram('api_request_duration', duration, {
        operation,
        status: 'success'
      });
      
      // Log success
      span.setTag('response.status', 'success');
      span.finish();
      
      return result;
    } catch (error) {
      // Log error
      span.setTag('error', true);
      span.log({
        event: 'error',
        message: error.message,
        stack: error.stack
      });
      
      // Error metrics
      this.metrics.increment('api_request_errors', {
        operation,
        error_type: error.constructor.name
      });
      
      // Error log
      this.logs.error('Request failed', {
        requestId,
        operation,
        error: {
          message: error.message,
          stack: error.stack
        }
      });
      
      span.finish();
      throw error;
    }
  }
  
  // Custom metrics
  recordCustomMetric(name: string, value: number, tags?: any) {
    this.metrics.gauge(name, value, tags);
  }
  
  // Health check
  async healthCheck(): Promise<HealthStatus> {
    const checks = await Promise.all([
      this.checkAPIHealth(),
      this.checkDatabaseHealth(),
      this.checkCacheHealth()
    ]);
    
    const allHealthy = checks.every(c => c.healthy);
    
    return {
      status: allHealthy ? 'healthy' : 'unhealthy',
      checks,
      timestamp: new Date()
    };
  }
}
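
Wrapping provider calls in traceRequest gives each request a span, a latency histogram, and structured logs in one place. A minimal usage sketch; the client classes above stand in for real Prometheus, Jaeger, and Elasticsearch SDKs:

// Tracing an LLM call end to end (client classes above are stand-ins for real SDKs)
const observability = new ObservabilitySystem();

const traced = await observability.traceRequest('chat.completion', () =>
  service.chat('openai', [{ role: 'user', content: 'Ping' }])
);

console.log(traced.content);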

Create a Professional API Integration

Follow these best practices to build efficient, reliable, and secure large language model applications.
