JavaScript/TypeScript Integration Complete Guide
Integrate LLM API into web applications, supporting mainstream frameworks like React, Vue, and Node.js, enabling end-to-end AI feature development across frontend and backend.
Installation and Configuration
NPM Installation
# Install OpenAI SDK
npm install openai

# TypeScript type support
npm install --save-dev @types/node

# Environment variable management
npm install dotenv
Environment Configuration
// .env file
OPENAI_API_KEY=your-api-key
OPENAI_BASE_URL=https://api.openai.com/v1
// config.js
import dotenv from 'dotenv';
dotenv.config();
export const config = {
apiKey: process.env.OPENAI_API_KEY,
baseURL: process.env.OPENAI_BASE_URL
};Node.js Backend Implementation
// server.js
// Express backend that proxies chat requests to the OpenAI API.
// Keeping the call server-side means the API key is never shipped to the browser.
import express from 'express';
import OpenAI from 'openai';
import cors from 'cors';
const app = express();
// Allow cross-origin requests from the frontend dev server.
app.use(cors());
// Parse JSON request bodies into req.body.
app.use(express.json());
// SDK client reads the key from the environment (see the .env example above).
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY
});
// Chat endpoint: forwards a conversation to OpenAI and returns the reply.
// Responds with { success, data } on success, { success: false, error } otherwise.
app.post('/api/chat', async (req, res) => {
  try {
    const { messages } = req.body;
    // Reject malformed payloads up front: a 400 is clearer for the caller
    // than a 500 from the OpenAI SDK, and it avoids wasting an API call.
    if (!Array.isArray(messages) || messages.length === 0) {
      return res.status(400).json({
        success: false,
        error: 'Request body must include a non-empty "messages" array'
      });
    }
    const completion = await openai.chat.completions.create({
      model: "gpt-4",
      messages: messages,
      temperature: 0.7
    });
    res.json({
      success: true,
      data: completion.choices[0].message
    });
  } catch (error) {
    res.status(500).json({
      success: false,
      error: error.message
    });
  }
});
// Streaming endpoint: relays OpenAI completion deltas to the client as
// Server-Sent Events. Each delta is one "data:" frame; "[DONE]" marks the end.
app.post('/api/chat/stream', async (req, res) => {
  const { messages } = req.body;
  // Validate before committing to the SSE content type so we can still
  // return a proper 400 status for bad payloads.
  if (!Array.isArray(messages) || messages.length === 0) {
    return res.status(400).json({
      success: false,
      error: 'Request body must include a non-empty "messages" array'
    });
  }
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');
  // Stop relaying if the browser disconnects mid-stream; otherwise we keep
  // consuming the upstream OpenAI stream for nobody.
  let clientGone = false;
  req.on('close', () => { clientGone = true; });
  try {
    const stream = await openai.chat.completions.create({
      model: "gpt-4",
      messages,
      stream: true
    });
    for await (const chunk of stream) {
      if (clientGone) break;
      const data = chunk.choices[0]?.delta?.content || '';
      res.write(`data: ${JSON.stringify({ content: data })}\n\n`);
    }
    res.write('data: [DONE]\n\n');
  } catch (error) {
    // Headers are already sent, so report the failure in-band on the stream.
    res.write(`data: ${JSON.stringify({ error: error.message })}\n\n`);
  } finally {
    res.end();
  }
});
app.listen(3000, () => {
console.log('Server running on http://localhost:3000');
});React Frontend Integration
// ChatComponent.jsx
import React, { useState, useEffect } from 'react';
function ChatComponent() {
const [messages, setMessages] = useState([]);
const [input, setInput] = useState('');
const [loading, setLoading] = useState(false);
const sendMessage = async () => {
if (!input.trim()) return;
const userMessage = { role: 'user', content: input };
const newMessages = [...messages, userMessage];
setMessages(newMessages);
setInput('');
setLoading(true);
try {
const response = await fetch('http://localhost:3000/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ messages: newMessages })
});
const data = await response.json();
if (data.success) {
setMessages([...newMessages, data.data]);
}
} catch (error) {
console.error('Error:', error);
} finally {
setLoading(false);
}
};
// Streaming response hook
const useStreamChat = () => {
const [streamData, setStreamData] = useState('');
const streamChat = async (messages) => {
const response = await fetch('http://localhost:3000/api/chat/stream', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({ messages })
});
const reader = response.body.getReader();
const decoder = new TextDecoder();
while (true) {
const { done, value } = await reader.read();
if (done) break;
const chunk = decoder.decode(value);
const lines = chunk.split('
');
for (const line of lines) {
if (line.startsWith('data: ')) {
const data = line.slice(6);
if (data === '[DONE]') break;
try {
const json = JSON.parse(data);
setStreamData(prev => prev + json.content);
} catch (e) {
console.error('Parse error:', e);
}
}
}
}
};
return { streamData, streamChat };
};
return (
<div className="chat-container">
<div className="messages">
{messages.map((msg, idx) => (
<div key={idx} className={`message ${msg.role}`}>
{msg.content}
</div>
))}
{loading && <div className="loading">AI is thinking...</div>}
</div>
<div className="input-area">
<input
type="text"
value={input}
onChange={(e) => setInput(e.target.value)}
onKeyPress={(e) => e.key === 'Enter' && sendMessage()}
placeholder="Type a message..."
/>
<button onClick={sendMessage}>Send</button>
</div>
</div>
);
}
export default ChatComponent;Vue 3 Integration
// ChatView.vue
// Vue 3 single-file component for the same chat UI.
// Fix over the original: `export` statements are not allowed inside
// <script setup> (the Vue SFC compiler rejects them), so useChat is now a
// plain function here — in a real app, move it to composables/useChat.js
// and export it from there.
<template>
  <div class="chat-container">
    <div class="messages">
      <div
        v-for="(msg, index) in messages"
        :key="index"
        :class="['message', msg.role]"
      >
        {{ msg.content }}
      </div>
      <div v-if="loading" class="loading">
        AI is typing...
      </div>
    </div>
    <div class="input-area">
      <input
        v-model="input"
        @keyup.enter="sendMessage"
        placeholder="Type a message..."
      />
      <button @click="sendMessage">Send</button>
    </div>
  </div>
</template>
<script setup>
import { ref, reactive } from 'vue';
import axios from 'axios';

const messages = ref([]);
const input = ref('');
const loading = ref(false);

// Shared axios instance pointing at the backend proxy.
const apiClient = axios.create({
  baseURL: 'http://localhost:3000/api',
  headers: {
    'Content-Type': 'application/json'
  }
});

// Send the pending input to the backend and append the assistant's reply.
const sendMessage = async () => {
  if (!input.value.trim()) return;
  const userMessage = {
    role: 'user',
    content: input.value
  };
  messages.value.push(userMessage);
  const currentInput = input.value;
  input.value = '';
  loading.value = true;
  try {
    const response = await apiClient.post('/chat', {
      messages: messages.value
    });
    if (response.data.success) {
      messages.value.push(response.data.data);
    }
  } catch (error) {
    console.error('Error:', error);
    // Surface the failure in the conversation instead of failing silently.
    messages.value.push({
      role: 'assistant',
      content: 'Sorry, an error occurred.'
    });
  } finally {
    loading.value = false;
  }
};

// Composition API - reusable chat logic.
// NOTE: module exports are illegal in <script setup>; extract this to its own
// composable file (e.g. composables/useChat.js) to share it across components.
function useChat() {
  const messages = ref([]);
  const loading = ref(false);
  const chat = async (input) => {
    loading.value = true;
    try {
      const response = await apiClient.post('/chat', {
        messages: [
          ...messages.value,
          { role: 'user', content: input }
        ]
      });
      if (response.data.success) {
        // Only record the exchange once the request has succeeded.
        messages.value.push(
          { role: 'user', content: input },
          response.data.data
        );
      }
      return response.data;
    } finally {
      loading.value = false;
    }
  };
  return {
    messages,
    loading,
    chat
  };
}
</script>

TypeScript Best Practices
// types.ts
// Shared request/response contracts between the frontend and the backend proxy.

/** A single chat message, following the OpenAI role/content convention. */
export interface Message {
role: 'system' | 'user' | 'assistant';
content: string;
}
/** Payload accepted by the chat endpoints; optional fields fall back to server defaults. */
export interface ChatRequest {
messages: Message[];
model?: string;
temperature?: number;
max_tokens?: number;
}
/** Response envelope: `data` is set on success, `error` on failure. */
export interface ChatResponse {
success: boolean;
data?: Message;
error?: string;
}
// api.ts
import OpenAI from 'openai';
import type { ChatRequest, ChatResponse } from './types';

/** Model used when a request does not specify one. */
const DEFAULT_MODEL = 'gpt-4';

/**
 * Thin typed wrapper around the OpenAI chat API.
 * `chat` never throws — failures are reported via the ChatResponse envelope.
 */
class LLMService {
  private client: OpenAI;

  constructor(apiKey: string) {
    this.client = new OpenAI({ apiKey });
  }

  /** Run a single (non-streaming) completion. */
  async chat(request: ChatRequest): Promise<ChatResponse> {
    try {
      const completion = await this.client.chat.completions.create({
        model: request.model ?? DEFAULT_MODEL,
        messages: request.messages,
        // `??` rather than `||`: a caller-supplied temperature of 0 is a
        // valid value and must not be silently replaced by the 0.7 default.
        temperature: request.temperature ?? 0.7,
        max_tokens: request.max_tokens
      });
      return {
        success: true,
        data: completion.choices[0].message
      };
    } catch (error) {
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }

  /**
   * Yield content deltas as they arrive. Unlike `chat`, errors propagate
   * to the caller.
   */
  async *streamChat(request: ChatRequest) {
    const stream = await this.client.chat.completions.create({
      ...request,
      // The spread alone would pass model: undefined when the caller omits
      // it; apply the same default as `chat`.
      model: request.model ?? DEFAULT_MODEL,
      stream: true
    });
    for await (const chunk of stream) {
      yield chunk.choices[0]?.delta?.content || '';
    }
  }
}

export default LLMService;

Security Best Practices
🔐 API Key Security
- Never expose API keys in frontend code
- Use environment variables to manage secrets
- Proxy all API calls through the backend
🛡️ Request Validation
- Implement authentication and authorization
- Add rate limiting
- Validate and sanitize user inputs
Start JavaScript Development
Build powerful AI applications with JavaScript/TypeScript and bring intelligence to the frontend.
Get API Access