You know what to do with this, friends: a small Node.js script that puts an OpenAI-compatible chat completions proxy in front of play.vercel.ai.

const https = require('https');
const http = require('http');

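// Base options for requests to the upstream play.vercel.ai endpoint; a randomized User-Agent is attached per request below.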
const options = {
    hostname: 'play.vercel.ai',
    port: 443,
    method: 'POST',
    headers: {
        'Content-Type': 'application/json',
    },
};

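// Collect an incoming body into a string, optionally JSON-parsing it; onData receives each raw chunk as it arrives.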
const readBody = (res, json, onData) => new Promise((resolve, reject) => {
    let buffer = '';

    res.on('data', chunk => {
        onData?.(chunk.toString());
        buffer += chunk;
    });

    res.on('end', () => {
        try {
            if (json) buffer = JSON.parse(buffer);
            resolve(buffer);
        } catch (e) {
            console.error(buffer);
            reject(e);
        }
    });
});

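// POST a JSON payload to the upstream host, attaching a freshly randomized User-Agent, and resolve with the raw response body.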
const request = (path, data, onData) =>
    new Promise((resolve, reject) => {
        options.headers['User-Agent'] = `Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/${Math.floor(Math.random() * 10000000)} Firefox/${(Math.random() * 200).toFixed(2)}`;
        const req = https.request({ ...options, path }, async (res) => {
            try {
                const body = await readBody(res, false, onData);
                resolve(body);
            } catch (e) {
                reject(e);
            }
        });

        // Surface network errors instead of leaving the promise pending forever.
        req.on('error', reject);
        req.write(JSON.stringify(data));
        req.end();
    });

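// Request a completion from /api/generate and stream it to onData, giving up if the upstream stays silent for 15 seconds.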
async function generate(text, { model, temperature, maxTokens, frequencyPenalty, presencePenalty, onData }) {
    console.log(`Model: ${model}\nPrompt length: ${text.length}`);

    let currentLine = '';

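    // If no data arrives within 15 seconds, resolve with an empty result instead of hanging.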
    let wasTimeout = false;
    let timeout;
    const timeoutPromise = new Promise(resolve => {
        timeout = setTimeout(() => {
            wasTimeout = true;
            currentLine = "";
            resolve();
        }, 15000);
    });

    await Promise.race([
        request('/api/generate', {
            prompt: text,
            model,
            temperature,
            maxTokens: Math.min(maxTokens ?? 511, 511), // clamp to 511 tokens, and default to that when the client omits max_tokens
            topP: 1,
            frequencyPenalty,
            presencePenalty,
            stopSequences: model.startsWith('anthropic:claude') ? ['\nHuman:'] : [], // keep Claude from writing the next user turn
        }, (line) => {
            if (wasTimeout) return;

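            // The first chunk cancels the idle timeout; later chunks just print progress dots.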
            if (timeout) {
                process.stdout.write('Generating response ');
                clearTimeout(timeout);
                timeout = 0;
            } else {
                process.stdout.write('.');
            }
            line = ((l) => {
                try {
                    return JSON.parse(l);
                } catch (e) {
                    return l;
                }
            })(line);

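            // Claude responses arrive as the full text generated so far, so only the new suffix is forwarded;
            // other models stream plain deltas that are appended as-is.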
            if (model.startsWith('anthropic:claude') && line.trim()) {
                onData?.(line.slice(currentLine.length));
                currentLine = line;
            } else {
                onData?.(line);
                currentLine += line;
            }
        }),
        timeoutPromise,
    ]);

    console.log(wasTimeout ? 'Timeout' : ' Done');

    return currentLine;
}

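// Flatten OpenAI-style chat messages into a plain Human/Assistant transcript that ends with an open "Assistant: " turn.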
function preparePrompt(messages) {
    return messages.filter(m => m.content?.trim()).map(m => {
        let author = '';
        switch (m.role) {
            case 'user': author = 'Human'; break;
            case 'assistant': author = 'Assistant'; break;
            case 'system': author = 'System Note'; break;
            default: author = m.role; break;
        }

        return `${author}: ${m.content.trim()}`;
    }).join('\n') + `\nAssistant: `;
}
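
// For illustration, a hypothetical input and the transcript it produces:
//
//   preparePrompt([
//       { role: 'system', content: 'Be terse.' },
//       { role: 'user', content: 'Hi!' },
//   ])
//   // => "System Note: Be terse.\nHuman: Hi!\nAssistant: "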

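// Local HTTP server that accepts OpenAI-style chat completion requests and forwards them to generate(); non-POST requests get a static model list.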
async function main() {
    const server = http.createServer(async (req, res) => {
        if (req.method.toUpperCase() === 'POST') {
            const body = await readBody(req, true);
            const [, owner, modelName] = req.url.split('/');
            const model = `${owner}:${modelName}`;

            const {
                messages,
                temperature,
                max_tokens,
                presence_penalty,
                frequency_penalty,
                stream,
            } = body;

            if (stream) {
                res.setHeader('Content-Type', 'text/event-stream');
            } else {
                res.setHeader('Content-Type', 'application/json');
            }

            const id = `chatcmpl-${(Math.random().toString(36).slice(2))}`;
            const created = Math.floor(Date.now() / 1000);

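            // For streaming clients, open the SSE stream with an initial role-only delta, as the OpenAI API does.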
            if (stream) {
                const data = JSON.stringify({
                    id, created,
                    object: 'chat.completion.chunk',
                    model: modelName,
                    choices: [{
                        delta: { role: 'assistant' },
                        finish_reason: null,
                        index: 0,
                    }],
                });
                res.write(`data: ${data}\n\n`);
            }

            const prompt = preparePrompt(messages);
            const result = await generate(prompt, {
                model,
                temperature,
                maxTokens: max_tokens,
                frequencyPenalty: frequency_penalty,
                presencePenalty: presence_penalty,
                onData: (line) => {
                    if (stream) {
                        const data = JSON.stringify({
                            id, created,
                            object: 'chat.completion.chunk',
                            model: modelName,
                            choices: [{
                                delta: { content: line },
                                finish_reason: null,
                                index: 0,
                            }]
                        });
                        res.write(`data: ${data}\n\n`);
                    }
                },
            });

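            // Finish the stream with an empty delta marked finish_reason "stop", followed by the [DONE] sentinel.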
            if (stream) {
                const data = JSON.stringify({
                    id, created,
                    object: 'chat.completion.chunk',
                    model: modelName,
                    choices: [{
                        delta: {},
                        finish_reason: 'stop',
                        index: 0,
                    }],
                });
                res.write(`data: ${data}\n\ndata: [DONE]\n\n`);
            } else {
                res.write(JSON.stringify({
                    id, created,
                    object: 'chat.completion',
                    model: modelName,
                    choices: [{
                        message: {
                            role: 'assistant',
                            content: result,
                        },
                        finish_reason: 'stop',
                        index: 0,
                    }]
                }));
            }

            res.end();
        } else {
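            // Any non-POST request gets a static OpenAI-style model list.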
            res.setHeader('Content-Type', 'application/json');
            res.write(JSON.stringify({
                object: 'list',
                data: [
                    { id: 'claude-v1', object: 'model', created: Date.now(), owned_by: 'anthropic', permission: [], root: 'claude-v1', parent: null },
                    { id: 'gpt-3.5-turbo', object: 'model', created: Date.now(), owned_by: 'openai', permission: [], root: 'gpt-3.5-turbo', parent: null },
                ]
            }));
        }
        res.end();
    });

    server.listen(5004, '0.0.0.0', () => {
        console.log(`proxy for claude-v1: 'http://127.0.0.1:5004/anthropic/claude-v1'`);
    });
}

main().catch(console.error);
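
For reference, here is a minimal sketch of a client talking to the proxy above (a hypothetical payload; it assumes the script is already listening on 127.0.0.1:5004 and reads the non-streaming JSON response shape the server writes):

const http = require('http');

const payload = JSON.stringify({
    messages: [{ role: 'user', content: 'Hello!' }],
    temperature: 0.7,
    max_tokens: 256,
    stream: false,
});

const req = http.request({
    hostname: '127.0.0.1',
    port: 5004,
    path: '/anthropic/claude-v1',
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
}, (res) => {
    let body = '';
    res.on('data', chunk => body += chunk);
    // The proxy answers with an OpenAI-style chat.completion object.
    res.on('end', () => console.log(JSON.parse(body).choices[0].message.content));
});

req.on('error', console.error);
req.write(payload);
req.end();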