Free Claude with limited context:
- Copy and paste the below code into a file called "vercel.js"
- Open CMD and navigate to your file and run it by using "node vercel.js" (LEAVE THIS UP AND RUNNING)
- It will give you a proxy link called: http://127.0.0.1:5004/anthropic/claude-v1/v1
- Plug it in the same way you would any other reverse proxy.
- Make sure to set the max context size to 1024 or lower, otherwise it could get a little wonky.
const https = require('https');
const http = require('http');

// Shared base options for every upstream request to the vercel.ai playground.
// NOTE: `request()` mutates `headers` (it injects a randomized User-Agent),
// so this object is deliberately shared, not frozen.
const options = {
  hostname: 'play.vercel.ai',
  port: 443,
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
  },
};
// Drain a readable stream into a single string; when `json` is true, parse it.
// Resolves with the raw string (or parsed value); rejects only when JSON
// parsing fails, logging the raw body first to aid debugging.
const readBody = (res, json) => new Promise((resolve, reject) => {
  const chunks = [];
  res.on('data', (chunk) => chunks.push(chunk));
  res.on('end', () => {
    const raw = chunks.join('');
    if (!json) {
      resolve(raw);
      return;
    }
    try {
      resolve(JSON.parse(raw));
    } catch (err) {
      console.error(raw);
      reject(err);
    }
  });
});
// POST `data` as JSON to `path` on the upstream host and resolve with the raw
// response body. A freshly randomized User-Agent is written into the shared
// `options.headers` before each call to vary the browser fingerprint.
const request = (path, data) =>
  new Promise((resolve, reject) => {
    const geckoBuild = Math.floor(Math.random() * 10000000);
    const firefoxVersion = (Math.random() * 200).toFixed(2);
    options.headers['User-Agent'] = `Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/${geckoBuild} Firefox/${firefoxVersion}`;
    const req = https.request({ ...options, path }, (res) => {
      readBody(res).then(resolve, reject);
    });
    req.write(JSON.stringify(data));
    req.end();
  });
/**
 * Send a completion request to the playground API and reassemble the
 * streamed response into the final completion text.
 *
 * @param {string} text - the fully rendered prompt
 * @param {{model: string, temperature: number, maxTokens: number,
 *          frequencyPenalty: number, presencePenalty: number}} params
 * @returns {Promise<string>} the final (longest) completion text
 */
async function generate(text, { model, temperature, maxTokens, frequencyPenalty, presencePenalty }) {
  console.log(model);
  const response = await request('/api/generate', {
    prompt: text,
    model,
    temperature,
    maxTokens: Math.min(maxTokens, 500), // upstream rejects larger values
    topP: 1,
    frequencyPenalty,
    presencePenalty,
    // Fix: was 'ahtropic:claude' (typo), so the Claude stop sequence was
    // never sent and the model could ramble past the Human turn.
    stopSequences: model.startsWith('anthropic:claude') ? ['\n\nHuman:'] : [],
  });
  // The upstream streams progressive snapshots, one per line; each snapshot
  // usually extends the previous one. Keep the longest coherent snapshot.
  const lines = response.split('\n').map(l => {
    try {
      return JSON.parse(l);
    } catch (e) {
      return l;
    }
  });
  let currentLine = '';
  for (const line of lines) {
    if (currentLine && line.startsWith(currentLine)) {
      currentLine = line; // snapshot extends what we have — replace
    } else {
      currentLine += line; // disjoint fragment — append
    }
  }
  return currentLine;
}
/**
 * Flatten an OpenAI-style message array into a single chat transcript,
 * dropping messages with empty/whitespace-only content, and append an
 * "Assistant: " cue for the model to continue from.
 *
 * @param {Array<{role: string, content?: string}>} messages
 * @returns {string} the rendered prompt
 */
function preparePrompt(messages) {
  // Map (not a plain object) so unknown roles never collide with prototype keys.
  const roleLabels = new Map([
    ['user', 'Human'],
    ['assistant', 'Assistant'],
    ['system', 'System Note'],
  ]);
  const rendered = [];
  for (const message of messages) {
    const content = message.content?.trim();
    if (!content) continue;
    const author = roleLabels.get(message.role) ?? message.role;
    rendered.push(`${author}: ${content}`);
  }
  return `${rendered.join('\n')}\nAssistant: `;
}
/**
 * Start a local HTTP server exposing an OpenAI-compatible chat API that
 * forwards to the vercel.ai playground.
 *
 * POST /<owner>/<model>/...  -> chat completion (non-streaming)
 * any other method           -> static model list
 */
async function main() {
  const server = http.createServer(async (req, res) => {
    res.setHeader('Content-Type', 'application/json');
    try {
      if (req.method.toUpperCase() === 'POST') {
        const body = await readBody(req, true);
        // URL shape: /<owner>/<modelName>/v1/... e.g. /anthropic/claude-v1/v1
        const [, owner, modelName] = req.url.split('/');
        const model = `${owner}:${modelName}`;
        const {
          messages,
          temperature,
          max_tokens,
          presence_penalty,
          frequency_penalty,
        } = body;
        const prompt = preparePrompt(messages);
        const result = await generate(prompt, {
          model,
          temperature,
          maxTokens: max_tokens,
          frequencyPenalty: frequency_penalty,
          presencePenalty: presence_penalty,
        });
        res.write(JSON.stringify({
          id: `chatcmpl-${(Math.random().toString(36).slice(2))}`,
          object: 'chat.completion',
          model: modelName,
          choices: [{
            message: {
              role: 'assistant',
              content: result,
            },
            finish_reason: 'stop',
            index: 0,
          }]
        }));
      } else {
        res.write(JSON.stringify({
          object: 'list',
          data: [
            { id: 'gpt-4', object: 'model', created: Date.now(), owned_by: 'openai', permission: [], root: 'gpt-4', parent: null },
            { id: 'claude-v1', object: 'model', created: Date.now(), owned_by: 'anthropic', permission: [], root: 'claude-v1', parent: null },
          ]
        }));
      }
    } catch (err) {
      // Fix: a malformed JSON body or an upstream failure previously rejected
      // inside this async handler — leaving the client socket hanging and
      // raising an unhandled promise rejection (fatal on modern Node).
      console.error(err);
      res.statusCode = 500;
      res.write(JSON.stringify({ error: { message: String(err?.message ?? err) } }));
    }
    res.end();
  });
  server.listen(5004, '0.0.0.0', () => {
    console.log(`proxy for claude-v1: 'http://127.0.0.1:5004/anthropic/claude-v1/v1'`);
    console.log(`proxy for gpt-4: 'http://127.0.0.1:5004/openai/gpt-4/v1'`);
  });
}
// Entry point; log (rather than crash on) any startup failure.
main().catch(console.error);