import requests, json, uuid, time
from flask import Flask, request, Response, stream_with_context

app = Flask(__name__)

# One-time token tying this script run to the browser-based auth flow.
state = uuid.uuid4()
print(f"Go to this link and make an account:\n\nhttps://aide.dev/authenticate?state={state}\n\nScript will run when done automatically.")

# Poll the auth-status endpoint once per second until the user finishes
# signing in and an access token is issued. `key` is used later by
# chat_request as the bearer token.
while True:
    try:
        # A timeout keeps a dropped connection from hanging the poll forever.
        result = requests.get(
            f"https://api.codestory.ai/v1/auth/editor/status?state={state}",
            timeout=10,
        )
    except requests.RequestException:
        # Transient network error: wait and retry instead of crashing.
        time.sleep(1)
        continue
    if result.ok:
        status_data = result.json()
        if status_data.get("access_token"):
            key = status_data["access_token"]
            break
    time.sleep(1)

def chat_request(messages, temp, system):
    """Forward a chat request to the upstream openrouter proxy.

    Args:
        messages: list of role/content message dicts appended after the
            system message.
        temp: sampling temperature passed through verbatim (may be None).
        system: Anthropic-style system content blocks; a generic default
            is substituted when falsy.

    Returns:
        The streaming ``requests.Response`` on success, or ``None`` on any
        failure (non-2xx status or network error), matching the caller's
        ``if not result`` contract.
    """
    if not system:
        system = [{"type": "text", "text": "You are a helpful assistant that follows all user instructions."}]
    payload = {
        "model": "deepseek/deepseek-r1",
        "temperature": temp,
        "stream": True,
        "messages": [
            {"role": "system", "content": system},
            *messages
        ]
    }
    try:
        resp = requests.post(
            "https://codestory-provider-dot-anton-390822.ue.r.appspot.com/openrouter-api",
            headers={"authorization": f"Bearer {key}", "content-type": "application/json"},
            json=payload,
            stream=True,
            # Connect timeout only; the read timeout is disabled (None)
            # because a live token stream may legitimately pause between
            # chunks for longer than any fixed read deadline.
            timeout=(10, None),
        )
    except requests.RequestException:
        # Network-level failure: honour the None-on-failure contract
        # instead of letting the exception bubble up as a Flask 500.
        return None
    if not resp.ok:
        resp.close()  # release the pooled connection on failure
        return None
    return resp

def _parse_sse_line(raw):
    """Decode one raw SSE line and parse its JSON payload.

    Returns the parsed dict, or ``None`` when the line is not valid JSON
    (e.g. the ``[DONE]`` sentinel or a garbled fragment).
    """
    try:
        text = raw.decode('utf-8')
        # Strip only the leading SSE prefix; str.replace would also
        # corrupt any literal 'data: ' occurring inside the payload.
        if text.startswith('data: '):
            text = text[len('data: '):]
        return json.loads(text)
    except (UnicodeDecodeError, json.JSONDecodeError):
        return None


@app.route("/messages", methods=["POST"])
@app.route("/chat/completions", methods=["POST"])  # Added route for OpenAI-style compatibility
def handle_chat():
    """Proxy a chat request upstream and relay the reply.

    Serves both Anthropic-style (/messages) and OpenAI-style
    (/chat/completions) clients. When 'stream' is true (the default) the
    upstream SSE deltas are re-emitted as an OpenAI-shaped event stream;
    otherwise all deltas are concatenated into one Anthropic-shaped
    message object.
    """
    data = request.json
    streaming = data.get("stream", True)
    result = chat_request(
        messages=data.get("messages"),
        temp=data.get("temperature"),
        system=data.get("system")
    )

    if not result:
        # Upstream refused or was unreachable: signal a gateway error
        # rather than returning the error body with a misleading 200 OK.
        return {"error": "Request failed"}, 502

    if streaming:
        def generate():
            for raw in result.iter_lines():
                if not raw:
                    continue  # SSE keep-alive blank line
                d = _parse_sse_line(raw)
                if not d or not d.get('choices'):
                    continue
                choice = d['choices'][0]
                # 'delta' can be present-but-null; treat that as empty.
                chunk = (choice.get('delta') or {}).get('content', '')
                if chunk:
                    yield f"data: {json.dumps({'choices': [{'delta': {'content': chunk}}]})}\n\n"
                if choice.get('finish_reason') is not None:
                    yield f"data: {json.dumps({'choices': [{'finish_reason': 'stop'}]})}\n\n"
                    break

        return Response(stream_with_context(generate()), content_type='text/event-stream', headers={'Cache-Control': 'no-cache', 'Connection': 'keep-alive'})
    else:
        # Non-streaming: accumulate every delta, then join once (avoids
        # quadratic str += in long generations).
        parts = []
        for raw in result.iter_lines():
            if not raw:
                continue
            d = _parse_sse_line(raw)
            if not d or not d.get('choices'):
                continue
            choice = d['choices'][0]
            chunk = (choice.get('delta') or {}).get('content', '')
            if chunk:
                parts.append(chunk)
            if choice.get('finish_reason') is not None:
                break
        return {"type": "message", "content": [{"type": "text", "text": "".join(parts)}]}

@app.route("/models", methods=["GET"])
def list_models():
    """Return the static OpenAI-style model catalogue (a single model)."""
    available = [{"id": "deepseek/deepseek-r1", "object": "model"}]
    return {"object": "list", "data": available}

if __name__ == "__main__":
    # Start the Flask development server on its defaults (127.0.0.1:5000).
    app.run()
# Edit Report
# Pub: 30 Jan 2025 22:59 UTC
# Views: 1094