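"""Flask proxy that exposes an OpenAI-compatible /v1/chat/completions endpoint,
forwards incoming chat requests to the Amigo API, and reshapes the upstream
reply into the OpenAI chat.completion response format."""
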
import requests
from flask import Flask, request, jsonify
import logging
import json

# Initialize Flask app
app = Flask(__name__)

# Set up logging for debugging purposes
logging.basicConfig(level=logging.INFO)

# URL of the upstream Amigo API chat-completions endpoint
AMIGO_API_URL = "https://api.amigochat.io/v1/chat/completions"

headers = {
    'accept': 'application/json, text/event-stream',
    'accept-language': 'en-US,en;q=0.9,ru;q=0.8',
    'content-type': 'application/json',
    'origin': 'https://niansuh-01.hf.space',
    'priority': 'u=1, i',
    'referer': 'https://niansuh-01.hf.space/?logs=container',
    'sec-ch-ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
}


@app.route('/v1/chat/completions', methods=['POST'])
def chat_completions():
    try:
        # Log incoming request data
        data = request.get_json(silent=True)
        app.logger.info(f"Received request data: {json.dumps(data, indent=2)}")

        # Reject requests that are not JSON or are missing the required 'messages' field
        if not data or 'messages' not in data:
            return jsonify({
                'error': {
                    'message': "Request body must be JSON and include a 'messages' field",
                    'type': 'invalid_request_error'
                }
            }), 400

        # Prepare the payload for the Amigo API
        payload = {
            "messages": data['messages'],
            "model": data.get('model', 'gpt-4o-mini'),  # Default to 'gpt-4o-mini'
            "personaId": "amigo",
            "frequency_penalty": 0,
            "max_tokens": 4000,
            "presence_penalty": 0,
            # Request a non-streaming reply so the body can be parsed below
            # as a single JSON object via response.json()
            "stream": False,
            "temperature": 0.5,
            "top_p": 0.95
        }

        # Send the request to the Amigo API
        response = requests.post(AMIGO_API_URL, headers=headers, json=payload, timeout=60)

        # If the request succeeds, return the response in OpenAI format
        if response.status_code == 200:
            amigo_response = response.json()

            # Log the response from the Amigo API
            app.logger.info(f"Amigo API response: {json.dumps(amigo_response, indent=2)}")

            # Transform the Amigo API response into the OpenAI chat.completion format
            openai_response = {
                "id": amigo_response.get('id', 'unknown'),
                "object": "chat.completion",
                "created": amigo_response.get('created', 0),
                "model": amigo_response.get('model', 'gpt-4o-mini'),
                "choices": [
                    {
                        "message": {
                            # The reply is the assistant's message, so default the role accordingly
                            "role": amigo_response.get('messages', [{}])[0].get('role', 'assistant'),
                            "content": amigo_response.get('messages', [{}])[0].get('content', '')
                        },
                        "finish_reason": amigo_response.get('finish_reason', 'stop'),
                        "index": 0
                    }
                ],
                "usage": amigo_response.get('usage', {})
            }
            return jsonify(openai_response)
        else:
            # Log the error if the response from Amigo API is not 200
            app.logger.error(f"Amigo API failed with status {response.status_code}: {response.text}")
            return jsonify({
                'error': {
                    'message': f'Failed to fetch from Amigo API: {response.text}',
                    'type': 'internal_error'
                }
            }), 500
    except Exception as e:
        # Catch any other errors and log them
        app.logger.error(f"Error in processing request: {str(e)}")
        return jsonify({
            'error': {
                'message': f'An error occurred: {str(e)}',
                'type': 'internal_error'
            }
        }), 500


if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=5000)
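

# ---------------------------------------------------------------------------
# Example client call (a minimal sketch): this assumes the proxy is running
# locally on port 5000 and that the upstream Amigo API accepts the forwarded
# request. Run it from a separate process, e.g.:
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:5000/v1/chat/completions",
#       json={
#           "model": "gpt-4o-mini",
#           "messages": [{"role": "user", "content": "Hello!"}],
#       },
#       timeout=60,
#   )
#   resp.raise_for_status()
#   print(resp.json()["choices"][0]["message"]["content"])
# ---------------------------------------------------------------------------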