Jones0 committed on
Commit dae02cf · verified · 1 Parent(s): f69e06f

Create app.py

Files changed (1)
  1. app.py +478 -0
app.py ADDED
@@ -0,0 +1,478 @@
+ from flask import Flask, request, jsonify, Response
+ import cloudscraper  # replaces the requests library; used specifically to bypass Cloudflare protection
+ import io
+ import json
+ import re
+ import uuid
+ import random
+ import time
+ import os
+
+ app = Flask(__name__)
+
+ TARGET_URL = "https://grok.com/rest/app-chat/conversations/new"
+ MODELS = ["grok-2", "grok-3", "grok-3-thinking"]
+ COOKIE_NUM = 0
+ COOKIE_LIST = []
+ LAST_COOKIE_INDEX = {}
+ TEMPORARY_MODE = False
+
+ USER_AGENTS = [
+     # Windows - Chrome
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
+     # Windows - Firefox
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:132.0) Gecko/20100101 Firefox/132.0",
+     # Windows - Edge
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 Edg/123.0.2420.81",
+     # Windows - Opera
+     "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 OPR/109.0.0.0",
+     # macOS - Chrome
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
+     # macOS - Safari
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0.1 Safari/605.1.15",
+     # macOS - Firefox
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.7; rv:132.0) Gecko/20100101 Firefox/132.0",
+     # macOS - Opera
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14_4_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36 OPR/109.0.0.0",
+     "Mozilla/5.0 (Macintosh; Intel Mac OS X 14.4; rv:124.0) Gecko/20100101 Firefox/124.0",
+     # Linux - Chrome
+     "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
+     # Linux - Firefox
+     "Mozilla/5.0 (X11; Linux i686; rv:124.0) Gecko/20100101 Firefox/124.0",
+ ]
+
+ def create_cf_scraper(cookie_string):
+     """Create a configured cloudscraper instance."""
+     # Only use browsers that cloudscraper supports
+     browser = random.choice(['chrome', 'firefox'])  # fix: removed 'edge', which is not supported
+     platform = random.choice(['windows', 'darwin', 'linux'])
+
+     # Create the cloudscraper session
+     scraper = cloudscraper.create_scraper(
+         browser={
+             'browser': browser,
+             'platform': platform,
+             'desktop': True
+         },
+         delay=random.uniform(5, 10),  # wait for the Cloudflare check
+         interpreter='js2py',  # use js2py to solve the JavaScript challenge
+     )
+
+     # Pick a custom user agent
+     selected_ua = random.choice(USER_AGENTS)
+
+     # Set the base request headers
+     scraper.headers.update({
+         "user-agent": selected_ua,
+         "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
+         "accept-language": "en-US,en;q=0.5",
+         "accept-encoding": "gzip, deflate, br",
+         "dnt": "1",
+         "sec-fetch-dest": "document",
+         "sec-fetch-mode": "navigate",
+         "sec-fetch-site": "none",
+         "sec-fetch-user": "?1",
+         "upgrade-insecure-requests": "1",
+         "cookie": cookie_string
+     })
+
+     return scraper
+
+ def resolve_config():
+     global COOKIE_NUM, COOKIE_LIST, LAST_COOKIE_INDEX, TEMPORARY_MODE
+     COOKIE_LIST = []
+     cookie_index = 1
+
+     while True:
+         cookie_env_name = f"GROK_COOKIE_{cookie_index}"
+         cookie_string = os.environ.get(cookie_env_name)
+         if cookie_string:
+             try:
+                 print(f"Creating CloudScraper instance for cookie {cookie_index}...")
+                 scraper = create_cf_scraper(cookie_string)
+                 COOKIE_LIST.append(scraper)
+                 cookie_index += 1
+             except Exception as e:
+                 print(f"Failed to create CloudScraper for cookie {cookie_index}: {e}")
+                 cookie_index += 1
+         else:
+             break
+
+     COOKIE_NUM = len(COOKIE_LIST)
+     if COOKIE_NUM == 0:
+         raise ValueError("No Grok cookies provided; set them via environment variables (GROK_COOKIE_1, GROK_COOKIE_2, ...)")
+
+     temporary_mode_str = os.environ.get("GROK_TEMPORARY_MODE", "false").lower()
+     TEMPORARY_MODE = temporary_mode_str == "true" or temporary_mode_str == "1"
+
+     LAST_COOKIE_INDEX = {model: 0 for model in MODELS}
+
+     print(f"Loaded {COOKIE_NUM} Grok cookies from environment variables.")
+     print(f"Temporary mode: {TEMPORARY_MODE}")
+
+
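+ # Illustrative example (not part of the original file): resolve_config() expects the
+ # raw Cookie header value for each Grok account in numbered environment variables,
+ # set before the server starts, e.g.:
+ #
+ #     export GROK_COOKIE_1="<cookie string copied from a logged-in grok.com session>"
+ #     export GROK_COOKIE_2="<cookie string for a second account>"
+ #     export GROK_TEMPORARY_MODE="true"   # optional; "true"/"1" enables temporary chats
+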
+ @app.route("/", methods=["GET"])
+ def root():
+     return "Grok Proxy is running (Cloudflare Protected)", 200, {'Content-Type': 'text/plain'}
+
+
+ @app.route("/health", methods=["GET"])
+ def health_check():
+     return "OK", 200, {'Content-Type': 'text/plain'}
+
+
+ @app.route("/v1/models", methods=["GET"])
+ def get_models():
+     model_list = []
+     for model in MODELS:
+         model_list.append(
+             {
+                 "id": model,
+                 "object": "model",
+                 "created": int(time.time()),
+                 "owned_by": "Elbert",
+                 "name": model,
+             }
+         )
+     return jsonify({"object": "list", "data": model_list})
+
+
+ @app.route("/v1/chat/completions", methods=["POST"])
+ def chat_completions():
+     print("Received request")
+     openai_request = request.get_json()
+     print(openai_request)
+     stream = openai_request.get("stream", False)
+     messages = openai_request.get("messages")
+     model = openai_request.get("model")
+     if model not in MODELS:
+         return jsonify({"error": "Model not available"}), 500
+     if messages is None:
+         return jsonify({"error": "Messages is required"}), 400
+     disable_search, force_concise, messages = magic(messages)
+     message = format_message(messages)
+     # "grok-3-thinking" is longer than 6 characters, so it enables reasoning;
+     # the name is then trimmed back to the base model ("grok-2" / "grok-3").
+     is_reasoning = len(model) > 6
+     model = model[0:6]
+     return (
+         send_message(message, model, disable_search, force_concise, is_reasoning)
+         if stream
+         else send_message_non_stream(
+             message, model, disable_search, force_concise, is_reasoning)
+     )
+
+
+ def get_next_account(model):
+     # Round-robin over the configured cookie sessions, tracked per model
+     current = (LAST_COOKIE_INDEX[model] + 1) % COOKIE_NUM
+     LAST_COOKIE_INDEX[model] = current
+     print(f"Using account {current+1}/{COOKIE_NUM} for {model}")
+     return COOKIE_LIST[current]
+
+
+ def send_message(message, model, disable_search, force_concise, is_reasoning):
+     headers = {
+         "authority": "grok.com",
+         "accept": "*/*",
+         "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
+         "cache-control": "no-cache",
+         "content-type": "application/json",
+         "origin": "https://grok.com",
+         "pragma": "no-cache",
+         "referer": "https://grok.com/",
+         "sec-ch-ua": '"Not(A:Brand";v="99", "Google Chrome";v="133", "Chromium";v="133"',
+         "sec-ch-ua-mobile": "?0",
+         "sec-ch-ua-platform": '"Windows"',
+         "sec-fetch-dest": "empty",
+         "sec-fetch-mode": "cors",
+         "sec-fetch-site": "same-origin",
+     }
+     payload = {
+         "temporary": TEMPORARY_MODE,
+         "modelName": "grok-3",
+         "message": message,
+         "fileAttachments": [],
+         "imageAttachments": [],
+         "disableSearch": disable_search,
+         "enableImageGeneration": False,
+         "returnImageBytes": False,
+         "returnRawGrokInXaiRequest": False,
+         "enableImageStreaming": True,
+         "imageGenerationCount": 2,
+         "forceConcise": force_concise,
+         "toolOverrides": {},
+         "enableSideBySide": True,
+         "isPreset": False,
+         "sendFinalMetadata": True,
+         "customInstructions": "",
+         "deepsearchPreset": "",
+         "isReasoning": is_reasoning,
+     }
+
+     try:
+         scraper = get_next_account(model)
+
+         # Warm up Cloudflare
+         print("Warming up Cloudflare clearance...")
+         scraper.get("https://grok.com/", timeout=15)
+         time.sleep(random.uniform(1.0, 2.0))  # random wait to mimic a human
+
+         print("Sending message request...")
+         response = scraper.post(TARGET_URL, headers=headers, json=payload, stream=True)
+         response.raise_for_status()
+
+         def generate():
+             try:
+                 print("---------- Response ----------")
+                 cnt = 2  # skip the first two non-empty lines of the stream
+                 thinking = 2
+                 for line in response.iter_lines():
+                     if line:
+                         if cnt != 0:
+                             cnt -= 1
+                         else:
+                             decoded_line = line.decode("utf-8")
+                             data = json.loads(decoded_line)
+                             token = data["result"]["response"]["token"]
+                             content = ""
+                             if is_reasoning:
+                                 if thinking == 2:
+                                     thinking = 1
+                                     content = f"<Thinking>\n{token}"
+                                     print(f"{content}", end="")
+                                 elif thinking and (
+                                     not data["result"]["response"]["isThinking"]
+                                 ):
+                                     thinking = 0
+                                     content = f"\n</Thinking>\n{token}"
+                                     print(f"{content}", end="")
+                                 else:
+                                     content = token
+                                     print(content, end="")
+                             else:
+                                 content = token
+                                 print(content, end="")
+                             openai_chunk = {
+                                 "id": "chatcmpl-" + str(uuid.uuid4()),
+                                 "object": "chat.completion.chunk",
+                                 "created": int(time.time()),
+                                 "model": model,
+                                 "choices": [
+                                     {
+                                         "index": 0,
+                                         "delta": {"content": content},
+                                         "finish_reason": None,
+                                     }
+                                 ],
+                             }
+                             yield f"data: {json.dumps(openai_chunk)}\n\n"
+                             if data["result"]["response"]["isSoftStop"]:
+                                 openai_chunk = {
+                                     "id": "chatcmpl-" + str(uuid.uuid4()),
+                                     "object": "chat.completion.chunk",
+                                     "created": int(time.time()),
+                                     "model": model,
+                                     "choices": [
+                                         {
+                                             "index": 0,
+                                             "delta": {"content": content},
+                                             "finish_reason": "completed",
+                                         }
+                                     ],
+                                 }
+                                 yield f"data: {json.dumps(openai_chunk)}\n\n"
+                                 break
+                 print("\n---------- Response End ----------")
+                 yield "data: [DONE]\n\n"
+             except Exception as e:
+                 print(f"Failed to send message: {e}")
+                 yield f'data: {{"error": "{e}"}}\n\n'
+
+         return Response(generate(), content_type="text/event-stream")
+     except Exception as e:
+         print(f"Failed to send message: {e}")
+         return jsonify({"error": f"Failed to send message: {e}"}), 500
+
+
+ def send_message_non_stream(
+     message, model, disable_search, force_concise, is_reasoning
+ ):
+     headers = {
+         "authority": "grok.com",
+         "accept": "*/*",
+         "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
+         "cache-control": "no-cache",
+         "content-type": "application/json",
+         "origin": "https://grok.com",
+         "pragma": "no-cache",
+         "referer": "https://grok.com/",
+         "sec-ch-ua": '"Not(A:Brand";v="99", "Google Chrome";v="133", "Chromium";v="133"',
+         "sec-ch-ua-mobile": "?0",
+         "sec-ch-ua-platform": '"Windows"',
+         "sec-fetch-dest": "empty",
+         "sec-fetch-mode": "cors",
+         "sec-fetch-site": "same-origin",
+     }
+     payload = {
+         "temporary": TEMPORARY_MODE,
+         "modelName": "grok-3",
+         "message": message,
+         "fileAttachments": [],
+         "imageAttachments": [],
+         "disableSearch": disable_search,
+         "enableImageGeneration": False,
+         "returnImageBytes": False,
+         "returnRawGrokInXaiRequest": False,
+         "enableImageStreaming": True,
+         "imageGenerationCount": 2,
+         "forceConcise": force_concise,
+         "toolOverrides": {},
+         "enableSideBySide": True,
+         "isPreset": False,
+         "sendFinalMetadata": True,
+         "customInstructions": "",
+         "deepsearchPreset": "",
+         "isReasoning": is_reasoning,
+     }
+
+     thinking = 2
+     try:
+         scraper = get_next_account(model)
+
+         # Warm up Cloudflare
+         print("Warming up Cloudflare clearance...")
+         scraper.get("https://grok.com/", timeout=15)
+         time.sleep(random.uniform(1.0, 2.0))  # random wait to mimic a human
+
+         print("Sending non-streaming message request...")
+         response = scraper.post(TARGET_URL, headers=headers, json=payload, stream=True)
+         response.raise_for_status()
+         cnt = 2  # skip the first two non-empty lines of the stream
+         try:
+             print("---------- Response ----------")
+             buffer = io.StringIO()
+             for line in response.iter_lines():
+                 if line:
+                     if cnt != 0:
+                         cnt -= 1
+                     else:
+                         decoded_line = line.decode("utf-8")
+                         data = json.loads(decoded_line)
+                         token = data["result"]["response"]["token"]
+                         content = ""
+                         if is_reasoning:
+                             if thinking == 2:
+                                 thinking = 1
+                                 content = f"<Thinking>\n{token}"
+                                 print(f"{content}", end="")
+                                 buffer.write(content)
+                             elif thinking and (
+                                 not data["result"]["response"]["isThinking"]
+                             ):
+                                 thinking = 0
+                                 content = f"\n</Thinking>\n{token}"
+                                 print(f"{content}", end="")
+                                 buffer.write(content)
+                             else:
+                                 content = token
+                                 print(content, end="")
+                                 buffer.write(content)
+                         else:
+                             content = token
+                             print(content, end="")
+                             buffer.write(content)
+                         if data["result"]["response"]["isSoftStop"]:
+                             break
+             print("\n---------- Response End ----------")
+             openai_response = {
+                 "id": "chatcmpl-" + str(uuid.uuid4()),
+                 "object": "chat.completion",
+                 "created": int(time.time()),
+                 "model": model,
+                 "choices": [
+                     {
+                         "index": 0,
+                         "message": {"role": "assistant", "content": buffer.getvalue()},
+                         "finish_reason": "completed",
+                     }
+                 ],
+             }
+             return jsonify(openai_response)
+         except Exception as e:
+             print(f"Failed to process response: {e}")
+             return jsonify({"error": f"Failed to process response: {e}"}), 500
+     except Exception as e:
+         print(f"Failed to send message: {e}")
+         return jsonify({"error": f"Failed to send message: {e}"}), 500
+
+
+ def format_message(messages):
+     buffer = io.StringIO()
+     role_map, prefix, messages = extract_role(messages)
+     for message in messages:
+         role = message.get("role")
+         role = "\b" + role_map[role] if prefix else role_map[role]
+         content = message.get("content").replace("\\n", "\n")
+         pattern = re.compile(r"<\|removeRole\|>\n")
+         if pattern.match(content):
+             content = pattern.sub("", content)
+             buffer.write(f"{content}\n")
+         else:
+             buffer.write(f"{role}: {content}\n")
+     return buffer.getvalue()
+
+
+ def extract_role(messages):
+     role_map = {"user": "Human", "assistant": "Assistant", "system": "System"}
+     prefix = False
+     first_message = messages[0]["content"]
+     pattern = re.compile(
+         r"""
+         <roleInfo>\s*
+         user:\s*(?P<user>[^\n]*)\s*
+         assistant:\s*(?P<assistant>[^\n]*)\s*
+         system:\s*(?P<system>[^\n]*)\s*
+         prefix:\s*(?P<prefix>[^\n]*)\s*
+         </roleInfo>\n
+         """,
+         re.VERBOSE,
+     )
+     match = pattern.search(first_message)
+     if match:
+         role_map = {
+             "user": match.group("user"),
+             "assistant": match.group("assistant"),
+             "system": match.group("system"),
+         }
+         prefix = match.group("prefix") == "1"
+         messages[0]["content"] = pattern.sub("", first_message)
+         print("Extracted role map:")
+         print(
+             f"User: {role_map['user']}, Assistant: {role_map['assistant']}, System: {role_map['system']}"
+         )
+         print(f"Using prefix: {prefix}")
+     return (role_map, prefix, messages)
+
+
+ def magic(messages):
+     # Pull optional control tags out of the first message before forwarding it
+     first_message = messages[0]["content"]
+     disable_search = False
+     if re.search(r"<\|disableSearch\|>", first_message):
+         disable_search = True
+         print("Disable search")
+         first_message = re.sub(r"<\|disableSearch\|>", "", first_message)
+     force_concise = False
+     if re.search(r"<\|forceConcise\|>", first_message):
+         force_concise = True
+         print("Force concise")
+         first_message = re.sub(r"<\|forceConcise\|>", "", first_message)
+     messages[0]["content"] = first_message
+     return (disable_search, force_concise, messages)
+
+
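+ # Illustrative example (not part of the original file): the helpers above react to
+ # optional control tags embedded in the first message. The role names below (U/A/S)
+ # are arbitrary placeholders. A first user message could, for instance, begin with:
+ #
+ #     <roleInfo>
+ #     user: U
+ #     assistant: A
+ #     system: S
+ #     prefix: 1
+ #     </roleInfo>
+ #     <|disableSearch|>
+ #     <|forceConcise|>
+ #     Actual prompt text...
+ #
+ # extract_role() would remap roles to U/A/S, magic() would strip the
+ # <|disableSearch|>/<|forceConcise|> tags and set the matching flags, and a message
+ # starting with <|removeRole|> would be written without any role label.
+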
+ # Initialize configuration
+ resolve_config()
+
+ if __name__ == "__main__":
+     app.run(host="0.0.0.0", port=7860)
+
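+ # A minimal client sketch (assumption: the server is reachable at the port above,
+ # http://localhost:7860; the field names mirror what chat_completions() reads):
+ #
+ #     import requests
+ #
+ #     resp = requests.post(
+ #         "http://localhost:7860/v1/chat/completions",
+ #         json={
+ #             "model": "grok-3",
+ #             "stream": False,
+ #             "messages": [{"role": "user", "content": "Hello, Grok!"}],
+ #         },
+ #     )
+ #     print(resp.json()["choices"][0]["message"]["content"])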