huggingfacecospacesYOURNAMEapp committed on
Commit
dbcc687
·
verified ·
1 Parent(s): a09e7ab

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -13
app.py CHANGED
@@ -3,34 +3,40 @@ import requests
3
  import time
4
 
5
  def monster_chat(message, history):
6
- # ืžื•ื“ืœ Llama-3 ื”ื™ืฆื™ื‘
7
- API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
 
 
 
8
 
9
  payload = {
10
- "inputs": f"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a funny AI. Answer in Hebrew.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\n{message}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
11
- "parameters": {"max_new_tokens": 150, "temperature": 0.7}
12
  }
13
 
14
  try:
15
- response = requests.post(API_URL, json=payload, timeout=15)
16
 
17
- # ืื ื”ืžื•ื“ืœ ื‘ื˜ืขื™ื ื” (503), ื ื—ื›ื” ืงืฆืช
18
  if response.status_code == 503:
19
  time.sleep(5)
20
- response = requests.post(API_URL, json=payload, timeout=15)
21
 
22
  result = response.json()
23
 
 
24
  if isinstance(result, list) and len(result) > 0:
25
- return result[0].get('generated_text', "ื”ืžืคืœืฆืช ืื™ื‘ื“ื” ืืช ื”ืงื•ืœ!")
26
- return "ื”ืžืคืœืฆืช ื—ื•ืฉื‘ืช... ื ืกื” ืœืฉืœื•ื— ืฉื•ื‘."
 
 
 
27
  except:
28
- return "ืชืงืœื” ื‘ื—ื™ื‘ื•ืจ ืœืžืคืœืฆืช."
29
 
30
- # ื™ืฆื™ืจืช ื”ืžืžืฉืง ื‘ืฆื•ืจื” ื ืงื™ื™ื” ื‘ืœื™ ืคืจืžื˜ืจื™ื ืฉื‘ื•ืจื™ื
31
  with gr.Blocks() as demo:
32
- gr.Markdown("# ๐Ÿ‘พ MONSTER CHAT")
33
  gr.ChatInterface(fn=monster_chat)
34
 
35
- # ื”ืขื‘ืจื ื• ืืช ื”-theme ืœืคื”, ื‘ื“ื™ื•ืง ื›ืžื• ืฉื”ืฉื’ื™ืื” ื‘ื™ืงืฉื”
36
  demo.launch()
 
3
  import time
4
 
5
  def monster_chat(message, history):
6
+ # ื”ื›ืชื•ื‘ืช ื”ืžื“ื•ื™ืงืช ืฉืœ ื”ืžื•ื“ืœ ืžื”ืฆื™ืœื•ื ืžืกืš ืฉืœืš
7
+ API_URL = "https://api-inference.huggingface.co/models/Jackrong/Qwen3.5-27B-Claude-4.6-Opus-Reasoning-Distilled"
8
+
9
+ # ื”ื’ื“ืจืช ื”ื•ื“ืขืช ื”ืžืขืจื›ืช ื›ื“ื™ ืฉื™ื“ื‘ืจ ืขื‘ืจื™ืช
10
+ prompt = f"<|im_start|>system\nYou are a funny and smart AI monster. Answer in Hebrew.<|im_end|>\n<|im_start|>user\n{message}<|im_end|>\n<|im_start|>assistant\n"
11
 
12
  payload = {
13
+ "inputs": prompt,
14
+ "parameters": {"max_new_tokens": 512, "temperature": 0.7}
15
  }
16
 
17
  try:
18
+ response = requests.post(API_URL, json=payload, timeout=20)
19
 
20
+ # ื˜ื™ืคื•ืœ ื‘ืžืฆื‘ ืฉื”ืžื•ื“ืœ ื ื˜ืขืŸ (ืฉื’ื™ืื” 503)
21
  if response.status_code == 503:
22
  time.sleep(5)
23
+ response = requests.post(API_URL, json=payload, timeout=20)
24
 
25
  result = response.json()
26
 
27
+ # ื—ื™ืœื•ืฅ ื”ืชืฉื•ื‘ื” ืžื”ืคื•ืจืžื˜ ืฉืœ Qwen
28
  if isinstance(result, list) and len(result) > 0:
29
+ text = result[0].get('generated_text', "")
30
+ # ื ื™ืงื•ื™ ื”ื˜ืงืกื˜ ื›ื“ื™ ืœื”ืฉืื™ืจ ืจืง ืืช ื”ืชืฉื•ื‘ื” ืฉืœ ื”-AI
31
+ answer = text.split("<|im_start|>assistant\n")[-1].replace("<|im_end|>", "").strip()
32
+ return answer
33
+ return "ื”ืžืคืœืฆืช ืžื”ืชืžื•ื ื” ื—ื•ืฉื‘ืช... ื ืกื” ืœืฉืœื•ื— ืฉื•ื‘!"
34
  except:
35
+ return "ื™ืฉ ืขื•ืžืก ืขืœ ื”ืžื•ื“ืœ ื”ืžืคืœืฆืชื™ ื”ื–ื”, ื ืกื” ืฉื•ื‘ ื‘ืขื•ื“ ืจื’ืข."
36
 
37
# Chat interface (original comment was in Hebrew).
with gr.Blocks() as demo:
    # FIX: the original wrapped the constant title in next(iter([...])),
    # a needless indirection; the plain literal renders identically.
    gr.Markdown("# ๐Ÿ‘พ Qwen 3.5 Monster Chat")
    gr.ChatInterface(fn=monster_chat)

demo.launch()