Update app.py
app.py CHANGED
@@ -22,7 +22,7 @@ class Config:
     HISTORY_FILE = "chat_history.csv"
     CONFIG_FILE = "config.json"
     DEFAULT_SETTINGS = {
-        "language": "ar",
+        "language": "ar",
         "theme": THEME,
         "max_tokens": MAX_TOKENS,
         "temperature": TEMPERATURE,
@@ -47,9 +47,11 @@ class Config:
 def load_model():
     """Load the model and tokenizer with GPU support if available"""
     try:
-        tokenizer = AutoTokenizer.from_pretrained(
+        tokenizer = AutoTokenizer.from_pretrained(
+            Config.MODEL_ID,
+            trust_remote_code=True
+        )
 
-        # Device configuration
         if torch.cuda.is_available():
             device = "cuda"
             torch_dtype = torch.float16
@@ -62,7 +64,8 @@ def load_model():
         model = AutoModelForCausalLM.from_pretrained(
             Config.MODEL_ID,
             torch_dtype=torch_dtype,
-            device_map="auto" if device == "cuda" else None
+            device_map="auto" if device == "cuda" else None,
+            trust_remote_code=True
         )
 
         if device == "cpu":
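Taken together, the two hunks above change model loading to pass trust_remote_code=True to both the tokenizer and the model, and to let device_map="auto" place the weights when CUDA is available. A minimal sketch of the assembled pattern is shown below; the MODEL_ID placeholder and the body of the CPU branch are assumptions rather than content taken from the diff, and device_map="auto" additionally requires the accelerate package.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "your-model-id"  # placeholder standing in for Config.MODEL_ID

def load_model():
    """Load tokenizer and model, preferring fp16 on GPU when available."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)

    if torch.cuda.is_available():
        device, torch_dtype = "cuda", torch.float16
    else:
        device, torch_dtype = "cpu", torch.float32

    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        torch_dtype=torch_dtype,
        device_map="auto" if device == "cuda" else None,
        trust_remote_code=True,
    )
    if device == "cpu":
        model = model.to(device)  # assumed: the diff shows only the branch, not its body
    return tokenizer, model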
@@ -159,7 +162,7 @@ You are Edraky (إدراكي), a smart, multilingual AI assistant built to suppo
 
 🎓 Key Principles:
 
-1. Use simplified language that students of all levels can understand—don
+1. Use simplified language that students of all levels can understand—don't overcomplicate.
 2. Always encourage curiosity, even if a question is simple or incorrect.
 3. Teach in both Arabic and English, depending on the user's input. If the user speaks Arabic, prefer using Modern Standard Arabic unless otherwise specified.
 4. Be culturally aware of Egypt and the Arab world. Use relevant examples when explaining.
@@ -198,12 +201,9 @@ You are Edraky (إدراكي), a smart, multilingual AI assistant built to suppo
         pad_token_id=tokenizer.eos_token_id
     )
 
-    # The output structure may depend on the pipeline/model; adjust as needed
-    # Here, we assume outputs[0]['generated_text'][-1]['content'] is correct
     try:
         response = outputs[0]['generated_text'][-1]['content']
     except Exception:
-        # Fallback for different output structure
         response = outputs[0]['generated_text'] if 'generated_text' in outputs[0] else str(outputs)
     log_conversation(prompt, response, language)
     return response
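The try/except kept above normalizes the text-generation pipeline output: chat-style pipelines return a list of role/content messages under 'generated_text', while plain text generation returns a string. A small sketch of that normalization as a standalone helper follows; extract_response is a hypothetical name, since the app does this inline.

def extract_response(outputs):
    """Return the generated text regardless of the pipeline's output shape."""
    generated = outputs[0].get('generated_text')
    if isinstance(generated, list):   # chat format: list of {"role", "content"} messages
        return generated[-1]['content']
    if isinstance(generated, str):    # plain text-generation format
        return generated
    return str(outputs)               # last-resort fallback, mirroring the diff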
@@ -232,12 +232,10 @@ def analyze_history() -> Optional[plt.Figure]:
 
     plt.figure(figsize=(12, 8))
 
-    # Create subplots
     ax1 = plt.subplot2grid((2, 2), (0, 0))
     ax2 = plt.subplot2grid((2, 2), (0, 1))
     ax3 = plt.subplot2grid((2, 2), (1, 0), colspan=2)
 
-    # Plot 1: Daily conversation count
     df['date'] = df['timestamp'].dt.date
     daily_counts = df.groupby('date').size()
     daily_counts.plot(kind='bar', ax=ax1, color='#4e79a7')
@@ -245,7 +243,6 @@ def analyze_history() -> Optional[plt.Figure]:
     ax1.set_xlabel("Date")
     ax1.set_ylabel("Count")
 
-    # Plot 2: Language distribution
     lang_dist = df['language'].value_counts()
     lang_dist.plot(kind='pie', ax=ax2, autopct='%1.1f%%',
                    colors=['#f28e2b', '#e15759'],
@@ -253,7 +250,6 @@ def analyze_history() -> Optional[plt.Figure]:
     ax2.set_title("Language Distribution")
     ax2.set_ylabel("")
 
-    # Plot 3: Hourly activity
     df['hour'] = df['timestamp'].dt.hour
     hourly = df.groupby('hour').size()
     sns.lineplot(x=hourly.index, y=hourly.values, ax=ax3, color='#59a14f')
@@ -389,7 +385,6 @@ def create_chat_tab(settings: dict):
         lines=10
     )
 
-    # Event handlers
     submit_btn.click(
         fn=lambda p, l, m, t, tp: generate_response(
             p,
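This hunk and the three below all remove "Event handler" comments around the same Gradio wiring pattern: Button.click(fn=..., inputs=..., outputs=...) binds a callback whose arguments come from the listed input components and whose return value fills the output component. A self-contained toy sketch of that pattern, with illustrative component names not taken from app.py:

import gradio as gr

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Response", lines=10)
    submit = gr.Button("Submit")
    # The lambda receives the current value of `prompt`; its return value
    # is written into `output`, just like the generate_response wiring above.
    submit.click(fn=lambda p: p.upper(), inputs=prompt, outputs=output)

if __name__ == "__main__":
    demo.launch()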
@@ -415,7 +410,6 @@ def create_history_tab():
     clear_btn = gr.Button("🗑️ مسح السجل / Clear History", variant="stop")
     export_btn = gr.Button("📤 تصدير / Export")
 
-    # Event handlers
     refresh_btn.click(fn=show_history, outputs=history_table)
     clear_btn.click(fn=clear_history, outputs=history_table)
     export_btn.click(
@@ -434,7 +428,6 @@ def create_analytics_tab():
     with gr.Row():
         refresh_analytics = gr.Button("🔄 تحديث التحليلات / Refresh Analytics")
 
-    # Event handlers
     refresh_analytics.click(fn=analyze_history, outputs=analytics_plot)
 
 def create_settings_tab(settings: dict):
@@ -476,7 +469,6 @@ def create_settings_tab(settings: dict):
     save_btn = gr.Button("💾 حفظ الإعدادات / Save Settings", variant="primary")
     status = gr.Textbox(label="الحالة / Status", interactive=False)
 
-    # Event handler
     save_btn.click(
         fn=lambda t, l, m, temp, tp: save_settings(t, l, m, temp, tp, status),
         inputs=[theme, default_lang, default_max_tokens, default_temp, default_top_p],
@@ -503,28 +495,21 @@ def create_interface():
     current_settings = Config.load_settings()
 
     with gr.Blocks(theme=current_settings["theme"], css=get_css()) as app:
-        # Header section
         create_header()
 
-        # Main tabs
         with gr.Tabs():
-            # Chat tab
             with gr.Tab("💬 Chat", id="chat"):
                 create_chat_tab(current_settings)
 
-            # History tab
             with gr.Tab("📜 History", id="history"):
                 create_history_tab()
 
-            # Analytics tab
             with gr.Tab("📊 Analytics", id="analytics"):
                 create_analytics_tab()
 
-            # Settings tab
             with gr.Tab("⚙️ Settings", id="settings"):
                 create_settings_tab(current_settings)
 
-        # Footer
         create_footer()
 
     return app
@@ -533,10 +518,8 @@ def create_interface():
 # MAIN EXECUTION
 # ======================
 if __name__ == "__main__":
-    # Ensure necessary directories exist
     Path("logs").mkdir(exist_ok=True)
 
-    # Create and launch interface
     app = create_interface()
     app.launch(
         server_name="0.0.0.0",