import gradio as gr
import pandas as pd
from openai import AzureOpenAI
import faiss
import numpy as np
import json
import time
import re
import tiktoken
import os
from IPython.display import HTML
def arabic_print(text, colour="blue"):
    """
    Displays Arabic text with proper RTL direction and right alignment in a Jupyter notebook.

    Parameters:
        text (str): The Arabic text to display.
        colour (str): The colour of the text. Default is "blue".
    """
    # Preserve line breaks inside the HTML block.
    text = text.replace("\n", "<br>")
    html_content = f"""
    <div dir="rtl" style="text-align: right; color: {colour};">
        {text}
    </div>
    """
    return HTML(html_content)
# Example usage
my_arabic_text = """
باسم صاحب السمو الشيخ خليفة بن زايد آل نهيان رئيس دولة الإمارات العربية المتحدة / حاكم إمارة أبو ظبي
بالجلسة المنعقدة بـ محكمة ابوظبي العمالية-ابتدائي بتاريخ 2 جمادى الآخرة 1441 هـ الموافق 27/01/2020 م
برئاسة القاضي: إبراهيم ناصر الاحبابي وعضوية القاضي: مرتضى الصديق الحسن وعضوية القاضي: خليفة سليم
"""
# Display the text using the function
arabic_print(my_arabic_text, colour="green")
AZURE_OPENAI_PREVIEW_API_VERSION = os.getenv("AZURE_OPENAI_PREVIEW_API_VERSION")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
AZURE_OPENAI_KEY = os.getenv("AZURE_OPENAI_KEY")
client = AzureOpenAI(
    azure_endpoint=AZURE_OPENAI_ENDPOINT,
    api_key=AZURE_OPENAI_KEY,
    api_version=AZURE_OPENAI_PREVIEW_API_VERSION,
)
def call_gpt_azure_message(message_text):
    """Send a pre-built messages list to the gpt-4o deployment and return the reply text."""
    completion = client.chat.completions.create(
        model="gpt-4o",
        messages=message_text,
        temperature=0.0,
        max_tokens=1000,
        top_p=0.95,
        frequency_penalty=0,
        presence_penalty=0,
        stop=None,
    )
    return completion.choices[0].message.content
def call_gpt_azure_message_stream(message_text):
    """Same as call_gpt_azure_message, but returns a streaming completion iterator."""
    completion = client.chat.completions.create(
        model="gpt-4o",
        messages=message_text,
        temperature=0.0,
        max_tokens=2000,
        top_p=0.95,
        frequency_penalty=0,
        presence_penalty=0,
        stop=None,
        stream=True,
    )
    return completion
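# Hedged usage sketch for the streaming helper above: the messages are illustrative
# only, and the loop assumes the standard chunk layout of the Azure OpenAI streaming
# API (chunks may arrive with empty choices or a None delta.content, so both are guarded).
example_messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Say hello in one short sentence."},
]
stream = call_gpt_azure_message_stream(example_messages)
streamed_reply = ""
for chunk in stream:
    if chunk.choices and chunk.choices[0].delta.content:
        streamed_reply += chunk.choices[0].delta.content
print(streamed_reply)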
def call_gpt_azure(SYS_PROMPT, USER_PROMPT, MODEL="gpt-4o"):
    """Build a system/user message pair, send it to the given deployment, and return the reply text."""
    message_text = [
        {"role": "system", "content": SYS_PROMPT},
        {"role": "user", "content": USER_PROMPT},
    ]
    completion = client.chat.completions.create(
        model=MODEL,
        messages=message_text,
        temperature=0.0,
        max_tokens=1000,
        top_p=0.95,
        frequency_penalty=0,
        presence_penalty=0,
        stop=None,
    )
    return completion.choices[0].message.content
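# Hedged usage example for the wrapper above; the prompts are illustrative and the
# call assumes the "gpt-4o" deployment configured earlier is reachable.
sample_reply = call_gpt_azure(
    SYS_PROMPT="You are a concise assistant.",
    USER_PROMPT="In one sentence, what is a legal precedent?",
)
print(sample_reply)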
# Main embedding helper: takes a text string and returns a 1536-dimensional
# vector from the "ada3_small" deployment (text-embedding-3-small).
def generate_embeddings(text, model="ada3_small"):  # model = Azure deployment name
    return client.embeddings.create(input=[text], model=model).data[0].embedding
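# Quick sanity check (assumes the "ada3_small" deployment serves
# text-embedding-3-small, whose default output is 1536-dimensional).
sample_vec = generate_embeddings("مرحبا بالعالم")
print("Embedding dimension:", len(sample_vec))  # expected: 1536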
import tiktoken
enc = tiktoken.get_encoding("o200k_base")
assert enc.decode(enc.encode("hello world")) == "hello world"
# To get the tokeniser corresponding to a specific model in the OpenAI API:
enc = tiktoken.encoding_for_model("gpt-4o")
def count_tokens_ada3(text, model_name="text-embedding-3-small"):
# Automatically get the correct encoding for the given model
encoding = tiktoken.encoding_for_model(model_name)
# Encode the text and count the tokens
return len(encoding.encode(text))
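# Illustrative comparison: the chat tokenizer (o200k_base for gpt-4o) and the
# embedding model's tokenizer can split the same Arabic text differently, which
# is why both counters are kept.
sample_text = "هذه قضية عمالية تتعلق بمستحقات نهاية الخدمة"
print("gpt-4o tokens:", len(enc.encode(sample_text)))
print("text-embedding-3-small tokens:", count_tokens_ada3(sample_text))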
def Get_nearest_cases_Json(case, K):
    """Embed the input case, retrieve its K nearest neighbours from the FAISS index,
    and return the matching JSON case records concatenated into one context string."""
    vquery = np.array(generate_embeddings(case), dtype=np.float32).reshape(1, -1)
    D, I = index_law.search(vquery, K)  # similarity scores and row indices of the K nearest cases
    cxt_cases = ""
    for loc in I[0]:
        cxt_cases += str(json_cases[loc])
    return cxt_cases
def count_tokens(text):
return len(enc.encode(text))
# Load the precomputed case embeddings and build an exact inner-product index over them.
vec_embs = np.load("data/All_cases_embedded.npy")
index_law = faiss.IndexFlatIP(vec_embs.shape[1])
index_law.add(vec_embs)
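# Note (assumption): IndexFlatIP ranks by raw inner product. If the saved
# embeddings were not L2-normalized beforehand, normalizing them (and the query)
# would make the scores equivalent to cosine similarity, e.g. by calling
# faiss.normalize_L2(vec_embs) before index_law.add(vec_embs).
print("Indexed vectors:", index_law.ntotal, "| dimension:", vec_embs.shape[1])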
# Path to the pre-processed legal cases (aligned with the embedding matrix above)
cases_file = 'data/KSA_Legal_cases.json'
# Read the JSON file
with open(cases_file, 'r', encoding='utf-8') as file:
    json_cases = json.load(file)
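# Hedged sanity check: the JSON case list is expected to be aligned row-for-row
# with the embedding matrix, since the FAISS row indices are used to look up
# json_cases directly.
print("Cases:", len(json_cases), "| Embeddings:", vec_embs.shape[0])

# Example retrieval with an illustrative query (uncomment to run):
# nearest_cases = Get_nearest_cases_Json("نزاع حول مستحقات نهاية الخدمة", 3)
# print("Context tokens:", count_tokens(nearest_cases))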
def GPT_AI_Judge_Json(case, cxt):
    """Ask gpt-4o to compare the input case against the retrieved precedents and suggest a ruling."""
    SYS_PROMPT = """
**System Role**:
You are an **Arabic Legal Judge Assistant**, specialized in analyzing legal cases and extracting insights from related legal precedents.

### **Input Details**:
You will be given:
1. A **legal case** (primary input).
2. A **context** containing multiple legal precedents in JSON format.

### **Your Tasks**:
1. **Analyze the Input Case**:
   - Focus on the **description of the case** (توصيف القضية) and its key aspects.
2. **Identify Relevant Legal Precedents**:
   - Search the provided context for only those precedents **closely related** to the input case.
3. **Create a Comparative Analysis**:
   - Present a **contrastive table** comparing the relevant precedents, with columns drawn from the metadata in the context.
4. **Discuss Key Points**:
   - Highlight **commonalities and differences** between the input case and the relevant precedents.
5. **Suggest a Ruling Decision**:
   - Provide a **recommendation** for the judge, based on the rulings of the similar precedents.

---

### **If No Relevant Precedents**:
- Clearly state that no related precedents were identified in the context.
- Apologize and note that a ruling recommendation cannot be provided.

---

### **Response Format**:
1. **Comparative Table**:
   - Compare the relevant precedents in a table.
2. **RTL Formatting**:
   - Use **right-to-left (RTL)** direction and **right alignment**.
   - Ensure all headers, lists, and paragraphs include `dir="rtl"` and `text-align: right`.
3. **Clear Structure**:
   - Provide a well-organized response for proper Arabic rendering.

---
"""
    User_Prompt = f"Input legal case: {case}\nLegal precedents context: {cxt}"
    message_text = [
        {"role": "system", "content": SYS_PROMPT},
        {"role": "user", "content": User_Prompt},
    ]
    completion = client.chat.completions.create(
        model="gpt-4o",
        messages=message_text,
        temperature=0.0,
        max_tokens=3500,
        top_p=0.95,
        frequency_penalty=0,
        presence_penalty=0,
        stop=None,
    )
    return completion.choices[0].message.content
# Chat handler: retrieve the 5 nearest precedents and ask the model for a comparative analysis.
def gpt_judge(case, history):
    cxt = Get_nearest_cases_Json(case, 5)
    print("Context tokens ==>", count_tokens(cxt))
    response = GPT_AI_Judge_Json(case, cxt)
    return response
welcome_message = "اذكر أحداث ووقائع وملابسات القضية وسأقوم بتحليلها واقتراح الحكم بناءً على سوابق قضائية مشابهة"
chatbot = gr.Chatbot(value=[(None, welcome_message)], height=800, rtl=True)
# RTL description shown under the chat title; the header image HTML is left empty here.
tit_html = (
    '<div dir="rtl" style="text-align: right;">'
    'اذكر الوقائع الخاصة بالقضية وتوصيفها للحصول على الاستشارة القانونية المناسبة.'
    '</div>'
)
tit_img = ""
with gr.Blocks() as demo:
gr.ChatInterface(
gpt_judge,
chatbot=chatbot,
title=tit_img,
description=tit_html,
theme="soft",
)
demo.launch()