Experiment branch commit
total_app.py (new normal file, 118 lines)
@@ -0,0 +1,118 @@
# %% API call1

import time
import json
import os
from datetime import datetime
import pandas as pd
from openai import OpenAI
from dotenv import load_dotenv

# Load environment variables
load_dotenv()
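# (OPENAI_API_KEY and OPENAI_BASE_URL below are expected in the environment or a local .env file)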

# === CONFIGURATION ===
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
OPENAI_BASE_URL = os.getenv("OPENAI_BASE_URL")
MODEL_NAME = "GPT-OSS-120B"
INPUT_CSV = "/home/shahin/Lab/Doktorarbeit/Barcelona/Data/MS_Briefe_400_with_unique_id_SHA3_explore_cleaned_unique.csv"
EDSS_INSTRUCTIONS_PATH = "/home/shahin/Lab/Doktorarbeit/Barcelona/attach/Komplett.txt"

client = OpenAI(
    api_key=OPENAI_API_KEY,
    base_url=OPENAI_BASE_URL
)

with open(EDSS_INSTRUCTIONS_PATH, 'r') as f:
    EDSS_INSTRUCTIONS = f.read().strip()
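
# NOTE: EDSS_INSTRUCTIONS is presumably interpolated into the full inference prompt,
# which is elided in this commit.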

# === UPDATED RUN INFERENCE WITH RETRY LOGIC ===
def run_inference(patient_text, max_retries=3):
    prompt = f'''Du bist ein medizinischer Assistent... (rest of your prompt)'''  # Kept same as your original

    # Instructions omitted for brevity, but keep your full prompt here
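    # NOTE: the elided prompt text presumably interpolates patient_text and EDSS_INSTRUCTIONS.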

    attempts = 0
    while attempts < max_retries:
        start_time = time.time()
        attempts += 1

        try:
            response = client.chat.completions.create(
                messages=[
                    {"role": "system", "content": "You extract EDSS scores..."},
                    {"role": "user", "content": prompt}
                ],
                model=MODEL_NAME,
                max_tokens=2048,
                temperature=0.0,
                response_format={"type": "json_object"}
            )
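            # NOTE: on the official OpenAI API, JSON mode (response_format={"type": "json_object"})
            # requires the word "JSON" to appear in the messages; assumed to hold, or be ignored,
            # on this OpenAI-compatible endpoint.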

            content = response.choices[0].message.content

            # Check if content is empty or None
            if not content or content.strip() == "" or content.strip() == "{}":
                print(f" [Attempt {attempts}] Warning: Received empty response. Retrying...")
                time.sleep(1)  # Short pause before retrying
                continue

            # Parse the JSON response
            parsed = json.loads(content)
            inference_time = time.time() - start_time

            return {
                "success": True,
                "result": parsed,
                "inference_time_sec": inference_time,
                "attempts": attempts
            }

        except Exception as e:
            print(f" [Attempt {attempts}] Error: {e}")
            if attempts < max_retries:
                time.sleep(2)  # Wait longer on actual connection errors
            else:
                return {
                    "success": False,
                    "error": f"Failed after {max_retries} attempts: {str(e)}",
                    "inference_time_sec": -1,
                    "attempts": attempts
                }

    # Fallthrough: reached only if the final attempt still returned empty content
    return {"success": False, "error": "Unknown failure", "attempts": attempts}

# === BUILD PATIENT TEXT ===
def build_patient_text(row):
    # Concatenate the free-text sections of the letter. The German column names map roughly to:
    # T_Zusammenfassung = summary, Diagnosen = diagnoses, T_KlinBef = clinical findings, T_Befunde = (other) findings.
    summary = str(row.get("T_Zusammenfassung", "")) if pd.notna(row.get("T_Zusammenfassung")) else ""
    diagnoses = str(row.get("Diagnosen", "")) if pd.notna(row.get("Diagnosen")) else ""
    clinical = str(row.get("T_KlinBef", "")) if pd.notna(row.get("T_KlinBef")) else ""
    findings = str(row.get("T_Befunde", "")) if pd.notna(row.get("T_Befunde")) else ""
    return "\n".join([summary, diagnoses, clinical, findings]).strip()

if __name__ == "__main__":
    df = pd.read_csv(INPUT_CSV, sep=';')
    results = []

    for idx, row in df.iterrows():
        print(f"Processing row {idx + 1}/{len(df)}")
        patient_text = build_patient_text(row)

        # Calling the updated inference function
        result = run_inference(patient_text, max_retries=3)

        result["unique_id"] = row.get("unique_id", f"row_{idx}")
        result["MedDatum"] = row.get("MedDatum", None)

        results.append(result)
        # Optional: only print success to keep console clean
        if result["success"]:
            print(f" Success on attempt {result.get('attempts')}")

    output_json = INPUT_CSV.replace(".csv", "_results_total.json")
    with open(output_json, 'w', encoding='utf-8') as f:
        json.dump(results, f, indent=2, ensure_ascii=False)
    print(f"Results saved to {output_json}")