Not sure: clocks keep coming up and we keep getting the runaround, so we might need timestamped confirmations? (A hedged signing sketch is included at the end of the script below.)
import os
import re
import base64
import hashlib
import logging
from datetime import datetime, timedelta
from typing import Optional, Dict, List
import jwt
import torch
import ipfshttpclient
from vllm import LLM, SamplingParams
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from fastapi import FastAPI, Depends, HTTPException, Request, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa  # rsa.generate_private_key lives here, not in the standalone "rsa" package
from pydantic import BaseModel
from fastapi_csrf_protect import CsrfProtect
# =============================
# Configuration and Setup
# =============================
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# RSA Key Management
KEY_DIR = "keys"
PRIVATE_KEY_PATH = os.path.join(KEY_DIR, "private_key.pem")
PUBLIC_KEY_PATH = os.path.join(KEY_DIR, "public_key.pem")
def generate_or_load_rsa_keys():
    if not os.path.exists(PRIVATE_KEY_PATH):
        os.makedirs(KEY_DIR, exist_ok=True)
        private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
        # Save private key
        pem = private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption()
        )
        with open(PRIVATE_KEY_PATH, "wb") as f:
            f.write(pem)
        # Save public key
        pub = private_key.public_key().public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        )
        with open(PUBLIC_KEY_PATH, "wb") as f:
            f.write(pub)
        return private_key, private_key.public_key()
    # Load existing keys
    with open(PRIVATE_KEY_PATH, "rb") as f:
        private_pem = f.read()
    private_key = serialization.load_pem_private_key(private_pem, password=None)
    return private_key, private_key.public_key()
private_key, public_key = generate_or_load_rsa_keys()
# IPFS Integration
try:
    ipfs_client = ipfshttpclient.connect()
except Exception as e:
    logger.warning(f"IPFS connection failed: {e}")
    ipfs_client = None
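# --- Hedged sketch (not in the original): persisting an encoded bundle to IPFS when
# --- the daemon is reachable. add_bytes returns the content CID as a string; the
# --- helper name store_bundle is illustrative only.
def store_bundle(payload: bytes) -> Optional[str]:
    """Pin raw bytes to IPFS and return the CID, or None if no client is available."""
    if ipfs_client is None:
        return None
    return ipfs_client.add_bytes(payload)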
# JWT Config (RS256 signs with the RSA private key and verifies with the public key;
# SECRET_KEY would only apply to HMAC algorithms such as HS256)
SECRET_KEY = os.getenv("JWT_SECRET", "super-secret-key")
ALGORITHM = "RS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60
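# --- Hedged sketch (assumption, not in the original): minting an RS256 access token
# --- with the RSA private key loaded above. PyJWT accepts a cryptography private-key
# --- object for RS256; the function name is illustrative only.
def create_access_token(data: Dict, expires_minutes: int = ACCESS_TOKEN_EXPIRE_MINUTES) -> str:
    payload = dict(data)
    payload["exp"] = datetime.utcnow() + timedelta(minutes=expires_minutes)
    return jwt.encode(payload, private_key, algorithm=ALGORITHM)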
# Qwen3 Model Config
TENSOR_PARALLEL_SIZE = int(os.getenv("TENSOR_PARALLEL_SIZE", "1"))
QWEN3_MODEL = os.getenv("QWEN3_MODEL", "Qwen/Qwen3-235M-A18B")
# Application Setup
app = FastAPI(title="Healthcare AI Summarizer")
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/token")
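# --- Hedged sketch (assumption, not in the original): decoding the bearer token with
# --- the RSA public key so routes can depend on it. The function name is illustrative;
# --- the real route handlers are defined elsewhere.
def decode_access_token(token: str = Depends(oauth2_scheme)) -> Dict:
    try:
        return jwt.decode(token, public_key, algorithms=[ALGORITHM])
    except jwt.PyJWTError:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid or expired token")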
# CSRF Protection (fastapi_csrf_protect expects a settings model, not a plain dict)
class CsrfSettings(BaseModel):
    secret_key: str = os.getenv("CSRF_SECRET", "csrf-secret-123")

@CsrfProtect.load_config
def get_csrf_config():
    return CsrfSettings()
# =============================
# Core Classes
# =============================
class FractalDataBundler:
    def __init__(self, iterations=3):
        self.iterations = iterations

    def fractal_encode(self, data: str) -> bytes:
        encoded = data.encode('utf-8')
        for _ in range(self.iterations):
            digest = hashlib.sha256(encoded).digest()
            encoded = base64.b64encode(digest + encoded)
        return encoded

    def fractal_decode(self, encoded: bytes) -> str:
        for _ in range(self.iterations):
            decoded = base64.b64decode(encoded)
            encoded = decoded[32:]  # Strip SHA-256 hash prefix
        return encoded.decode('utf-8')
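# --- Hedged usage sketch (not in the original): a round-trip check for
# --- FractalDataBundler. The helper name and sample text are illustrative only.
def _demo_fractal_roundtrip() -> bool:
    """Encode a sample string, decode it again, and confirm it survives intact."""
    bundler = FractalDataBundler(iterations=3)
    token = bundler.fractal_encode("sample clinical note")
    return bundler.fractal_decode(token) == "sample clinical note"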
class MultilingualSummarizer:
    def __init__(self, mt5_model="google/mt5-small"):
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        # MT5 Setup
        self.mt5_tokenizer = AutoTokenizer.from_pretrained(mt5_model)
        self.mt5_model = AutoModelForSeq2SeqLM.from_pretrained(mt5_model).to(self.device)
        # Qwen3 Setup
        try:
            self.qwen3 = LLM(
                model=QWEN3_MODEL,
                tensor_parallel_size=TENSOR_PARALLEL_SIZE,
                enable_prefix_caching=True  # Optimized generation; note LLM() takes no "host" argument, that belongs to the API server
            )
            self.sampling_params = SamplingParams(
                temperature=0.7,
                max_tokens=32768,
                min_p=0.1,
                top_p=0.95,
                repetition_penalty=1.1
            )
        except Exception as e:
            logger.error(f"Failed to initialize Qwen3 model: {e}")
            self.qwen3 = None
            self.sampling_params = None
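# --- Hedged sketch (assumption, responding to the timestamping question at the top):
# --- a minimal signed "timestamp confirmation" using the RSA private key, so a summary
# --- can carry a verifiable issued-at claim even when server clocks are in doubt.
# --- For audit-grade proof an external RFC 3161 timestamp authority would still be
# --- needed; the function name and payload shape below are illustrative only.
def sign_timestamped_confirmation(document_hash: bytes) -> Dict[str, str]:
    issued_at = datetime.utcnow().isoformat() + "Z"
    message = document_hash + issued_at.encode("utf-8")
    signature = private_key.sign(
        message,
        padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH),
        hashes.SHA256(),
    )
    return {
        "issued_at": issued_at,
        "signature": base64.b64encode(signature).decode("ascii"),
    }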