import gradio as gr
import numpy as np
import matplotlib.pyplot as plt
from skimage.transform import radon, iradon
from scipy.fft import fft, ifft
from io import BytesIO
import base64
import os
import requests
from PIL import Image

API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2-large" |
|
HF_TOKEN = os.environ.get("HF_TOKEN") |
|
headers = {"Authorization": f"Bearer {HF_TOKEN}"} |
|
|
|
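
# HF_TOKEN must be present in the environment for authenticated requests, e.g.
# (shell, illustrative placeholder value only):
#   export HF_TOKEN=hf_xxx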


def query_gpt2(payload):
    """Send a request to the Hugging Face Inference API and return the JSON response."""
    try:
        # A timeout (30 s here, an arbitrary choice) is set so the Timeout handler below can fire.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
        print("Status Code:", response.status_code)
        response.raise_for_status()
        response_json = response.json()
        print("Response JSON:", response_json)
        return response_json
    except requests.exceptions.HTTPError as errh:
        print(f"HTTP Error: {errh}")
    except requests.exceptions.ConnectionError as errc:
        print(f"Error Connecting: {errc}")
    except requests.exceptions.Timeout as errt:
        print(f"Timeout Error: {errt}")
    except requests.exceptions.RequestException as err:
        print(f"Something went wrong: {err}")
    return {"generated_text": "No response from GPT-2."}
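

# Example usage of query_gpt2 (a sketch; "parameters"/"max_new_tokens" follow the
# Inference API's text-generation format and are not exercised by this app):
#
#   result = query_gpt2({"inputs": "Hello", "parameters": {"max_new_tokens": 20}})
#   # Successful calls typically return a list such as [{"generated_text": "..."}].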


def process_and_query(image):
    """Process the uploaded image, compute its sinogram, and query GPT-2."""
    # Convert to grayscale and to a NumPy array.
    image = image.convert("L")
    image = np.array(image)

    # Radon transform over projection angles spanning [0, 180) degrees.
    theta = np.linspace(0., 180., max(image.shape), endpoint=False)
    sinogram = radon(image, theta=theta, circle=True)

    # Serialize the sinogram as comma-separated text, one detector row per line.
    sinogram_text = "\n".join([", ".join(map(str, row)) for row in sinogram])
    print("Sinogram data (text):")
    print(sinogram_text)
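
    # NOTE (sketch / assumption): a full sinogram serialized as text is usually far
    # longer than GPT-2's context window, so the hosted model may truncate or reject
    # the request. A simple mitigation would be to send only a prefix, e.g.:
    #   gpt_response = query_gpt2({"inputs": sinogram_text[:2000]})
    # The call below sends the full text.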

    # Query GPT-2 with the serialized sinogram. The Inference API returns a list of
    # generation dicts on success, while query_gpt2 returns a plain dict on failure.
    gpt_response = query_gpt2({"inputs": sinogram_text})
    if isinstance(gpt_response, list):
        gpt_response = gpt_response[0] if gpt_response else {}
    gpt_output = gpt_response.get("generated_text", "No response received from GPT-2.")

    # Filtered back-projection, done step by step: FFT along the detector axis,
    # multiply by a ramp (|f|) filter, inverse FFT, then back-project.
    fourier = fft(sinogram, axis=0)

    freq = np.fft.fftfreq(sinogram.shape[0]).reshape(-1, 1)
    ramp_filter = np.abs(freq)
    filtered_fourier = fourier * ramp_filter

    filtered_sinogram = np.real(ifft(filtered_fourier, axis=0))

    # The sinogram is already ramp-filtered above, so disable iradon's built-in
    # filter to avoid applying it twice.
    reconstructed_image = iradon(filtered_sinogram, theta=theta, filter_name=None, circle=True)
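
    # Equivalent shortcut (sketch): skimage can apply the ramp filter internally, so
    # the manual FFT steps above could be replaced by a single call such as
    #   reconstructed_image = iradon(sinogram, theta=theta, filter_name="ramp", circle=True)
    # The explicit version is used here so the intermediate filtered sinogram can be plotted.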

    # 2x2 figure: original image, sinogram, filtered sinogram, reconstruction.
    fig, axes = plt.subplots(2, 2, figsize=(10, 10))
    axes[0, 0].set_title("Original Image")
    axes[0, 0].imshow(image, cmap="gray")
    axes[0, 0].axis("off")

    axes[0, 1].set_title("Sinogram")
    axes[0, 1].imshow(sinogram, cmap="gray", aspect="auto")
    axes[0, 1].axis("off")

    axes[1, 0].set_title("Filtered Sinogram")
    axes[1, 0].imshow(filtered_sinogram, cmap="gray", aspect="auto")
    axes[1, 0].axis("off")

    axes[1, 1].set_title("Reconstructed Image")
    axes[1, 1].imshow(reconstructed_image, cmap="gray")
    axes[1, 1].axis("off")

    plt.tight_layout()

    # Render the figure to a base64-encoded PNG embedded in an <img> tag for gr.HTML.
    buf = BytesIO()
    plt.savefig(buf, format="png")
    buf.seek(0)
    encoded_image = base64.b64encode(buf.read()).decode("utf-8")
    buf.close()
    plt.close(fig)

    return f"<img src='data:image/png;base64,{encoded_image}'/>", sinogram_text, gpt_output
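

# Quick local check (sketch, not wired into the app): the pipeline can be exercised
# without the UI using a synthetic phantom, e.g.
#
#   from skimage.data import shepp_logan_phantom
#   phantom = Image.fromarray((shepp_logan_phantom() * 255).astype("uint8"))
#   html, text, reply = process_and_query(phantom)
#
# The uint8 scaling is an assumption so that the PIL "L" conversion behaves as expected.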


with gr.Blocks() as demo:
    gr.Markdown("# Sinogram Visualization and GPT-2 Processing")
    gr.Markdown("Upload an image, process the sinogram data, and send it to GPT-2.")

    with gr.Row():
        image_input = gr.Image(type="pil", label="Upload Image")
        output = gr.HTML(label="Result Visualization")
        sinogram_output = gr.Textbox(label="Sinogram Data (Text)")
        gpt_output = gr.Textbox(label="GPT-2 Response")

    process_button = gr.Button("Process and Send to GPT-2")

    process_button.click(process_and_query, inputs=[image_input], outputs=[output, sinogram_output, gpt_output])
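
# If GPT-2 calls or reconstructions are slow, Gradio's request queue can help. Both
# calls below are standard Gradio API, but enabling them here is optional (a sketch):
#   demo.queue()
#   demo.launch(share=True)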


if __name__ == "__main__":
    demo.launch()