import gradio as gr
import requests
import json
import os
import ast
import uuid
import base64
import datetime
import urllib.request
from pathlib import Path

import bs4
from PIL import Image
import moviepy.video.io.ImageSequenceClip as ic

main_directory = "https://services.swpc.noaa.gov/"
sdo_source = "https://sdo.gsfc.nasa.gov/assets/img/browse/"
# SDO browse files follow this pattern: DATE_IDENT_SIZE_TOOL.jpg
sdo_source_format = "https://sdo.gsfc.nasa.gov/assets/img/browse/YEAR/MONTH/DAY/DATE_IDENT_SIZE_TOOL.jpg"

# NOAA plots used for the default composite image.
comp_list = [
    "https://services.swpc.noaa.gov/images/geospace/geospace_1_day.png",
    "https://services.swpc.noaa.gov/images/ace-epam-24-hour.gif",
    "https://services.swpc.noaa.gov/images/ace-epam-e-24-hour.gif",
    "https://services.swpc.noaa.gov/images/ace-epam-p-24-hour.gif",
    "https://services.swpc.noaa.gov/images/ace-mag-24-hour.gif",
    "https://services.swpc.noaa.gov/images/ace-mag-swepam-24-hour.gif",
    "https://services.swpc.noaa.gov/images/ace-sis-24-hour.gif",
    "https://services.swpc.noaa.gov/images/boulder-magnetometer.png",
    "https://services.swpc.noaa.gov/images/seaesrt-time-series-270.png",
    "https://services.swpc.noaa.gov/images/station-k-index.png",
    "https://services.swpc.noaa.gov/images/notifications-timeline.png",
]

sdo_aia_latest = [
    {"name": "Fe18 94A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_94.gif"},
    {"name": "Fe20 131A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_131.gif"},
    {"name": "Fe9/10 171A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_171.gif"},
    {"name": "Fe12 193A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_193.gif"},
    {"name": "Fe14 211A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_211.gif"},
    {"name": "He2 304A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_304.gif"},
    {"name": "Fe16 335A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_335.gif"},
    {"name": "cont+C4 1600A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_1600.gif"},
    {"name": "continuum 1700A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_1700.gif"},
    {"name": "continuum 4500A", "source": "https://umbra.nascom.nasa.gov/images/latest_aia_4500.gif"},
]

nasa_images = [
    {"name": "EIT 171A", "source": "https://soho.nascom.nasa.gov/data/realtime/eit_171/512/latest.jpg"},
    {"name": "EIT 195A", "source": "https://soho.nascom.nasa.gov/data/realtime/eit_195/512/latest.jpg"},
    {"name": "EIT 284A", "source": "https://soho.nascom.nasa.gov/data/realtime/eit_284/512/latest.jpg"},
    {"name": "EIT 304A", "source": "https://soho.nascom.nasa.gov/data/realtime/eit_304/512/latest.jpg"},
]

nasa_soho_gifs = [
    {"name": "EIT 304A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_304small.gif"},
    {"name": "EIT 195A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_195small.gif"},
    {"name": "EIT 171A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_171small.gif"},
    {"name": "EIT 284A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_284small.gif"},
]

nasa_soho_mp4 = [
    {"name": "EIT 171A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_171small.mp4"},
    {"name": "EIT 195A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_195small.mp4"},
    {"name": "EIT 284A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_284small.mp4"},
    {"name": "EIT 304A", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_eit_304small.mp4"},
    {"name": "LASCO C2", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_c2small.mp4"},
    {"name": "LASCO C3", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_c3small.mp4"},
    {"name": "SDO/HMI Continuum", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_hmi_igr-512.mp4"},
    {"name": "SDO/HMI Magnetogram", "source": "https://soho.nascom.nasa.gov/data/LATEST/current_hmi_mag-512.mp4"},
]

sdo_obj = ["HMIB", "HMIBC", "HMIIC", "HMIIF", "HMID", "HMII", "HMI171",
           "0094", "0131", "0171", "0193", "0211",
           "0304", "0335", "1600", "1700", "4500",
           "211193171", "211193171n", "211193171rg",
           "094335193", "304211171"]
sdo_size = [256, 512, 1024, 2048, 4096]
# Days per month (February as 28; leap years not handled). Currently unused.
month_dict = {"01": 31, "02": 28, "03": 31, "04": 30, "05": 31, "06": 30,
              "07": 31, "08": 31, "09": 30, "10": 31, "11": 30, "12": 31}

# Unused page-link HTML template.
html = """
PAGE_LINK
"""

css = """
.img_box{
    display: flex;
    flex-direction: column;
    flex-flow: unset;
    flex-wrap: wrap;
    justify-content: space-around;
}
.img_class{
    background: #ffffff;
    max-width: 48%;
    font-family: monospace;
    border-top: #9300ff;
    border-style: inset;
    margin-top: 5px;
}
.img_class_raw{
    background: #ffffff;
    width: 100%;
    font-family: monospace;
    border-top: #9300ff;
    border-style: inset;
    margin-top: 5px;
    display: flex;
    flex-direction: column;
}
.img_box_soho{
    display: flex;
    flex-direction: row;
    flex-wrap: wrap;
    justify-content: space-between;
}
.img_class_soho{
    background: #ffffff;
    font-family: monospace;
    border-top: #9300ff;
    border-style: inset;
    margin-top: 5px;
    max-width: 48%;
}
.img_class_sdo{
    background: #ffffff;
    font-family: monospace;
    border-top: #9300ff;
    border-style: inset;
    margin-top: 5px;
    max-width: 25%;
    font-size: small;
}
"""
" html_out+=f'
{get_url}
' return html_out, None return None,None def make_tree(url1="",url2="",url3="",url4="",url5="",url6="",url7="",url8=""): link_box=[] html_out="" get_url=f'{main_directory}{url1}{url2}{url3}{url4}{url5}{url6}{url7}{url8}' if not get_url.endswith('.json'): feed1 = requests.get(get_url) spl = feed1.text.split("href=") for line in spl: spl2 = line.split(">")[0] if spl2.endswith('/"') or spl2.endswith('.json"') or spl2.endswith('.png"') or spl2.endswith('.gif"') or spl2.endswith('.jpg"'): fin=line.split(">")[0].strip('""') link_box.append(fin) return gr.update(choices=[l for l in link_box],interactive=True) else: return None def get_images(): html_out=f"
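
# Illustrative: make_tree("json/") lists https://services.swpc.noaa.gov/json/
# and returns a gr.update whose choices are the hrefs found there, which the
# Raw tab feeds into the next dropdown in the chain.
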
" get_url=f'{main_directory}images/geospace/' feed1 = requests.get(get_url) spl = feed1.text.split("href=") for line in spl: spl2 = line.split(">")[0].strip('""') if spl2.endswith(".png") or spl2.endswith(".gif") or spl2.endswith(".jpg"): html_out+=f'
{spl2}
' get_url2=f'{main_directory}images/' feed2 = requests.get(get_url2) spl = feed2.text.split("href=") for line2 in spl: spl2 = line2.split(">")[0].strip('""') if spl2.endswith(".png") or spl2.endswith(".gif") or spl2.endswith(".jpg"): html_out+=f'
{spl2}
' html_out+="
" return html_out def make_animation_new(gif_box=[], fps=10, loop=True): if gif_box: gif_box.sort(reverse=False) print("Making GIF...") try: frames = [] for i,ea in enumerate(gif_box): urllib.request.urlretrieve(ea,f'tmp{i}.png') frames.append(f'tmp{i}.png') uid=uuid.uuid4() clip = ic.ImageSequenceClip(frames, fps=fps) temp_gif_path = f"temp_{uid}.gif" loop_val = 0 if loop else 1 clip.write_gif(temp_gif_path, fps=fps, loop=loop_val) with open(temp_gif_path, 'rb') as gif_file: gif_bytes = gif_file.read() gif_base64 = base64.b64encode(gif_bytes).decode('utf-8') gif_data_url = f"data:image/gif;base64,{gif_base64}" html_out=f"
" html_out+=f'
' html_out+="
" os.remove(temp_gif_path) for frame_file in frames: os.remove(frame_file) return html_out except Exception as e: print(f"ERROR HAPPENED: {e}") return f"
An error occurred: {e}
" else: return "
No images were provided to create an animation.
" def make_html(inp_files): html_out=f"
" for ea in inp_files: html_out+=f'
' html_out+='
' return html_out def make_nasa_soho_videos(): html_out=f"
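
# make_nasa_soho_videos embeds each nasa_soho_mp4 entry in an HTML5 <video>
# tag, so the browser streams the MP4 loops straight from soho.nascom.nasa.gov.
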
" for ea in nasa_soho_mp4: file_name=ea['source'] html_out+=f''' ''' html_out+='
' return html_out def make_nasa_soho_images(inp_src: list): html_out="" for ea in inp_src: file_name=ea['source'] html_out+=f'
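
# Illustrative: make_nasa_soho_images(nasa_images) yields one img_class_soho
# cell per SOHO EIT wavelength, labeled with its "name" field; the same helper
# renders the latest SDO AIA GIFs in the combined trigger below.
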
def make_nasa_soho_images(inp_src: list):
    html_out = ""
    for ea in inp_src:
        file_name = ea['source']
        html_out += f'<div class="img_class_soho"><img src="{file_name}"><br>{ea["name"]}</div>'
    return html_out


def make_nasa_soho_image_trigger():
    html_in = "<div class='img_box_soho'>"
    html_in += make_nasa_soho_images(nasa_images)
    html_in += make_nasa_soho_images(sdo_aia_latest)
    html_in += "</div>"
    return html_in


def generate_sdo_urls(date1_str: str, date2_str: str) -> list[str]:
    # One SDO browse-directory URL per day in the inclusive date range.
    url_box = []
    base_url = "https://sdo.gsfc.nasa.gov/assets/img/browse"
    try:
        start_date = datetime.datetime.strptime(date1_str[:8], "%Y%m%d").date()
        end_date = datetime.datetime.strptime(date2_str[:8], "%Y%m%d").date()
    except ValueError:
        return []
    if start_date > end_date:
        return []
    current_date = start_date
    while current_date <= end_date:
        year_str = f"{current_date.year:04}"
        month_str = f"{current_date.month:02}"
        day_str = f"{current_date.day:02}"
        url_box.append(f"{base_url}/{year_str}/{month_str}/{day_str}/")
        current_date += datetime.timedelta(days=1)
    return url_box
" sdo_urls = generate_sdo_urls(date1, date2) if not sdo_urls: return "
Invalid date range or format. Please ensure the start date is not after the end date.
", [] out_box=[] start_day_str = date1[:8] end_day_str = date2[:8] start_time_str = date1[8:12] end_time_str = date2[8:12] for link in sdo_urls: current_day_str = "".join(link.split('/')[-4:-1]) is_first_day = (current_day_str == start_day_str) is_last_day = (current_day_str == end_day_str) try: feed1 = requests.get(link) feed1.raise_for_status() except requests.exceptions.RequestException as e: print(f"Could not fetch {link}: {e}") continue if "All" in size: size_list = sdo_size else: size_list = size if "All" in obj: obj_list = sdo_obj else: obj_list = obj soup = bs4.BeautifulSoup(feed1.content, 'html.parser') anchor_elements = soup.find_all('a') for element in anchor_elements: href=element.get('href') if href and href.endswith('.jpg'): parts = href.split('_') if len(parts) < 4: continue image_time_str = parts[1] image_size_str = parts[2] image_obj_str = parts[3].split('.jpg')[0] time_match = False if is_first_day and is_last_day: if start_time_str <= image_time_str <= end_time_str: time_match = True elif is_first_day: if image_time_str >= start_time_str: time_match = True elif is_last_day: if image_time_str <= end_time_str: time_match = True else: # Middle day time_match = True if time_match: if image_obj_str in obj_list and int(image_size_str) in size_list: full_url = f'{link}{href}' out_box.append(full_url) html_in+=f'
{href}
' html_in += "
" if not out_box: html_in = "
No images found for the specified criteria.
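
# get_concat_h_cut / get_concat_v_cut stitch two images together with Pillow,
# cropping to the smaller shared dimension; the vertical variant is drawn on a
# theme-colored background.
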
" return html_in, out_box def get_concat_h_cut(in1, in2): uid=uuid.uuid4() im1=Image.open(in1) im2=Image.open(in2) dst = Image.new('RGB', (im1.width + im2.width, min(im1.height, im2.height))) dst.paste(im1, (0, 0)) dst.paste(im2, (im1.width, 0)) dst.save(f"h_{uid}.jpg") return f"h_{uid}.jpg" def get_concat_v_cut(in1, in2, theme='dark'): uid=uuid.uuid4() im1=Image.open(in1) im2=Image.open(in2) if theme=='dark': color=(31,41,55) if theme=='light': color=(255,255,255) dst = Image.new('RGB', (min(im1.width, im2.width), im1.height + im2.height),color=color) dst.paste(im1, (0, 0)) dst.paste(im2, (0, im1.height)) dst.save(f"v_{uid}.jpg") return f"v_{uid}.jpg" def nasa_sdo_composite(obj,size,date1,date2,ret_num): html_in=f"
" in_year=f"{date2[0:4]}" in_month=f"{date2[4:6]}" in_day=f"{date2[6:8]}" sdo_source_format = f"https://sdo.gsfc.nasa.gov/assets/img/browse/{in_year}/{in_month}/{in_day}/" get_url=sdo_source_format feed1 = requests.get(get_url) if "All" in size: size = sdo_size if "All" in obj: obj = sdo_obj link_box=[] soup = bs4.BeautifulSoup(feed1.content, 'html.parser') anchor_elements = soup.find_all('a') cnt=1 max_cnt=ret_num for element in anchor_elements: href=element.get('href') if href.endswith('.jpg'): for o in obj: for s in size: ls=href.split("_") if ls[1][0:4]>=date1[8:13]: src_obj=ls[3].split('.jpg')[0] if src_obj == o: if int(ls[2]) == int(s): link_box.append(href) link_box.sort(reverse=True) out_box=[] for ea in link_box: if cnt<=max_cnt: out_link=f'{sdo_source_format}{ea}' out_box.append(out_link) cnt+=1 html_in+=f'
{ea}
' html_in+="
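
# run() refreshes the NOAA gallery; get_date() validates the ten number inputs
# into YYYYMMDDHHMM strings; comp_combine() tiles downloaded images onto a
# cols x rows grid by repeated pairwise concatenation, e.g. (illustrative)
# comp_combine(comp_list[:4], 2, 2, resize=[640, 450]) -> path to a 2x2 JPEG.
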
" return html_in, out_box def run(): out=make_tree() im_html=get_images() return im_html def get_date(year1,month1,day1,hour1,minute1,year2,month2,day2,hour2,minute2): try: start_dt = datetime.datetime(int(year1), int(month1), int(day1), int(hour1), int(minute1)) end_dt = datetime.datetime(int(year2), int(month2), int(day2), int(hour2), int(minute2)) date1 = start_dt.strftime('%Y%m%d%H%M') date2 = end_dt.strftime('%Y%m%d%H%M') return date1, date2 except ValueError as e: print(f"Error creating date: {e}") return None, None def comp_combine(inp_ims: list,comp_col,comp_row,resize=False): im_box=[] v_box=[] rows=comp_row cols=comp_col if not type(inp_ims)==type([]): try: inp_ims=eval(inp_ims) except: return None if not inp_ims: return None for i,im in enumerate(inp_ims): urllib.request.urlretrieve(im, f"tmp-{i}.jpg") if resize: pil_im = Image.open(f'tmp-{i}.jpg') pil_im = pil_im.resize((int(resize[0]),int(resize[1])),resample=Image.Resampling.LANCZOS).convert('RGB') pil_im.save(f'tmp-{i}.jpg') im_box.append(f"tmp-{i}.jpg") im_cnt=len(im_box) cnt=1 for rr in range(rows): if cnt > im_cnt: break col_start=[im_box[cnt-1]] for cc in range(cols-1): try: if cnt >= im_cnt: break col_out=get_concat_h_cut(col_start[0],im_box[cnt]) col_start[0]=col_out cnt+=1 except Exception as e: pass v_box.append(col_start[0]) if not v_box: return None v_cnt=1 v_box_cnt=len(v_box) v_start=[v_box[0]] for vv in range(v_box_cnt-1): v_out=get_concat_v_cut(v_start[0],v_box[v_cnt]) v_start[0]=v_out v_cnt+=1 return v_start[0] def comp_1(comp_list=comp_list,col=3,row=4,resize=[640,450]): outp=comp_combine(comp_list,col,row,resize) return (outp) def get_time(): now = datetime.datetime.utcnow() datetimestamp_str = now.strftime('%Y-%m-%d %H:%M:%S') html_out = f"""
Current: {datetimestamp_str} UTC
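
# UI: NOAA tabs (default composite, full image gallery, and a Raw browser
# driven by eight chained directory dropdowns) plus NASA tabs (date-filtered
# SDO images and composites, SOHO stills, SOHO video loops).
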
""" return (gr.update(value=html_out), now.year, now.month, now.day, now.hour, now.minute, now.year, now.month, now.day, now.hour, now.minute) with gr.Blocks() as app: url_list=gr.State([]) now = datetime.datetime.utcnow() time_html=gr.HTML() with gr.Tab("NOAA"): with gr.Tab("Compilation"): first_comp_btn=gr.Button("Load") first_comp_img=gr.Image() with gr.Tab("Images"): first_btn=gr.Button("Load") html_im=gr.HTML() with gr.Tab("Raw"): with gr.Row(): drop1=gr.Dropdown() drop2=gr.Dropdown() drop3=gr.Dropdown() drop4=gr.Dropdown() with gr.Row(): drop5=gr.Dropdown() drop6=gr.Dropdown() drop7=gr.Dropdown() drop8=gr.Dropdown() load_btn=gr.Button("Load") html_raw=gr.HTML() links=gr.JSON() with gr.Tab("NASA"): with gr.Tab("SDO Images"): with gr.Row(): gr.Markdown("
From:") year1=gr.Number(label="Year",minimum=2010,maximum=now.year,precision=0,value=now.year,min_width=10) month1=gr.Number(label="Month",minimum=1,maximum=12,precision=0,value=now.month,min_width=10) day1=gr.Number(label="Day",minimum=1,maximum=31,precision=0,value=now.day,min_width=10) hour1=gr.Number(label="Hour",minimum=0,maximum=23,precision=0,value=now.hour,min_width=10) minute1=gr.Number(label="Minute",minimum=0,maximum=59,precision=0,value=now.minute,min_width=10) gr.Markdown("
To:") year2=gr.Number(label="Year",minimum=2010,maximum=now.year,precision=0,value=now.year,min_width=10) month2=gr.Number(label="Month",minimum=1,maximum=12,precision=0,value=now.month,min_width=10) day2=gr.Number(label="Day",minimum=1,maximum=31,precision=0,value=now.day,min_width=10) hour2=gr.Number(label="Hour",minimum=0,maximum=23,precision=0,value=now.hour,min_width=10) minute2=gr.Number(label="Minute",minimum=0,maximum=59,precision=0,value=now.minute,min_width=10) with gr.Row(): sdo_drop1=gr.Dropdown(label="Instrument", multiselect=True, choices=["All"]+sdo_obj, value="0094") sdo_drop2=gr.Dropdown(label="Size", multiselect=True, choices=["All"]+sdo_size,value=512) with gr.Tab("Composite"): with gr.Row(): comp_num=gr.Number(label="Return",precision=0,value=4,interactive=True) comp_col=gr.Number(label="Columns",precision=0,value=2,interactive=True) comp_row=gr.Number(label="Rows",precision=0,value=2,interactive=True) with gr.Row(): comp_combine_btn=gr.Button("Combine") sdo_comp_btn=gr.Button("Load") img_out=gr.Image() comp_combine_html=gr.HTML() sdo_comp_html=gr.HTML() image_comp_list=gr.Textbox(visible=True) with gr.Tab("Images"): sdo_btn=gr.Button("Load") with gr.Row(): fps_slider = gr.Slider(minimum=1, maximum=60, value=10, step=1, label="Animation FPS") loop_checkbox = gr.Checkbox(label="Loop Animation", value=True) sdo_gif=gr.Button("Make GIF") sdo_gif_html=gr.HTML() sdo_gif.click(make_animation_new,[url_list, fps_slider, loop_checkbox],sdo_gif_html) sdo_html=gr.HTML() date1=gr.Textbox(visible=False) date2=gr.Textbox(visible=False) with gr.Tab("SOHO Images"): soho_btn=gr.Button("Load") html_nasa=gr.HTML() with gr.Tab("SOHO Video"): soho_vid_btn=gr.Button("Load") html_vid_nasa=gr.HTML() app.load(get_time,None,[time_html,year1,month1,day1,hour1,minute1,year2,month2,day2,hour2,minute2]).then(make_tree,None,drop1) first_comp_btn.click(comp_1,None,first_comp_img) comp_combine_btn.click(comp_combine,[image_comp_list,comp_col,comp_row],img_out) sdo_comp_btn.click(get_date,[year1,month1,day1,hour1,minute1,year2,month2,day2,hour2,minute2],[date1,date2]).then(nasa_sdo_composite,[sdo_drop1,sdo_drop2,date1,date2,comp_num],[sdo_comp_html,image_comp_list]) sdo_btn.click(get_date,[year1,month1,day1,hour1,minute1,year2,month2,day2,hour2,minute2],[date1,date2]).then(nasa_sdo_images,[sdo_drop1,sdo_drop2,date1,date2],[sdo_html,url_list]) soho_btn.click(make_nasa_soho_image_trigger,None,html_nasa) soho_vid_btn.click(make_nasa_soho_videos,None,html_vid_nasa) load_btn.click(load_json,[drop1,drop2,drop3,drop4,drop5,drop6,drop7,drop8],[html_raw,links]) drop1.change(make_tree,drop1,[drop2]) drop2.change(make_tree,[drop1,drop2],[drop3]) drop3.change(make_tree,[drop1,drop2,drop3],[drop4]) drop4.change(make_tree,[drop1,drop2,drop3,drop4],[drop5]) drop5.change(make_tree,[drop1,drop2,drop3,drop4,drop5],[drop6]) drop6.change(make_tree,[drop1,drop2,drop3,drop4,drop5,drop6],[drop7]) drop7.change(make_tree,[drop1,drop2,drop3,drop4,drop5,drop6,drop7],[drop8]) first_btn.click(run,None,[html_im]) app.queue(default_concurrency_limit=10).launch()