comments
Files changed:
- utils/jwst_downloading.ipynb  +19 −13
- utils/jwst_filtering.ipynb    +4 −0
utils/jwst_downloading.ipynb

@@ -9,8 +9,6 @@
 "source": [
 "\"\"\"\n",
 "\n",
-"FULLY UNCLEANED CODE\n",
-"\n",
 "Contains the necessary scripts to actually download the FITS files that are in your JWST csv.\n",
 "\n",
 "\n",
@@ -644,6 +642,11 @@
 "\n",
 "THRESH = JWST_FOV\n",
 "\n",
+"\"\"\"\n",
+"Initial clustering and filtering using single RA DEC value.\n",
+"\n",
+"\"\"\"\n",
+"\n",
 "clustering = AgglomerativeClustering(n_clusters=None, metric='precomputed', linkage='single', distance_threshold=THRESH)\n",
 "labels = clustering.fit_predict(angular_separations_matrix)\n",
 "\n",
@@ -740,6 +743,11 @@
 "source": [
 "from astropy.table import unique, vstack, Table\n",
 "\n",
+"\"\"\"\n",
+"Call API that gives the file URLs for each observations.\n",
+"\n",
+"\"\"\"\n",
+"\n",
 "matched_obs = result\n",
 "\n",
 "# Split the observations into \"chunks\" of size five\n",
@@ -782,17 +790,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"manifest = Observations.download_products(files['obsID'], curl_flag=True)"
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "b8b7937f-ca1d-4b4c-abef-1055cdf115ef",
-"metadata": {},
-"outputs": [],
-"source": [
-"wi"
+"manifest = Observations.download_products(files['obsID'], curl_flag=True)\n",
+"# Creates .sh scripts that have to then be run to actually download data"
 ]
 },
 {
@@ -807,6 +806,12 @@
 "from astropy.table import Table\n",
 "import glob\n",
 "\n",
+"\"\"\"\n",
+"This code allows us to combine multiple exposures of the same observation into one FITS file.\n",
+"\n",
+"This code was NOT used; we only used a single exposure for our dataset.\n",
+"\"\"\"\n",
+"\n",
 "def create_combined_hubble_file(short_obs_id):\n",
 "    \n",
 "    file_list = list(files[files['obs_id_short'] == short_obs_id]['productFilename'])\n",
@@ -1005,6 +1010,7 @@
 "source": [
 "\"\"\"\n",
 "Code to verify test/train pollution.\n",
+"Prints images which are nearby and might overlap.\n",
 "\"\"\"\n",
 "\n",
 "\n",
utils/jwst_filtering.ipynb

@@ -265,6 +265,8 @@
 "# This is 6 arcmin; both detectors are 5.1 by 2.2 arcmin\n",
 "JWST_FOV = 0.085 # 5.1 arcmin\n",
 "\n",
+"# Initial clustering from RA DEC only\n",
+"\n",
 "THRESH = JWST_FOV * 2\n",
 "\n",
 "clustering = AgglomerativeClustering(n_clusters=None, metric='precomputed', linkage='single', distance_threshold=THRESH)\n",
@@ -3182,6 +3184,8 @@
 "failed_labels = []\n",
 "failed_paths = []\n",
 "\n",
+"# Compute spherical polygon overlaps to verify overlap and remove ones that overlap\n",
+"\n",
 "for label in tqdm(np.unique(labels)):\n",
 "    polys = [(all_polys[i], confirmed_fits_paths[i]) for i in range(len(labels)) if labels[i] == label]\n",
 "    if len(polys) > 1:\n",
|