diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bf938d9e..20e1c23d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ ci: autoupdate_schedule: "quarterly" repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.6.0" + rev: "v0.6.4" hooks: - id: ruff args: ["--fix", "--unsafe-fixes"] diff --git a/.readthedocs.yaml b/.readthedocs.yaml index f9e3ca3c..efea3bdf 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -1,9 +1,9 @@ version: 2 build: - os: ubuntu-22.04 + os: ubuntu-lts-latest tools: - python: "3.11" + python: latest jobs: post_checkout: - git submodule update --init diff --git a/exts/rawfiles.py b/exts/rawfiles.py index 18dd05e4..a00aa105 100644 --- a/exts/rawfiles.py +++ b/exts/rawfiles.py @@ -13,6 +13,6 @@ def on_html_collect_pages(app): return () -def setup(app): +def setup(app) -> None: app.add_config_value("rawfiles", [], "html") app.connect("html-collect-pages", on_html_collect_pages) diff --git a/posts/2018/2018-07-21-coronal-loop-coordinates.ipynb b/posts/2018/2018-07-21-coronal-loop-coordinates.ipynb index 40a01f18..3bf1fbd0 100644 --- a/posts/2018/2018-07-21-coronal-loop-coordinates.ipynb +++ b/posts/2018/2018-07-21-coronal-loop-coordinates.ipynb @@ -53,25 +53,22 @@ }, "outputs": [], "source": [ - "import warnings\n", - "warnings.simplefilter('ignore')\n", - "\n", + "import astropy.constants as const\n", + "import astropy.time\n", + "import astropy.units as u\n", + "import matplotlib.colors\n", + "import matplotlib.pyplot as plt\n", "import numpy as np\n", - "from scipy.ndimage import gaussian_filter\n", "import scipy.integrate\n", "import scipy.optimize\n", - "import matplotlib.pyplot as plt\n", - "import matplotlib.colors\n", - "import astropy.units as u\n", - "import astropy.time\n", - "import astropy.constants as const\n", "from astropy.coordinates import SkyCoord\n", + "from scipy.ndimage import gaussian_filter\n", + "from sunpy.coordinates import Heliocentric, Helioprojective\n", "from sunpy.map import GenericMap\n", - "from sunpy.util.metadata import MetaDict\n", "from sunpy.sun import constants as sun_const\n", - "from sunpy.coordinates import Helioprojective,HeliographicStonyhurst,Heliocentric\n", + "from sunpy.util.metadata import MetaDict\n", "\n", - "%matplotlib inline\n" + "%matplotlib inline" ] }, { @@ -103,30 +100,31 @@ "metadata": {}, "outputs": [], "source": [ - "def semi_circular_loop(length,theta0=0*u.deg):\n", + "def semi_circular_loop(length, theta0=0 * u.deg):\n", " r_1 = const.R_sun\n", + "\n", " def r_2_func(x):\n", - " return np.arccos(0.5*x/r_1.to(u.cm).value) - np.pi + length.to(u.cm).value/2./x\n", - " r_2 = scipy.optimize.bisect(r_2_func,length.to(u.cm).value/(2*np.pi),\n", - " length.to(u.cm).value/np.pi) * u.cm\n", - " alpha = np.arccos(0.5*(r_2/r_1).decompose())\n", - " phi = np.linspace(-np.pi*u.rad + alpha,np.pi*u.rad-alpha,2000)\n", + " return np.arccos(0.5 * x / r_1.to(u.cm).value) - np.pi + length.to(u.cm).value / 2.0 / x\n", + "\n", + " r_2 = scipy.optimize.bisect(r_2_func, length.to(u.cm).value / (2 * np.pi), length.to(u.cm).value / np.pi) * u.cm\n", + " alpha = np.arccos(0.5 * (r_2 / r_1).decompose())\n", + " phi = np.linspace(-np.pi * u.rad + alpha, np.pi * u.rad - alpha, 2000)\n", " # Quadratic formula to find r\n", - " a = 1.\n", - " b = -2*(r_1.to(u.cm)*np.cos(phi.to(u.radian)))\n", - " c = r_1.to(u.cm)**2 - r_2.to(u.cm)**2\n", - " r = (-b + np.sqrt(b**2 - 4*a*c))/2/a\n", + " a = 1.0\n", + " b = -2 * (r_1.to(u.cm) * np.cos(phi.to(u.radian)))\n", + " c = 
r_1.to(u.cm) ** 2 - r_2.to(u.cm) ** 2\n", + " r = (-b + np.sqrt(b**2 - 4 * a * c)) / 2 / a\n", " # Choose only points above the surface\n", - " i_r = np.where(r>r_1)\n", + " i_r = np.where(r > r_1)\n", " r = r[i_r]\n", " phi = phi[i_r]\n", - " hcc_frame = Heliocentric(observer=SkyCoord(\n", - " lon=0*u.deg,lat=theta0,radius=r_1,frame='heliographic_stonyhurst'))\n", - " return (SkyCoord(x=r.to(u.cm)*np.sin(phi.to(u.radian)),\n", - " y=u.Quantity(r.shape[0]*[0*u.cm]),\n", - " z=r.to(u.cm)*np.cos(phi.to(u.radian)),\n", - " frame=hcc_frame)\n", - " .transform_to('heliographic_stonyhurst'))\n" + " hcc_frame = Heliocentric(observer=SkyCoord(lon=0 * u.deg, lat=theta0, radius=r_1, frame=\"heliographic_stonyhurst\"))\n", + " return SkyCoord(\n", + " x=r.to(u.cm) * np.sin(phi.to(u.radian)),\n", + " y=u.Quantity(r.shape[0] * [0 * u.cm]),\n", + " z=r.to(u.cm) * np.cos(phi.to(u.radian)),\n", + " frame=hcc_frame,\n", + " ).transform_to(\"heliographic_stonyhurst\")" ] }, { @@ -135,7 +133,7 @@ "metadata": {}, "outputs": [], "source": [ - "loop = semi_circular_loop(500*u.Mm,theta0=30*u.deg)\n" + "loop = semi_circular_loop(500 * u.Mm, theta0=30 * u.deg)" ] }, { @@ -157,7 +155,7 @@ } ], "source": [ - "loop[[0,-1]] # First and last points\n" + "loop[[0, -1]] # First and last points" ] }, { @@ -180,20 +178,31 @@ "metadata": {}, "outputs": [], "source": [ - "data = np.ones((10,10))\n", + "data = np.ones((10, 10))\n", "time_now = astropy.time.Time.now()\n", - "meta = MetaDict({\n", - " 'ctype1': 'HPLN-TAN','ctype2': 'HPLT-TAN',\n", - " 'cunit1': 'arcsec', 'cunit2': 'arcsec',\n", - " 'crpix1': (data.shape[0] + 1)/2., 'crpix2': (data.shape[1] + 1)/2.,\n", - " 'cdelt1': 1.0, 'cdelt2': 1.0, 'crval1': 0.0, 'crval2': 0.0,\n", - " 'hgln_obs': 0.0, 'hglt_obs': 0.0,\n", - " 'dsun_obs': const.au.to(u.m).value, 'dsun_ref': const.au.to(u.m).value,\n", - " 'rsun_ref': const.R_sun.to(u.m).value,\n", - " 'rsun_obs': ((const.R_sun/const.au).decompose()*u.radian).to(u.arcsec).value,\n", - " 't_obs': time_now.iso, 'date-obs': time_now.iso,\n", - "})\n", - "dummy_map = GenericMap(data,meta)\n" + "meta = MetaDict(\n", + " {\n", + " \"ctype1\": \"HPLN-TAN\",\n", + " \"ctype2\": \"HPLT-TAN\",\n", + " \"cunit1\": \"arcsec\",\n", + " \"cunit2\": \"arcsec\",\n", + " \"crpix1\": (data.shape[0] + 1) / 2.0,\n", + " \"crpix2\": (data.shape[1] + 1) / 2.0,\n", + " \"cdelt1\": 1.0,\n", + " \"cdelt2\": 1.0,\n", + " \"crval1\": 0.0,\n", + " \"crval2\": 0.0,\n", + " \"hgln_obs\": 0.0,\n", + " \"hglt_obs\": 0.0,\n", + " \"dsun_obs\": const.au.to(u.m).value,\n", + " \"dsun_ref\": const.au.to(u.m).value,\n", + " \"rsun_ref\": const.R_sun.to(u.m).value,\n", + " \"rsun_obs\": ((const.R_sun / const.au).decompose() * u.radian).to(u.arcsec).value,\n", + " \"t_obs\": time_now.iso,\n", + " \"date-obs\": time_now.iso,\n", + " }\n", + ")\n", + "dummy_map = GenericMap(data, meta)" ] }, { @@ -225,11 +234,11 @@ } ], "source": [ - "fig = plt.figure(figsize=(10,10))\n", + "fig = plt.figure(figsize=(10, 10))\n", "ax = fig.gca(projection=dummy_map)\n", - "dummy_map.plot(alpha=0,extent=[-1000,1000,-1000,1000],title=False)\n", - "ax.plot_coord(loop.transform_to(dummy_map.coordinate_frame),color='C0',lw=2)\n", - "dummy_map.draw_grid(grid_spacing=10*u.deg,color='k',axes=ax)\n" + "dummy_map.plot(alpha=0, extent=[-1000, 1000, -1000, 1000], title=False)\n", + "ax.plot_coord(loop.transform_to(dummy_map.coordinate_frame), color=\"C0\", lw=2)\n", + "dummy_map.draw_grid(grid_spacing=10 * u.deg, color=\"k\", axes=ax)" ] }, { @@ -270,14 +279,13 @@ "metadata": {}, "outputs": [], 
"source": [ - "def isothermal_density(loop, length, n0=1e12*u.cm**(-3), T=1*u.MK,):\n", - " s = np.linspace(0*length.unit,length,loop.radius.shape[0]).to(u.cm)\n", + "def isothermal_density(loop, length, n0=1e12 * u.cm ** (-3), t=1 * u.MK):\n", + " s = np.linspace(0 * length.unit, length, loop.radius.shape[0]).to(u.cm)\n", " r = loop.radius.to(u.cm)\n", - " lambda_p = 2*const.k_B*T/(const.m_p*sun_const.surface_gravity)\n", - " integral = (np.gradient(r.value,(s[1]-s[0]).value)\n", - " /(r.value**2)*np.gradient(s.value)).cumsum()/u.cm\n", - " exp_term = (const.R_sun**2)/lambda_p*integral\n", - " return n0*np.exp(-exp_term)\n" + " lambda_p = 2 * const.k_B * t / (const.m_p * sun_const.surface_gravity)\n", + " integral = (np.gradient(r.value, (s[1] - s[0]).value) / (r.value**2) * np.gradient(s.value)).cumsum() / u.cm\n", + " exp_term = (const.R_sun**2) / lambda_p * integral\n", + " return n0 * np.exp(-exp_term)" ] }, { @@ -293,7 +301,7 @@ "metadata": {}, "outputs": [], "source": [ - "density = isothermal_density(loop,500*u.Mm,T=1*u.MK,n0=1e11/(u.cm**3))\n" + "density = isothermal_density(loop, 500 * u.Mm, T=1 * u.MK, n0=1e11 / (u.cm**3))" ] }, { @@ -331,10 +339,7 @@ "metadata": {}, "outputs": [], "source": [ - "observer = SkyCoord(lon=0*u.deg,\n", - " lat=0*u.deg,\n", - " radius=const.au,\n", - " frame='heliographic_stonyhurst')\n" + "observer = SkyCoord(lon=0 * u.deg, lat=0 * u.deg, radius=const.au, frame=\"heliographic_stonyhurst\")" ] }, { @@ -350,7 +355,7 @@ "metadata": {}, "outputs": [], "source": [ - "coords = loop.transform_to(Helioprojective(observer=observer))\n" + "coords = loop.transform_to(Helioprojective(observer=observer))" ] }, { @@ -366,14 +371,14 @@ "metadata": {}, "outputs": [], "source": [ - "res_x,res_y = 5*u.arcsec/u.pixel,5*u.arcsec/u.pixel\n", - "pad_x,pad_y = res_x*5*u.pixel,res_y*5*u.pixel\n", - "min_x,max_x = coords.Tx.min()-pad_x,coords.Tx.max()+pad_x\n", - "min_y,max_y = coords.Ty.min()-pad_y,coords.Ty.max()+pad_y\n", - "min_z,max_z = coords.distance.min(),coords.distance.max()\n", + "res_x, res_y = 5 * u.arcsec / u.pixel, 5 * u.arcsec / u.pixel\n", + "pad_x, pad_y = res_x * 5 * u.pixel, res_y * 5 * u.pixel\n", + "min_x, max_x = coords.Tx.min() - pad_x, coords.Tx.max() + pad_x\n", + "min_y, max_y = coords.Ty.min() - pad_y, coords.Ty.max() + pad_y\n", + "min_z, max_z = coords.distance.min(), coords.distance.max()\n", "bins_x = np.ceil((max_x - min_x) / res_x)\n", "bins_y = np.ceil((max_y - min_y) / res_y)\n", - "bins_z = max(bins_x, bins_y)\n" + "bins_z = max(bins_x, bins_y)" ] }, { @@ -389,8 +394,8 @@ "metadata": {}, "outputs": [], "source": [ - "dz = (max_z - min_z).cgs / bins_z * (1. 
* u.pixel)\n", - "em = density**2 * dz.value\n" + "dz = (max_z - min_z).cgs / bins_z * (1.0 * u.pixel)\n", + "em = density**2 * dz.value" ] }, { @@ -406,8 +411,7 @@ "metadata": {}, "outputs": [], "source": [ - "rsun_obs = ((const.R_sun / (observer.radius - const.R_sun))\n", - " .decompose()* u.radian).to(u.arcsec)\n" + "rsun_obs = ((const.R_sun / (observer.radius - const.R_sun)).decompose() * u.radian).to(u.arcsec)" ] }, { @@ -424,8 +428,8 @@ "outputs": [], "source": [ "off_disk = np.sqrt(coords.Tx**2 + coords.Ty**2) > rsun_obs\n", - "in_front_of_disk = coords.distance - observer.radius < 0.\n", - "mask = np.any(np.stack([off_disk, in_front_of_disk], axis=1), axis=1)\n" + "in_front_of_disk = coords.distance - observer.radius < 0.0\n", + "mask = np.any(np.stack([off_disk, in_front_of_disk], axis=1), axis=1)" ] }, { @@ -434,7 +438,7 @@ "metadata": {}, "outputs": [], "source": [ - "weights = em * mask\n" + "weights = em * mask" ] }, { @@ -451,10 +455,12 @@ "outputs": [], "source": [ "hist, _, _ = np.histogram2d(\n", - " coords.Tx.value, coords.Ty.value,\n", + " coords.Tx.value,\n", + " coords.Ty.value,\n", " bins=(bins_x.value, bins_y.value),\n", - " range=((min_x.value,max_x.value), (min_y.value,max_y.value)),\n", - " weights=weights)\n" + " range=((min_x.value, max_x.value), (min_y.value, max_y.value)),\n", + " weights=weights,\n", + ")" ] }, { @@ -470,7 +476,7 @@ "metadata": {}, "outputs": [], "source": [ - "em_hist = gaussian_filter(hist.T, (1.0, 1.0))\n" + "em_hist = gaussian_filter(hist.T, (1.0, 1.0))" ] }, { @@ -486,23 +492,24 @@ "metadata": {}, "outputs": [], "source": [ - "header = MetaDict({\n", - " 'crval1': (min_x + (max_x - min_x)/2).value,\n", - " 'crval2': (min_y + (max_y - min_y)/2).value,\n", - " 'cunit1': coords.Tx.unit.to_string(),\n", - " 'cunit2': coords.Ty.unit.to_string(),\n", - " 'hglt_obs': observer.lat.to(u.deg).value,\n", - " 'hgln_obs': observer.lon.to(u.deg).value,\n", - " 'ctype1': 'HPLN-TAN',\n", - " 'ctype2': 'HPLT-TAN',\n", - " 'dsun_obs': observer.radius.to(u.m).value,\n", - " 'rsun_obs': ((const.R_sun / (observer.radius - const.R_sun))\n", - " .decompose() * u.radian).to(u.arcsec).value,\n", - " 'cdelt1': res_x.value,\n", - " 'cdelt2': res_y.value,\n", - " 'crpix1': (bins_x.value + 1.0)/2.0,\n", - " 'crpix2': (bins_y.value + 1.0)/2.0,\n", - "})\n" + "header = MetaDict(\n", + " {\n", + " \"crval1\": (min_x + (max_x - min_x) / 2).value,\n", + " \"crval2\": (min_y + (max_y - min_y) / 2).value,\n", + " \"cunit1\": coords.Tx.unit.to_string(),\n", + " \"cunit2\": coords.Ty.unit.to_string(),\n", + " \"hglt_obs\": observer.lat.to(u.deg).value,\n", + " \"hgln_obs\": observer.lon.to(u.deg).value,\n", + " \"ctype1\": \"HPLN-TAN\",\n", + " \"ctype2\": \"HPLT-TAN\",\n", + " \"dsun_obs\": observer.radius.to(u.m).value,\n", + " \"rsun_obs\": ((const.R_sun / (observer.radius - const.R_sun)).decompose() * u.radian).to(u.arcsec).value,\n", + " \"cdelt1\": res_x.value,\n", + " \"cdelt2\": res_y.value,\n", + " \"crpix1\": (bins_x.value + 1.0) / 2.0,\n", + " \"crpix2\": (bins_y.value + 1.0) / 2.0,\n", + " }\n", + ")" ] }, { @@ -511,7 +518,7 @@ "metadata": {}, "outputs": [], "source": [ - "em_map = GenericMap(em_hist,header)\n" + "em_map = GenericMap(em_hist, header)" ] }, { @@ -550,17 +557,19 @@ } ], "source": [ - "fig = plt.figure(figsize=(15,10))\n", + "fig = plt.figure(figsize=(15, 10))\n", "ax = fig.gca(projection=em_map)\n", - "im = em_map.plot(cmap='magma',title=False,\n", - " norm=matplotlib.colors.SymLogNorm(1,vmin=1e27,vmax=5e29),\n", - " )\n", - 
"ax.plot_coord(SkyCoord(-300*u.arcsec,300*u.arcsec,frame=em_map.coordinate_frame),alpha=0)\n", - "ax.plot_coord(SkyCoord(300*u.arcsec,900*u.arcsec,frame=em_map.coordinate_frame),alpha=0)\n", - "em_map.draw_grid(grid_spacing=10*u.deg,color='w',axes=ax)\n", + "im = em_map.plot(\n", + " cmap=\"magma\",\n", + " title=False,\n", + " norm=matplotlib.colors.SymLogNorm(1, vmin=1e27, vmax=5e29),\n", + ")\n", + "ax.plot_coord(SkyCoord(-300 * u.arcsec, 300 * u.arcsec, frame=em_map.coordinate_frame), alpha=0)\n", + "ax.plot_coord(SkyCoord(300 * u.arcsec, 900 * u.arcsec, frame=em_map.coordinate_frame), alpha=0)\n", + "em_map.draw_grid(grid_spacing=10 * u.deg, color=\"w\", axes=ax)\n", "ax.grid(alpha=0)\n", - "ax.set_facecolor('k')\n", - "fig.colorbar(im,ax=ax)\n" + "ax.set_facecolor(\"k\")\n", + "fig.colorbar(im, ax=ax)" ] }, { @@ -579,28 +588,34 @@ "metadata": {}, "outputs": [], "source": [ - "def loop_arcade(n_loops, length_min=10*u.Mm, length_max=100*u.Mm, theta_min=-10*u.deg,theta_max=10*u.deg):\n", + "def loop_arcade(n_loops, length_min=10 * u.Mm, length_max=100 * u.Mm, theta_min=-10 * u.deg, theta_max=10 * u.deg):\n", " # Generate loops\n", - " x = np.random.rand(n_loops)\n", + " rnd_gen = np.random.Generator(np.random.PCG64())\n", + " x = rnd_gen.random(n_loops)\n", " alpha = -1.5\n", - " lengths = ((length_max**(alpha + 1.) - length_min**(alpha + 1.))*x\n", - " + length_min**(alpha + 1.))**(1./(alpha + 1.))\n", + " lengths = ((length_max ** (alpha + 1.0) - length_min ** (alpha + 1.0)) * x + length_min ** (alpha + 1.0)) ** (\n", + " 1.0 / (alpha + 1.0)\n", + " )\n", " thetas = np.linspace(theta_min, theta_max, n_loops)\n", - " loops = [semi_circular_loop(l,theta0=th,) for l,th in zip(lengths,thetas)]\n", + " loops = [semi_circular_loop(l, theta0=th) for l, th in zip(lengths, thetas)]\n", " # Get densities\n", " ## Choose heating rate, get T from RTV scaling laws\n", - " E = 1e-4*u.erg/(u.cm**3)/u.s\n", - " T = (1.83e3)*(E.value/5.09e4*(lengths.to(u.cm).value**2))**(2/7) * u.K\n", - " density = np.hstack([isothermal_density(loop, length, T=t.to(u.MK),n0=1e11/(u.cm**3))\n", - " for loop,length,t in zip(loops,lengths,T)])\n", - " density = u.Quantity(density.value,'cm^-3')\n", + " e = 1e-4 * u.erg / (u.cm**3) / u.s\n", + " t = (1.83e3) * (e.value / 5.09e4 * (lengths.to(u.cm).value ** 2)) ** (2 / 7) * u.K\n", + " density = np.hstack(\n", + " [\n", + " isothermal_density(loop, length, t=t.to(u.MK), n0=1e11 / (u.cm**3))\n", + " for loop, length, t in zip(loops, lengths, t)\n", + " ]\n", + " )\n", + " density = u.Quantity(density.value, \"cm^-3\")\n", " # Stack coordinates\n", - " lon = u.Quantity(np.hstack([l.lon.value for l in loops]),loops[0].lon.unit)\n", - " lat = u.Quantity(np.hstack([l.lat.value for l in loops]),loops[0].lat.unit)\n", - " radius = u.Quantity(np.hstack([l.radius.value for l in loops]),loops[0].radius.unit)\n", - " coords = SkyCoord(lon=lon,lat=lat,radius=radius,frame=loops[0].frame)\n", + " lon = u.Quantity(np.hstack([l.lon.value for l in loops]), loops[0].lon.unit)\n", + " lat = u.Quantity(np.hstack([l.lat.value for l in loops]), loops[0].lat.unit)\n", + " radius = u.Quantity(np.hstack([l.radius.value for l in loops]), loops[0].radius.unit)\n", + " coords = SkyCoord(lon=lon, lat=lat, radius=radius, frame=loops[0].frame)\n", "\n", - " return coords,density\n" + " return coords, density" ] }, { @@ -609,8 +624,9 @@ "metadata": {}, "outputs": [], "source": [ - "coords,densities = loop_arcade(1000,length_min=50*u.Mm,length_max=500*u.Mm,\n", - " 
theta_min=-10*u.deg,theta_max=10*u.deg,)\n" + "coords, densities = loop_arcade(\n", + " 1000, length_min=50 * u.Mm, length_max=500 * u.Mm, theta_min=-10 * u.deg, theta_max=10 * u.deg\n", + ")" ] }, { @@ -626,54 +642,55 @@ "metadata": {}, "outputs": [], "source": [ - "def arcade_to_map(coords,densities,observer):\n", + "def arcade_to_map(coords, densities, observer):\n", " coords = coords.transform_to(Helioprojective(observer=observer))\n", " # Setup Bins\n", - " res_x,res_y = 5*u.arcsec/u.pixel,5*u.arcsec/u.pixel\n", - " pad_x,pad_y = res_x*5*u.pixel,res_y*5*u.pixel\n", - " min_x,max_x = coords.Tx.min()-pad_x,coords.Tx.max()+pad_x\n", - " min_y,max_y = coords.Ty.min()-pad_y,coords.Ty.max()+pad_y\n", - " min_z,max_z = coords.distance.min(),coords.distance.max()\n", + " res_x, res_y = 5 * u.arcsec / u.pixel, 5 * u.arcsec / u.pixel\n", + " pad_x, pad_y = res_x * 5 * u.pixel, res_y * 5 * u.pixel\n", + " min_x, max_x = coords.Tx.min() - pad_x, coords.Tx.max() + pad_x\n", + " min_y, max_y = coords.Ty.min() - pad_y, coords.Ty.max() + pad_y\n", + " min_z, max_z = coords.distance.min(), coords.distance.max()\n", " bins_x = np.ceil((max_x - min_x) / res_x)\n", " bins_y = np.ceil((max_y - min_y) / res_y)\n", " bins_z = max(bins_x, bins_y)\n", " # Compute Weights\n", - " dz = (max_z - min_z).cgs / bins_z * (1. * u.pixel)\n", + " dz = (max_z - min_z).cgs / bins_z * (1.0 * u.pixel)\n", " em = densities**2 * dz.value\n", - " rsun_obs = ((const.R_sun / (observer.radius - const.R_sun))\n", - " .decompose()* u.radian).to(u.arcsec)\n", + " rsun_obs = ((const.R_sun / (observer.radius - const.R_sun)).decompose() * u.radian).to(u.arcsec)\n", " off_disk = np.sqrt(coords.Tx**2 + coords.Ty**2) > rsun_obs\n", - " in_front_of_disk = coords.distance - observer.radius < 0.\n", + " in_front_of_disk = coords.distance - observer.radius < 0.0\n", " mask = np.any(np.stack([off_disk, in_front_of_disk], axis=1), axis=1)\n", " weights = em * mask\n", " # Bin values\n", " hist, _, _ = np.histogram2d(\n", - " coords.Tx.value, coords.Ty.value,\n", + " coords.Tx.value,\n", + " coords.Ty.value,\n", " bins=(bins_x.value, bins_y.value),\n", - " range=((min_x.value,max_x.value), (min_y.value,max_y.value)),\n", - " weights=weights)\n", + " range=((min_x.value, max_x.value), (min_y.value, max_y.value)),\n", + " weights=weights,\n", + " )\n", " hist = gaussian_filter(hist.T, (1.0, 1.0))\n", " # Make header\n", - " header = MetaDict({\n", - " 'crval1': (min_x + (max_x - min_x)/2).value,\n", - " 'crval2': (min_y + (max_y - min_y)/2).value,\n", - " 'cunit1': coords.Tx.unit.to_string(),\n", - " 'cunit2': coords.Ty.unit.to_string(),\n", - " 'hglt_obs': observer.lat.to(u.deg).value,\n", - " 'hgln_obs': observer.lon.to(u.deg).value,\n", - " 'ctype1': 'HPLN-TAN',\n", - " 'ctype2': 'HPLT-TAN',\n", - " 'dsun_obs': observer.radius.to(u.m).value,\n", - " 'rsun_obs': ((const.R_sun / (observer.radius - const.R_sun))\n", - " .decompose() * u.radian).to(u.arcsec).value,\n", - " 'cdelt1': res_x.value,\n", - " 'cdelt2': res_y.value,\n", - " 'crpix1': (bins_x.value + 1.0)/2.0,\n", - " 'crpix2': (bins_y.value + 1.0)/2.0,\n", - " })\n", - " plot_settings = {'cmap': 'magma','title':False,\n", - " 'norm': matplotlib.colors.SymLogNorm(1,vmin=5e29,vmax=2e32)}\n", - " return GenericMap(hist,header,plot_settings=plot_settings)\n" + " header = MetaDict(\n", + " {\n", + " \"crval1\": (min_x + (max_x - min_x) / 2).value,\n", + " \"crval2\": (min_y + (max_y - min_y) / 2).value,\n", + " \"cunit1\": coords.Tx.unit.to_string(),\n", + " \"cunit2\": 
coords.Ty.unit.to_string(),\n", + " \"hglt_obs\": observer.lat.to(u.deg).value,\n", + " \"hgln_obs\": observer.lon.to(u.deg).value,\n", + " \"ctype1\": \"HPLN-TAN\",\n", + " \"ctype2\": \"HPLT-TAN\",\n", + " \"dsun_obs\": observer.radius.to(u.m).value,\n", + " \"rsun_obs\": ((const.R_sun / (observer.radius - const.R_sun)).decompose() * u.radian).to(u.arcsec).value,\n", + " \"cdelt1\": res_x.value,\n", + " \"cdelt2\": res_y.value,\n", + " \"crpix1\": (bins_x.value + 1.0) / 2.0,\n", + " \"crpix2\": (bins_y.value + 1.0) / 2.0,\n", + " }\n", + " )\n", + " plot_settings = {\"cmap\": \"magma\", \"title\": False, \"norm\": matplotlib.colors.SymLogNorm(1, vmin=5e29, vmax=2e32)}\n", + " return GenericMap(hist, header, plot_settings=plot_settings)" ] }, { @@ -689,7 +706,7 @@ "metadata": {}, "outputs": [], "source": [ - "arcade_map = arcade_to_map(coords,densities,observer)\n" + "arcade_map = arcade_to_map(coords, densities, observer)" ] }, { @@ -721,15 +738,15 @@ } ], "source": [ - "fig = plt.figure(figsize=(15,10))\n", + "fig = plt.figure(figsize=(15, 10))\n", "ax = fig.gca(projection=arcade_map)\n", "im = arcade_map.plot()\n", - "ax.plot_coord(SkyCoord(-900*u.arcsec,-900*u.arcsec,frame=arcade_map.coordinate_frame),alpha=0)\n", - "ax.plot_coord(SkyCoord(900*u.arcsec,900*u.arcsec,frame=arcade_map.coordinate_frame),alpha=0)\n", - "em_map.draw_grid(grid_spacing=10*u.deg,color='w',axes=ax)\n", + "ax.plot_coord(SkyCoord(-900 * u.arcsec, -900 * u.arcsec, frame=arcade_map.coordinate_frame), alpha=0)\n", + "ax.plot_coord(SkyCoord(900 * u.arcsec, 900 * u.arcsec, frame=arcade_map.coordinate_frame), alpha=0)\n", + "em_map.draw_grid(grid_spacing=10 * u.deg, color=\"w\", axes=ax)\n", "ax.grid(alpha=0)\n", - "ax.set_facecolor('k')\n", - "fig.colorbar(im,ax=ax)\n" + "ax.set_facecolor(\"k\")\n", + "fig.colorbar(im, ax=ax)" ] }, { @@ -745,11 +762,8 @@ "metadata": {}, "outputs": [], "source": [ - "observer = SkyCoord(lon=-25*u.deg,\n", - " lat=-25*u.deg,\n", - " radius=const.au,\n", - " frame='heliographic_stonyhurst')\n", - "arcade_map = arcade_to_map(coords,densities,observer)\n" + "observer = SkyCoord(lon=-25 * u.deg, lat=-25 * u.deg, radius=const.au, frame=\"heliographic_stonyhurst\")\n", + "arcade_map = arcade_to_map(coords, densities, observer)" ] }, { @@ -781,17 +795,15 @@ } ], "source": [ - "fig = plt.figure(figsize=(15,10))\n", + "fig = plt.figure(figsize=(15, 10))\n", "ax = fig.gca(projection=arcade_map)\n", "im = arcade_map.plot()\n", - "arcade_map.draw_grid(grid_spacing=10*u.deg,color='w',axes=ax)\n", - "ax.plot_coord(SkyCoord(-900*u.arcsec,-900*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", - "ax.plot_coord(SkyCoord(900*u.arcsec,900*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", + "arcade_map.draw_grid(grid_spacing=10 * u.deg, color=\"w\", axes=ax)\n", + "ax.plot_coord(SkyCoord(-900 * u.arcsec, -900 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", + "ax.plot_coord(SkyCoord(900 * u.arcsec, 900 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", "ax.grid(alpha=0)\n", - "ax.set_facecolor('k')\n", - "fig.colorbar(im,ax=ax)\n" + "ax.set_facecolor(\"k\")\n", + "fig.colorbar(im, ax=ax)" ] }, { @@ -811,11 +823,8 @@ "metadata": {}, "outputs": [], "source": [ - "observer = SkyCoord(lon=0*u.deg,\n", - " lat=-90*u.deg,\n", - " radius=const.au,\n", - " frame='heliographic_stonyhurst')\n", - "arcade_map = arcade_to_map(coords,densities,observer)\n" + "observer = SkyCoord(lon=0 * u.deg, lat=-90 * u.deg, 
radius=const.au, frame=\"heliographic_stonyhurst\")\n", + "arcade_map = arcade_to_map(coords, densities, observer)" ] }, { @@ -847,17 +856,15 @@ } ], "source": [ - "fig = plt.figure(figsize=(18,10))\n", + "fig = plt.figure(figsize=(18, 10))\n", "ax = fig.gca(projection=arcade_map)\n", "im = arcade_map.plot()\n", - "arcade_map.draw_grid(grid_spacing=10*u.deg,color='w',axes=ax)\n", - "ax.plot_coord(SkyCoord(-450*u.arcsec,600*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", - "ax.plot_coord(SkyCoord(450*u.arcsec,1200*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", + "arcade_map.draw_grid(grid_spacing=10 * u.deg, color=\"w\", axes=ax)\n", + "ax.plot_coord(SkyCoord(-450 * u.arcsec, 600 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", + "ax.plot_coord(SkyCoord(450 * u.arcsec, 1200 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", "ax.grid(alpha=0)\n", - "ax.set_facecolor('k')\n", - "fig.colorbar(im,ax=ax)\n" + "ax.set_facecolor(\"k\")\n", + "fig.colorbar(im, ax=ax)" ] }, { @@ -873,11 +880,8 @@ "metadata": {}, "outputs": [], "source": [ - "observer = SkyCoord(lon=-90*u.deg,\n", - " lat=0*u.deg,\n", - " radius=const.au,\n", - " frame='heliographic_stonyhurst')\n", - "arcade_map = arcade_to_map(coords,densities,observer)\n" + "observer = SkyCoord(lon=-90 * u.deg, lat=0 * u.deg, radius=const.au, frame=\"heliographic_stonyhurst\")\n", + "arcade_map = arcade_to_map(coords, densities, observer)" ] }, { @@ -909,17 +913,15 @@ } ], "source": [ - "fig = plt.figure(figsize=(18,10))\n", + "fig = plt.figure(figsize=(18, 10))\n", "ax = fig.gca(projection=arcade_map)\n", "im = arcade_map.plot()\n", - "arcade_map.draw_grid(grid_spacing=10*u.deg,color='w',axes=ax)\n", - "ax.plot_coord(SkyCoord(700*u.arcsec,-300*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", - "ax.plot_coord(SkyCoord(1300*u.arcsec,300*u.arcsec,\n", - " frame=arcade_map.coordinate_frame),color='w',alpha=0)\n", + "arcade_map.draw_grid(grid_spacing=10 * u.deg, color=\"w\", axes=ax)\n", + "ax.plot_coord(SkyCoord(700 * u.arcsec, -300 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", + "ax.plot_coord(SkyCoord(1300 * u.arcsec, 300 * u.arcsec, frame=arcade_map.coordinate_frame), color=\"w\", alpha=0)\n", "ax.grid(alpha=0)\n", - "ax.set_facecolor('k')\n", - "fig.colorbar(im,ax=ax)\n" + "ax.set_facecolor(\"k\")\n", + "fig.colorbar(im, ax=ax)" ] }, { diff --git a/posts/2018/2018-10-20-time.ipynb b/posts/2018/2018-10-20-time.ipynb index 90c2556a..07c6cf47 100644 --- a/posts/2018/2018-10-20-time.ipynb +++ b/posts/2018/2018-10-20-time.ipynb @@ -57,8 +57,9 @@ ], "source": [ "from astropy.time import Time\n", - "t = Time('2012-06-18T02:00:05.453', scale='tai')\n", - "t\n" + "\n", + "t = Time(\"2012-06-18T02:00:05.453\", scale=\"tai\")\n", + "t" ] }, { @@ -85,7 +86,7 @@ } ], "source": [ - "t.utc\n" + "t.utc" ] }, { @@ -115,7 +116,7 @@ ], "source": [ "t.precision = 9\n", - "t\n" + "t" ] }, { @@ -148,7 +149,8 @@ ], "source": [ "import astropy.units as u\n", - "Time('2016-12-31T23:59:60'),Time('2016-12-31T23:59:59') + 1 * u.s\n" + "\n", + "Time(\"2016-12-31T23:59:60\"), Time(\"2016-12-31T23:59:59\") + 1 * u.s" ] }, { @@ -193,7 +195,7 @@ } ], "source": [ - "Time.FORMATS\n" + "Time.FORMATS" ] }, { @@ -214,7 +216,8 @@ ], "source": [ "import datetime\n", - "Time(datetime.datetime.now())\n" + "\n", + "Time(datetime.datetime.now(tz=datetime.timezone.utc))" ] }, { diff --git a/posts/2022/2022-01-06-aiapy-demo.ipynb 
b/posts/2022/2022-01-06-aiapy-demo.ipynb index 6a3afb76..a3156664 100644 --- a/posts/2022/2022-01-06-aiapy-demo.ipynb +++ b/posts/2022/2022-01-06-aiapy-demo.ipynb @@ -28,29 +28,37 @@ "metadata": {}, "outputs": [], "source": [ + "import aiapy\n", "import astropy\n", - "import astropy.units as u\n", - "from astropy.coordinates import SkyCoord\n", "import astropy.time\n", - "from astropy.visualization import time_support, ImageNormalize, LogStretch\n", - "import numpy as np\n", + "import astropy.units as u\n", + "import matplotlib as mpl\n", "import matplotlib.pyplot as plt\n", + "import numpy as np\n", "import sunpy\n", "import sunpy.map\n", - "from sunpy.net import Fido, attrs as a\n", - "from sunpy.time import parse_time\n", - "\n", - "import aiapy\n", - "from aiapy.psf import psf, deconvolve\n", - "from aiapy.calibrate import (register,update_pointing,correct_degradation, estimate_error,\n", - " degradation,normalize_exposure, respike, fetch_spikes)\n", + "from aiapy.calibrate import (\n", + " correct_degradation,\n", + " degradation,\n", + " estimate_error,\n", + " fetch_spikes,\n", + " normalize_exposure,\n", + " register,\n", + " respike,\n", + " update_pointing,\n", + ")\n", "from aiapy.calibrate.util import get_correction_table\n", + "from aiapy.psf import deconvolve, psf\n", "from aiapy.response import Channel\n", + "from astropy.coordinates import SkyCoord\n", + "from astropy.visualization import ImageNormalize, LogStretch, time_support\n", + "from sunpy.net import Fido\n", + "from sunpy.net import attrs as a\n", + "from sunpy.time import parse_time\n", "\n", - "import matplotlib as mpl\n", "# Increases the figure size in this notebook.\n", "mpl.rcParams[\"savefig.dpi\"] = 150\n", - "mpl.rcParams[\"figure.dpi\"] = 150\n" + "mpl.rcParams[\"figure.dpi\"] = 150" ] }, { @@ -95,9 +103,9 @@ } ], "source": [ - "print(f'astropy v{astropy.__version__}')\n", - "print(f'sunpy v{sunpy.__version__}')\n", - "print(f'aiapy v{aiapy.__version__}')\n" + "print(f\"astropy v{astropy.__version__}\")\n", + "print(f\"sunpy v{sunpy.__version__}\")\n", + "print(f\"aiapy v{aiapy.__version__}\")" ] }, { @@ -163,13 +171,13 @@ } ], "source": [ - "t_start = parse_time('2017-09-10T20:00:00')\n", + "t_start = parse_time(\"2017-09-10T20:00:00\")\n", "search_results = Fido.search(\n", - " a.Time(t_start, t_start+11*u.s),\n", + " a.Time(t_start, t_start + 11 * u.s),\n", " a.Instrument.aia,\n", - " a.Wavelength(171*u.angstrom) | a.Wavelength(335*u.angstrom),\n", + " a.Wavelength(171 * u.angstrom) | a.Wavelength(335 * u.angstrom),\n", ")\n", - "search_results\n" + "search_results" ] }, { @@ -186,7 +194,7 @@ } ], "source": [ - "files = Fido.fetch(search_results, max_conn=1)\n" + "files = Fido.fetch(search_results, max_conn=1)" ] }, { @@ -202,7 +210,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171, m_335 = sunpy.map.Map(sorted(files))\n" + "m_171, m_335 = sunpy.map.Map(sorted(files))" ] }, { @@ -237,7 +245,7 @@ ], "source": [ "m_171.peek(vmin=0)\n", - "m_335.peek(vmin=0)\n" + "m_335.peek(vmin=0)" ] }, { @@ -293,7 +301,7 @@ "metadata": {}, "outputs": [], "source": [ - "psf_171 = psf(m_171.wavelength)\n" + "psf_171 = psf(m_171.wavelength)" ] }, { @@ -332,8 +340,8 @@ } ], "source": [ - "plt.imshow(psf_171, origin='lower', norm=ImageNormalize(vmax=1e-6, stretch=LogStretch()))\n", - "plt.colorbar()\n" + "plt.imshow(psf_171, origin=\"lower\", norm=ImageNormalize(vmax=1e-6, stretch=LogStretch()))\n", + "plt.colorbar()" ] }, { @@ -356,7 +364,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_deconvolved = 
deconvolve(m_171, psf=psf_171)\n" + "m_171_deconvolved = deconvolve(m_171, psf=psf_171)" ] }, { @@ -372,10 +380,10 @@ "metadata": {}, "outputs": [], "source": [ - "blc = SkyCoord(750,-375,unit='arcsec',frame=m_171.coordinate_frame)\n", - "fov = {'width': 400*u.arcsec, 'height': 400*u.arcsec}\n", + "blc = SkyCoord(750, -375, unit=\"arcsec\", frame=m_171.coordinate_frame)\n", + "fov = {\"width\": 400 * u.arcsec, \"height\": 400 * u.arcsec}\n", "m_171_cutout = m_171.submap(blc, **fov)\n", - "m_171_deconvolved_cutout = m_171_deconvolved.submap(blc, **fov)\n" + "m_171_deconvolved_cutout = m_171_deconvolved.submap(blc, **fov)" ] }, { @@ -397,12 +405,12 @@ } ], "source": [ - "fig = plt.figure(figsize=(7,3))\n", + "fig = plt.figure(figsize=(7, 3))\n", "ax = fig.add_subplot(121, projection=m_171_cutout)\n", - "m_171_cutout.plot(axes=ax, title='Before Deconvolution')\n", + "m_171_cutout.plot(axes=ax, title=\"Before Deconvolution\")\n", "ax = fig.add_subplot(122, projection=m_171_deconvolved_cutout)\n", - "m_171_deconvolved_cutout.plot(axes=ax, title='After Deconvolution')\n", - "ax.coords[1].set_axislabel(' ')\n" + "m_171_deconvolved_cutout.plot(axes=ax, title=\"After Deconvolution\")\n", + "ax.coords[1].set_axislabel(\" \")" ] }, { @@ -420,9 +428,9 @@ "metadata": {}, "outputs": [], "source": [ - "x = np.linspace(m_171_deconvolved_cutout.dimensions.x.value*0.55, m_171_deconvolved_cutout.dimensions.x.value*0.7)\n", + "x = np.linspace(m_171_deconvolved_cutout.dimensions.x.value * 0.55, m_171_deconvolved_cutout.dimensions.x.value * 0.7)\n", "y = 0.59 * m_171_deconvolved_cutout.dimensions.y.value * np.ones(x.shape)\n", - "sl = np.s_[np.round(y[0]).astype(int), np.round(x[0]).astype(int):np.round(x[-1]).astype(int)]\n" + "sl = np.s_[np.round(y[0]).astype(int), np.round(x[0]).astype(int) : np.round(x[-1]).astype(int)]" ] }, { @@ -444,18 +452,18 @@ } ], "source": [ - "fig = plt.figure(figsize=(7,3))\n", + "fig = plt.figure(figsize=(7, 3))\n", "ax = fig.add_subplot(121, projection=m_171_deconvolved_cutout)\n", "m_171_deconvolved_cutout.plot(axes=ax)\n", "ax.plot(x, y, lw=1)\n", "ax = fig.add_subplot(122)\n", "Tx = sunpy.map.all_coordinates_from_map(m_171_cutout)[sl].Tx\n", - "ax.plot(Tx,m_171_cutout.data[sl], label='Original')\n", - "ax.plot(Tx,m_171_deconvolved_cutout.data[sl], label='Deconvolved')\n", - "ax.set_ylabel(f'Intensity [{m_171_cutout.unit}]')\n", - "ax.set_xlabel(r'Helioprojective Longitude [arcsec]')\n", - "ax.legend(loc='upper center', ncol=2, frameon=False, bbox_to_anchor=(0.5,1.15))\n", - "plt.tight_layout()\n" + "ax.plot(Tx, m_171_cutout.data[sl], label=\"Original\")\n", + "ax.plot(Tx, m_171_deconvolved_cutout.data[sl], label=\"Deconvolved\")\n", + "ax.set_ylabel(f\"Intensity [{m_171_cutout.unit}]\")\n", + "ax.set_xlabel(r\"Helioprojective Longitude [arcsec]\")\n", + "ax.legend(loc=\"upper center\", ncol=2, frameon=False, bbox_to_anchor=(0.5, 1.15))\n", + "plt.tight_layout()" ] }, { @@ -490,7 +498,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_respiked = respike(m_171)\n" + "m_171_respiked = respike(m_171)" ] }, { @@ -519,7 +527,7 @@ "ax = fig.add_subplot(122, projection=m_171_respiked)\n", "m_171_respiked.plot(axes=ax)\n", "ax.set_title(f\"Respiked (Level {m_171_respiked.processing_level})\")\n", - "ax.coords[1].set_axislabel(' ')\n" + "ax.coords[1].set_axislabel(\" \")" ] }, { @@ -536,7 +544,7 @@ "metadata": {}, "outputs": [], "source": [ - "pix_coords, vals = fetch_spikes(m_171,)\n" + "pix_coords, vals = fetch_spikes(m_171)" ] }, { @@ -552,7 +560,7 @@ "metadata": {}, 
"outputs": [], "source": [ - "vals_despiked = m_171.data[pix_coords.y.value.round().astype(int), pix_coords.x.value.round().astype(int)]\n" + "vals_despiked = m_171.data[pix_coords.y.value.round().astype(int), pix_coords.x.value.round().astype(int)]" ] }, { @@ -584,11 +592,11 @@ } ], "source": [ - "plt.hist(vals, bins='scott', log=True, histtype='step', label='Respiked');\n", - "plt.hist(vals_despiked, bins='scott', log=True, histtype='step', label='Despiked');\n", + "plt.hist(vals, bins=\"scott\", log=True, histtype=\"step\", label=\"Respiked\")\n", + "plt.hist(vals_despiked, bins=\"scott\", log=True, histtype=\"step\", label=\"Despiked\")\n", "plt.legend()\n", - "plt.xlabel(f'Intensity [{m_171.unit.to_string()}]')\n", - "plt.ylabel('Number of Pixels')\n" + "plt.xlabel(f\"Intensity [{m_171.unit.to_string()}]\")\n", + "plt.ylabel(\"Number of Pixels\")" ] }, { @@ -606,7 +614,7 @@ "metadata": {}, "outputs": [], "source": [ - "spike_coords = m_171.pixel_to_world(pix_coords.x, pix_coords.y)\n" + "spike_coords = m_171.pixel_to_world(pix_coords.x, pix_coords.y)" ] }, { @@ -641,7 +649,7 @@ "fig = plt.figure()\n", "ax = fig.add_subplot(111, projection=m_171_respiked)\n", "m_171_respiked.plot(axes=ax)\n", - "ax.plot_coord(spike_coords, marker='.', ls=' ', markersize=1)\n" + "ax.plot_coord(spike_coords, marker=\".\", ls=\" \", markersize=1)" ] }, { @@ -679,7 +687,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_up = update_pointing(m_171)\n" + "m_171_up = update_pointing(m_171)" ] }, { @@ -706,7 +714,7 @@ } ], "source": [ - "m_171.reference_pixel\n" + "m_171.reference_pixel" ] }, { @@ -726,7 +734,7 @@ } ], "source": [ - "m_171_up.reference_pixel\n" + "m_171_up.reference_pixel" ] }, { @@ -752,7 +760,7 @@ } ], "source": [ - "m_171_L15 = register(m_171_up)\n" + "m_171_l15 = register(m_171_up)" ] }, { @@ -780,7 +788,7 @@ ], "source": [ "print(m_171_up.scale)\n", - "print(m_171_up.rotation_matrix)\n" + "print(m_171_up.rotation_matrix)" ] }, { @@ -799,8 +807,8 @@ } ], "source": [ - "print(m_171_L15.scale)\n", - "print(m_171_L15.rotation_matrix)\n" + "print(m_171_l15.scale)\n", + "print(m_171_l15.rotation_matrix)" ] }, { @@ -817,7 +825,7 @@ "outputs": [], "source": [ "def prep(smap):\n", - " return register(update_pointing(smap))\n" + " return register(update_pointing(smap))" ] }, { @@ -834,7 +842,7 @@ } ], "source": [ - "m_335_L15 = prep(m_335)\n" + "m_335_l15 = prep(m_335)" ] }, { @@ -860,8 +868,8 @@ } ], "source": [ - "print(m_335_L15.scale)\n", - "print(m_335_L15.rotation_matrix)\n" + "print(m_335_l15.scale)\n", + "print(m_335_l15.rotation_matrix)" ] }, { @@ -907,9 +915,9 @@ "metadata": {}, "outputs": [], "source": [ - "t_begin = parse_time('2010-03-25T00:00:00')\n", + "t_begin = parse_time(\"2010-03-25T00:00:00\")\n", "now = astropy.time.Time.now()\n", - "time_window = t_begin + np.arange(0, (now - t_begin).to(u.day).value, 7) * u.day\n" + "time_window = t_begin + np.arange(0, (now - t_begin).to(u.day).value, 7) * u.day" ] }, { @@ -939,7 +947,7 @@ } ], "source": [ - "correction_table = get_correction_table()\n" + "correction_table = get_correction_table()" ] }, { @@ -955,7 +963,7 @@ "metadata": {}, "outputs": [], "source": [ - "d_335 = degradation(m_335.wavelength, time_window, correction_table=correction_table)\n" + "d_335 = degradation(m_335.wavelength, time_window, correction_table=correction_table)" ] }, { @@ -971,7 +979,7 @@ "metadata": {}, "outputs": [], "source": [ - "d_335_map = degradation(m_335.wavelength, m_335.date, correction_table=correction_table)\n" + "d_335_map = 
degradation(m_335.wavelength, m_335.date, correction_table=correction_table)" ] }, { @@ -990,7 +998,7 @@ "outputs": [], "source": [ "d_335_v9 = degradation(m_335.wavelength, time_window, calibration_version=9, correction_table=correction_table)\n", - "d_335_v8 = degradation(m_335.wavelength, time_window, calibration_version=8, correction_table=correction_table)\n" + "d_335_v8 = degradation(m_335.wavelength, time_window, calibration_version=8, correction_table=correction_table)" ] }, { @@ -1029,13 +1037,13 @@ } ], "source": [ - "with time_support(format='jyear'):\n", - " plt.plot(time_window, d_335, label='v10')\n", - " plt.plot(time_window, d_335_v9, label='v9')\n", - " plt.plot(time_window, d_335_v8, label='v8')\n", - " plt.plot(m_335.date, d_335_map, linestyle='', marker='o', color='C0', label=m_335.date.iso)\n", - "plt.ylabel('Degradation 335 Å')\n", - "plt.legend()\n" + "with time_support(format=\"jyear\"):\n", + " plt.plot(time_window, d_335, label=\"v10\")\n", + " plt.plot(time_window, d_335_v9, label=\"v9\")\n", + " plt.plot(time_window, d_335_v8, label=\"v8\")\n", + " plt.plot(m_335.date, d_335_map, linestyle=\"\", marker=\"o\", color=\"C0\", label=m_335.date.iso)\n", + "plt.ylabel(\"Degradation 335 Å\")\n", + "plt.legend()" ] }, { @@ -1051,7 +1059,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_335_corrected = correct_degradation(m_335, correction_table=correction_table)\n" + "m_335_corrected = correct_degradation(m_335, correction_table=correction_table)" ] }, { @@ -1075,10 +1083,10 @@ "source": [ "fig = plt.figure(figsize=(7, 3))\n", "ax = fig.add_subplot(121, projection=m_335)\n", - "m_335.plot(axes=ax, vmin=0, vmax=2.5e3,title='Uncorrected')\n", + "m_335.plot(axes=ax, vmin=0, vmax=2.5e3, title=\"Uncorrected\")\n", "ax = fig.add_subplot(122, projection=m_335_corrected)\n", - "m_335_corrected.plot(axes=ax, vmin=0, vmax=2.5e3, title='Corrected')\n", - "ax.coords[1].set_axislabel(' ')\n" + "m_335_corrected.plot(axes=ax, vmin=0, vmax=2.5e3, title=\"Corrected\")\n", + "ax.coords[1].set_axislabel(\" \")" ] }, { @@ -1123,7 +1131,7 @@ } ], "source": [ - "errors_171 = estimate_error(m_171_L15.quantity/u.pix, m_171_L15.wavelength)\n" + "errors_171 = estimate_error(m_171_l15.quantity / u.pix, m_171_l15.wavelength)" ] }, { @@ -1139,7 +1147,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_errors = sunpy.map.Map(errors_171.value, m_171_L15.meta)\n" + "m_171_errors = sunpy.map.Map(errors_171.value, m_171_l15.meta)" ] }, { @@ -1161,7 +1169,7 @@ } ], "source": [ - "m_171_errors.peek(norm=ImageNormalize(vmax=50))\n" + "m_171_errors.peek(norm=ImageNormalize(vmax=50))" ] }, { @@ -1188,7 +1196,7 @@ } ], "source": [ - "errors_171_chianti = estimate_error(m_171_L15.quantity/u.pix, m_171_L15.wavelength, include_chianti=True)\n" + "errors_171_chianti = estimate_error(m_171_l15.quantity / u.pix, m_171_l15.wavelength, include_chianti=True)" ] }, { @@ -1204,7 +1212,7 @@ "metadata": {}, "outputs": [], "source": [ - "errors_171_eve = estimate_error(m_171_L15.quantity/u.pix, m_171_L15.wavelength, include_eve=True)\n" + "errors_171_eve = estimate_error(m_171_l15.quantity / u.pix, m_171_l15.wavelength, include_eve=True)" ] }, { @@ -1244,14 +1252,14 @@ } ], "source": [ - "hist_params = {'bins': np.logspace(0,4,50), 'histtype': 'step', 'log': True}\n", - "plt.hist(errors_171.value.flatten(), **hist_params, label='Nominal');\n", - "plt.hist(errors_171_chianti.value.flatten(), **hist_params, label='CHIANTI');\n", - "plt.hist(errors_171_eve.value.flatten(), **hist_params, label='Photometric 
(EVE)');\n", - "plt.xlabel('Uncertainty [ct/pix]')\n", - "plt.ylabel('Number of Pixels')\n", - "plt.xscale('log')\n", - "plt.legend(frameon=False)\n" + "hist_params = {\"bins\": np.logspace(0, 4, 50), \"histtype\": \"step\", \"log\": True}\n", + "plt.hist(errors_171.value.flatten(), **hist_params, label=\"Nominal\")\n", + "plt.hist(errors_171_chianti.value.flatten(), **hist_params, label=\"CHIANTI\")\n", + "plt.hist(errors_171_eve.value.flatten(), **hist_params, label=\"Photometric (EVE)\")\n", + "plt.xlabel(\"Uncertainty [ct/pix]\")\n", + "plt.ylabel(\"Number of Pixels\")\n", + "plt.xscale(\"log\")\n", + "plt.legend(frameon=False)" ] }, { @@ -1269,7 +1277,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_norm = normalize_exposure(m_171_L15)\n" + "m_171_norm = normalize_exposure(m_171_l15)" ] }, { @@ -1294,8 +1302,8 @@ } ], "source": [ - "print(m_171_L15.unit)\n", - "print(m_171_norm.unit)\n" + "print(m_171_l15.unit)\n", + "print(m_171_norm.unit)" ] }, { @@ -1320,8 +1328,8 @@ } ], "source": [ - "print(m_171_L15.exposure_time)\n", - "print(m_171_norm.exposure_time)\n" + "print(m_171_l15.exposure_time)\n", + "print(m_171_norm.exposure_time)" ] }, { @@ -1337,7 +1345,7 @@ "metadata": {}, "outputs": [], "source": [ - "m_171_norm = m_171_L15 / m_171_L15.exposure_time\n" + "m_171_norm = m_171_l15 / m_171_l15.exposure_time" ] }, { @@ -1361,7 +1369,7 @@ } ], "source": [ - "print(m_171_norm.unit)\n" + "print(m_171_norm.unit)" ] }, { @@ -1378,7 +1386,7 @@ } ], "source": [ - "print(m_171_norm.exposure_time)\n" + "print(m_171_norm.exposure_time)" ] }, { @@ -1401,7 +1409,7 @@ "metadata": {}, "outputs": [], "source": [ - "c = Channel(m_335.wavelength)\n" + "c = Channel(m_335.wavelength)" ] }, { @@ -1420,7 +1428,7 @@ ], "source": [ "print(c.channel)\n", - "print(c.telescope_number)\n" + "print(c.telescope_number)" ] }, { @@ -1446,7 +1454,7 @@ "metadata": {}, "outputs": [], "source": [ - "r = c.wavelength_response()\n" + "r = c.wavelength_response()" ] }, { @@ -1481,7 +1489,7 @@ "print(c.focal_plane_filter_efficiency)\n", "print(c.contamination)\n", "print(c.quantum_efficiency)\n", - "print(c.gain)\n" + "print(c.gain)" ] }, { @@ -1511,7 +1519,7 @@ ], "source": [ "r_time = c.wavelength_response(obstime=m_335.date)\n", - "r_time_eve = c.wavelength_response(obstime=m_335.date, include_eve_correction=True)\n" + "r_time_eve = c.wavelength_response(obstime=m_335.date, include_eve_correction=True)" ] }, { @@ -1543,14 +1551,14 @@ } ], "source": [ - "plt.plot(c.wavelength,r,label='Uncorrected')\n", - "plt.plot(c.wavelength,r_time,label='Degradation')\n", - "plt.plot(c.wavelength,r_time_eve,label='Degradation + EVE')\n", - "plt.xlim([315,355])\n", - "plt.ylim([0,0.03])\n", - "plt.xlabel('$\\lambda$ [Å]')\n", - "plt.ylabel(f'$R(\\lambda)$ [{r.unit.to_string(format=\"latex_inline\")}]')\n", - "plt.legend(loc=2, frameon=False)\n" + "plt.plot(c.wavelength, r, label=\"Uncorrected\")\n", + "plt.plot(c.wavelength, r_time, label=\"Degradation\")\n", + "plt.plot(c.wavelength, r_time_eve, label=\"Degradation + EVE\")\n", + "plt.xlim([315, 355])\n", + "plt.ylim([0, 0.03])\n", + "plt.xlabel(r\"$\\lambda$ [Å]\")\n", + "plt.ylabel(rf'$R(\\lambda)$ [{r.unit.to_string(format=\"latex_inline\")}]')\n", + "plt.legend(loc=2, frameon=False)" ] }, { diff --git a/posts/2024/2024-04-03-eclipse.ipynb b/posts/2024/2024-04-03-eclipse.ipynb index c62828cd..54e9dfe9 100644 --- a/posts/2024/2024-04-03-eclipse.ipynb +++ b/posts/2024/2024-04-03-eclipse.ipynb @@ -47,29 +47,29 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as 
np\n", + "from pathlib import Path\n", + "\n", + "import astropy.units as u\n", + "import exifread\n", "import matplotlib.image\n", "import matplotlib.pyplot as plt\n", - "import astropy.units as u\n", - "import scipy.ndimage as ndimage\n", - "\n", + "import numpy as np\n", + "import sunpy.coordinates\n", + "import sunpy.coordinates.sun\n", "from astropy.constants import R_earth\n", "from astropy.coordinates import CartesianRepresentation, EarthLocation, SkyCoord\n", + "\n", + "# We have defined a few helper functions in this `eclipse_helpers.py` file.\n", + "from eclipse_helpers import SOLAR_ECLIPSE_IMAGE, get_camera_metadata\n", "from matplotlib.patches import Circle\n", + "from scipy import ndimage\n", "from skimage.color import rgb2gray\n", "from skimage.feature import peak_local_max\n", "from skimage.transform import hough_circle, hough_circle_peaks\n", - "\n", - "import sunpy.coordinates.sun\n", - "import sunpy.coordinates\n", "from sunpy.map.header_helper import make_fitswcs_header\n", - "from sunpy.net import Fido, attrs as a\n", - "from sunpy.time import parse_time\n", - "\n", - "import exifread\n", - "\n", - "# We have defined a few helper functions in this `eclipse_helpers.py` file.\n", - "from eclipse_helpers import SOLAR_ECLIPSE_IMAGE, get_camera_metadata" + "from sunpy.net import Fido\n", + "from sunpy.net import attrs as a\n", + "from sunpy.time import parse_time" ] }, { @@ -127,7 +127,7 @@ "metadata": {}, "outputs": [], "source": [ - "with open(SOLAR_ECLIPSE_IMAGE, mode='rb') as f:\n", + "with Path(SOLAR_ECLIPSE_IMAGE).open(\"rb\") as f:\n", " tags = exifread.process_file(f)" ] }, @@ -156,7 +156,7 @@ "metadata": {}, "outputs": [], "source": [ - "camera_metadata[\"time\"] = parse_time('2017-08-21 17:27:13')" + "camera_metadata[\"time\"] = parse_time(\"2017-08-21 17:27:13\")" ] }, { @@ -209,7 +209,7 @@ "outputs": [], "source": [ "label_im, nb_labels = ndimage.label(mask)\n", - "slice_y, slice_x = ndimage.find_objects(label_im==1)[0]\n", + "slice_y, slice_x = ndimage.find_objects(label_im == 1)[0]\n", "roi = blur_im[slice_y, slice_x]" ] }, @@ -229,8 +229,8 @@ "metadata": {}, "outputs": [], "source": [ - "sx = ndimage.sobel(roi, axis=1, mode='constant')\n", - "sy = ndimage.sobel(roi, axis=0, mode='constant')\n", + "sx = ndimage.sobel(roi, axis=1, mode=\"constant\")\n", + "sy = ndimage.sobel(roi, axis=0, mode=\"constant\")\n", "sob = np.hypot(sx, sy)" ] }, @@ -250,7 +250,7 @@ "metadata": {}, "outputs": [], "source": [ - "hough_radii = np.arange(np.floor(np.mean(sob.shape)/4), np.ceil(np.mean(sob.shape)/2), 10)\n", + "hough_radii = np.arange(np.floor(np.mean(sob.shape) / 4), np.ceil(np.mean(sob.shape) / 2), 10)\n", "hough_res = hough_circle(sob > (sob.mean() * 5), hough_radii)\n", "\n", "# Select the most prominent circle\n", @@ -277,13 +277,15 @@ "source": [ "fig, ax = plt.subplots(ncols=3, nrows=1, figsize=(9.5, 6))\n", "ax[0].imshow(im[slice_y, slice_x])\n", - "ax[0].set_title('Original')\n", + "ax[0].set_title(\"Original\")\n", "ax[1].imshow(sob > (sob.mean() * 5))\n", - "ax[1].set_title('Sobel')\n", - "circ = Circle([cx, cy], radius=radii, facecolor='none', edgecolor='red', linewidth=2, linestyle='dashed', label='Hough fit')\n", + "ax[1].set_title(\"Sobel\")\n", + "circ = Circle(\n", + " [cx, cy], radius=radii, facecolor=\"none\", edgecolor=\"red\", linewidth=2, linestyle=\"dashed\", label=\"Hough fit\"\n", + ")\n", "ax[2].imshow(im[slice_y, slice_x])\n", "ax[2].add_patch(circ)\n", - "ax[2].set_title('Original with fit')\n", + "ax[2].set_title(\"Original with fit\")\n", 
"plt.legend()\n", "plt.show()" ] @@ -361,10 +363,10 @@ "metadata": {}, "outputs": [], "source": [ - "moon = SkyCoord(sunpy.coordinates.get_body_heliographic_stonyhurst('moon', camera_metadata[\"time\"], observer=observer))\n", + "moon = SkyCoord(sunpy.coordinates.get_body_heliographic_stonyhurst(\"moon\", camera_metadata[\"time\"], observer=observer))\n", "R_moon = 0.2725076 * R_earth # IAU mean radius\n", "dist_moon = SkyCoord(observer).separation_3d(moon)\n", - "moon_obs = np.arcsin(R_moon / dist_moon).to('arcsec')\n", + "moon_obs = np.arcsin(R_moon / dist_moon).to(\"arcsec\")\n", "print(moon_obs)" ] }, @@ -456,7 +458,7 @@ "metadata": {}, "outputs": [], "source": [ - "fig = plt.figure(figsize=(9,9))\n", + "fig = plt.figure(figsize=(9, 9))\n", "ax = plt.subplot(projection=eclipse_map)\n", "eclipse_map.plot(axes=ax)\n", "plt.show()" @@ -492,7 +494,7 @@ "metadata": {}, "outputs": [], "source": [ - "regulus = SkyCoord(ra='10h08m22.311s', dec='11d58m01.95s', distance=79.3 * u.lightyear, frame='icrs')\n", + "regulus = SkyCoord(ra=\"10h08m22.311s\", dec=\"11d58m01.95s\", distance=79.3 * u.lightyear, frame=\"icrs\")\n", "print(regulus)" ] }, @@ -513,14 +515,15 @@ "metadata": {}, "outputs": [], "source": [ - "fig = plt.figure(figsize=(9,9))\n", + "fig = plt.figure(figsize=(9, 9))\n", "ax = plt.subplot(projection=eclipse_map)\n", - "eclipse_map.plot(axes=ax, clip_interval=(0,90)*u.percent)\n", - "ax.plot_coord(regulus, 'o', markeredgewidth=0.5, markeredgecolor='w',\n", - " markerfacecolor='None', markersize=10, label='Regulus')\n", + "eclipse_map.plot(axes=ax, clip_interval=(0, 90) * u.percent)\n", + "ax.plot_coord(\n", + " regulus, \"o\", markeredgewidth=0.5, markeredgecolor=\"w\", markerfacecolor=\"None\", markersize=10, label=\"Regulus\"\n", + ")\n", "plt.legend()\n", - "plt.xlim(100,500)\n", - "plt.ylim(0,500)\n", + "plt.xlim(100, 500)\n", + "plt.ylim(0, 500)\n", "plt.show()" ] }, @@ -541,7 +544,10 @@ "outputs": [], "source": [ "regulus_pixel = CartesianRepresentation(*eclipse_map.wcs.world_to_pixel(regulus), 0) * u.pix\n", - "sun_pixel = CartesianRepresentation(*eclipse_map.wcs.world_to_pixel(SkyCoord(0*u.arcsec, 0*u.arcsec, frame=frame)), 0) * u.pix\n", + "sun_pixel = (\n", + " CartesianRepresentation(*eclipse_map.wcs.world_to_pixel(SkyCoord(0 * u.arcsec, 0 * u.arcsec, frame=frame)), 0)\n", + " * u.pix\n", + ")\n", "regulus_r = (regulus_pixel - sun_pixel).norm()\n", "print(regulus_r)" ] @@ -566,7 +572,7 @@ "xx, yy = np.meshgrid(pix_x, pix_y)\n", "r = np.sqrt(xx**2 + yy**2)\n", "\n", - "filter_r = regulus_r - (regulus_r/5)\n", + "filter_r = regulus_r - (regulus_r / 5)\n", "\n", "masked = im.copy()\n", "masked[r < filter_r] = masked.min()" @@ -682,14 +688,15 @@ "metadata": {}, "outputs": [], "source": [ - "fig = plt.figure(figsize=(9,9))\n", + "fig = plt.figure(figsize=(9, 9))\n", "ax = plt.subplot(projection=eclipse_map)\n", - "eclipse_map.plot(axes=ax, clip_interval=(0,90)*u.percent)\n", - "ax.plot_coord(regulus, 'o', markeredgewidth=0.5, markeredgecolor='w',\n", - " markerfacecolor='None', markersize=10, label='Regulus')\n", + "eclipse_map.plot(axes=ax, clip_interval=(0, 90) * u.percent)\n", + "ax.plot_coord(\n", + " regulus, \"o\", markeredgewidth=0.5, markeredgecolor=\"w\", markerfacecolor=\"None\", markersize=10, label=\"Regulus\"\n", + ")\n", "plt.legend()\n", - "plt.xlim(100,500)\n", - "plt.ylim(0,500)\n", + "plt.xlim(100, 500)\n", + "plt.ylim(0, 500)\n", "plt.show()" ] }, @@ -714,9 +721,9 @@ "outputs": [], "source": [ "aia_result = Fido.search(\n", - " a.Time('2017-08-21 17:27:00', 
\"2017-08-21 17:45:00\", eclipse_map.date),\n", - " a.Instrument('AIA'),\n", - " a.Wavelength(171*u.Angstrom)\n", + " a.Time(\"2017-08-21 17:27:00\", \"2017-08-21 17:45:00\", eclipse_map.date),\n", + " a.Instrument(\"AIA\"),\n", + " a.Wavelength(171 * u.Angstrom),\n", ")\n", "print(aia_result)" ] @@ -728,7 +735,7 @@ "metadata": {}, "outputs": [], "source": [ - "files = Fido.fetch(aia_result[0,0])" + "files = Fido.fetch(aia_result[0, 0])" ] }, { @@ -757,7 +764,7 @@ "metadata": {}, "outputs": [], "source": [ - "fig = plt.figure(figsize=(9,9))\n", + "fig = plt.figure(figsize=(9, 9))\n", "ax = plt.subplot(projection=eclipse_map)\n", "eclipse_map.plot(axes=ax)\n", "aia_map.plot(axes=ax, autoalign=True)\n", diff --git a/posts/2024/2024-08-09-anaconda.md b/posts/2024/2024-08-09-anaconda.md index f57ef13f..cb15d90c 100644 --- a/posts/2024/2024-08-09-anaconda.md +++ b/posts/2024/2024-08-09-anaconda.md @@ -7,9 +7,9 @@ category: Information # Anaconda packages are not "free" -We wanted to inform the wider community about [Anaconda](https://www.anaconda.com/) and if you are legally allowed to use it for free. +We wanted to inform the wider community about [Anaconda Inc.](https://www.anaconda.com/) and if you are legally allowed to use their products for free. -What is not commonly known is that Anaconda has specific clauses in its license which determine if one is able to use it for free, based on the organization you work for. +What is not commonly known is that Anaconda Inc. has implemented specific clauses in its software licenses which determine if one is able to use Anaconda or Anaconda Navigator for free, based on the organization you work for. For example, [The Register](https://www.theregister.com/2024/08/08/anaconda_puts_the_squeeze_on/) has reported that: > Research and academic organizations are just now finding out that they will have to pay for software made by Anaconda, when for years these groups were under the impression it could be used at no cost. @@ -39,8 +39,9 @@ I want to define a few words which will be used heavily below: - Anaconda's "defaults" channel, which is used for the base environment. It is used if one installs the `Anaconda` distribution or the `miniconda` distribution. Anything that is "curated, built, maintained, and served by Anaconda's engineers on its secure cloud infrastructure", has these licenses. +- [Anaconda Navigator](https://docs.anaconda.com/navigator/) which is a Graphical User Interface (GUI) that many people use to manage their Anaconda install. -**Using the defaults channel can lead to you becoming legally required to pay Anaconda for the use the defaults channel.** +**Using the defaults channel or Anaconda Navigator can lead to you becoming legally required to pay Anaconda Inc. for their use.** If you want to use these for free, you have to meet the following conditions: @@ -62,24 +63,25 @@ and > In addition, we're looking into a flexible pricing structure for non-profit organizations with over 200 full-time employees -Hopefully, by the end of the year with these changes in place, users will have a clear understanding if they are allowed to use Anaconda for free. +Hopefully, by the end of the year with these changes in place, users will have a clear understanding if they are allowed to use Anaconda Inc. products for free. ## What is the alternative? The answer from the SunPy Project is the following: -- Using the channels provided by "conda-forge". 
- conda-forge is a community-led project which creates recipes, hosts infrastructure and distributions for use with conda. They provide a distribution (similar to `miniconda`) called [miniforge](https://github.com/conda-forge/miniforge) is configured to use the conda-forge channel by default. It is also the only way to [install any SunPy Project library](https://docs.sunpy.org/en/stable/tutorial/installation.html#installing-miniforge) via conda. -- Never setup or use the defaults "channel" if you install `miniforge`. +- Use the "conda-forge" channel provided by the conda-forge project. + conda-forge is a community-led project which creates recipes, hosts infrastructure and distributions for use with `conda`. + They provide a distribution (similar to `miniconda`) called [miniforge](https://github.com/conda-forge/miniforge), which is configured to use the "conda-forge" channel by default. + It is also the only way to [install any SunPy Project library](https://docs.sunpy.org/en/stable/tutorial/installation.html#installing-miniforge) via `conda`. +- Never set up or use the "defaults" channel if you install `miniforge`. A blog post written by Tim de Jager & Ruben Arts [summarizes how conda-forge is free and avoids any vendor lock in.](https://prefix.dev/blog/towards_a_vendor_lock_in_free_conda_experience) -## I have Anaconda already - What should I do? +## I have Anaconda or Anaconda Navigator already - What should I do? Though, you could get rid of the default channel(s) using the `conda config` command as shown [in this Stack Overflow answer about switching channels from anaconda to conda-forge](https://stackoverflow.com/a/67708768). This is good for new environments but it doesn't remove what you've got already installed (e.g., the `base` environment) and you may be still infringing the Terms of Services for Anaconda. +If you have Anaconda Navigator, you have to remove it unless you know that you are not violating the license requirements. -Therefore, the cleanest way would be to remove Anaconda completely and install [miniforge](https://docs.sunpy.org/en/stable/tutorial/installation.html#installing-miniforge). -Unfortunately, there is no automated way of recreating all of your environments, and [this post about migrating from Anaconda to miniforge](https://it.martinos.org/help/migrating-anaconda-miniconda-install-to-a-miniforge-install/) details the steps that one has to follow to migrate. +Therefore, the cleanest method is to completely remove Anaconda and Anaconda Navigator and install [`miniforge`](https://docs.sunpy.org/en/stable/tutorial/installation.html#installing-miniforge). +Unfortunately, there is no automated way of recreating all of your environments, and [this post about migrating from Anaconda to `miniforge`](https://it.martinos.org/help/migrating-anaconda-miniconda-install-to-a-miniforge-install/) details the steps that one has to follow to migrate.
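For readers who want to follow the `conda config` route mentioned in the post before (or instead of) a full `miniforge` migration, a minimal sketch of that cleanup on an existing install might look like the following; the channel names are the standard "defaults" and "conda-forge" ones, so check what your own install has configured first:

```bash
# See which channels the install is currently pulling from
conda config --show channels

# Stop using Anaconda's "defaults" channel for new packages
conda config --remove channels defaults

# Prefer the community-run conda-forge channel instead, with strict priority
conda config --add channels conda-forge
conda config --set channel_priority strict
```

As the post notes, this only changes where new packages come from; anything already installed from "defaults" (such as the `base` environment) stays in place, which is why a clean `miniforge` install remains the recommended path.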
diff --git a/ruff.toml b/ruff.toml index 1209f362..dfeb00bb 100644 --- a/ruff.toml +++ b/ruff.toml @@ -2,64 +2,23 @@ lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" target-version = "py39" line-length = 120 -exclude=[ +extend-exclude=[ ".git,", "__pycache__", "build", "tools/**", ] lint.select = [ - "A", - "ARG", - "ASYNC", - "B", - "BLE", - "C4", - "COM", - "DTZ", - "E", - "EM", - "ERA", - "EXE", - "F", - "FBT", - "FLY", - "G", - "I", - "ICN", - "INP", - "INT", - "ISC", - "LOG", - "NPY", - "PERF", - "PGH", - "PIE", - "PLE", - "PT", - "PTH", - "PYI", - "Q", - "RET", - "RSE", - "RUF", - "SIM", - "SLF", - "SLOT", - "T10", - "T20", - "TCH", - "TID", - "TRIO", - "TRY", - "UP", - "W", - "YTT", + "ALL", ] lint.extend-ignore = [ + "ANN", # Ignore missing annotations "COM812", # May cause conflicts when used with the formatter - "E501", # Line too long + "D", # Ignore docstrings + "E501", # Line too long + "E741", # Ambiguous variable name "ISC001", # May cause conflicts when used with the formatter + "PD011", # Use `.to_numpy()` instead of `.values` "T201", # Print statements ]
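With `lint.select = ["ALL"]`, ruff enables every rule family and the `lint.extend-ignore` list opts back out of specific codes, rather than maintaining the long explicit selection that was removed. Assuming `ruff` and `pre-commit` are installed locally, one quick way to see what the broader selection flags on the repository is:

```bash
# Summarize which rules now trigger across the repository
ruff check . --statistics

# Or run the same hook pinned in .pre-commit-config.yaml (applies --fix --unsafe-fixes)
pre-commit run ruff --all-files
```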