{
"cells": [
{
"cell_type": "markdown",
"id": "bcdfb138",
"metadata": {},
"source": [
"# IC/ARA Coincident Simulation Events Analysis"
]
},
{
"cell_type": "markdown",
"id": "c7ad7c80",
"metadata": {},
"source": [
"### Settings and imports"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "b6915a86",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<style>.container { width:75% !important; }</style>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"## Makes this notebook maximally wide\n",
"from IPython.display import display, HTML\n",
"display(HTML(\"<style>.container { width:75% !important; }</style>\"))"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "6ef9e20b",
"metadata": {},
"outputs": [],
"source": [
"## Author: Alex Machtay (machtay.1@osu.edu)\n",
"## Modified by: Alan Salcedo (salcedogomez.1@osu.edu)\n",
"## Date: 4/26/23\n",
"\n",
"## Purpose:\n",
"### This script will read the data files produced by AraRun_corrected_MultStat.py to make histograms and relevant plots of\n",
"### neutrino events passing through IceCube and detected by an ARA station (in AraSim)\n",
"\n",
"\n",
"## Imports\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"import sys\n",
"sys.path.append(\"/users/PAS0654/osu8354/root6_18_build/lib\") # go to parent dir\n",
"sys.path.append(\"/users/PCON0003/cond0068/.local/lib/python3.6/site-packages\")\n",
"import math\n",
"import argparse\n",
"import glob\n",
"import pandas as pd\n",
"pd.options.mode.chained_assignment = None # default='warn'\n",
"from mpl_toolkits.mplot3d import Axes3D\n",
"import jupyterthemes as jt"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "f24b8292",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/bin/bash: jt: command not found\r\n"
]
}
],
"source": [
"## Set style for the jupyter notebook\n",
"!jt -t grade3 -T -N -kl -lineh 160 -f code -fs 14 -ofs 14 -cursc o\n",
"jt.jtplot.style('grade3', gridlines='')"
]
},
{
"cell_type": "markdown",
"id": "a5a812d6",
"metadata": {},
"source": [
"### Set constants\n",
"#### These are things like the position of ARA station holes, the South Pole, IceCube's position in ARA station's coordinates, and IceCube's radius"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "cdb851de",
"metadata": {},
"outputs": [],
"source": [
"## What IceCube (IC) station are you analyzing\n",
"station = 1\n",
"\n",
"## What's the radius around ARA where neutrinos were injected\n",
"inj_rad = 5 #in km\n",
"\n",
"## IceCube's center relative to each ARA station\n",
"IceCube = [[-1128.08, -2089.42, -1942.39], [-335.812, -3929.26, -1938.23],\n",
" [-2320.67, -3695.78, -1937.35], [-3153.04, -1856.05, -1942.81], [472.49, -5732.98, -1922.06]] #IceCube's position relative to A1, A2, or A3\n",
"\n",
"#To calculate this, we need to do some coordinate transformations. Refer to this notebook to see the calculations: \n",
"# IceCube_Relative_to_ARA_Stations.ipynb (found here - http://radiorm.physics.ohio-state.edu/elog/How-To/48) \n",
"\n",
"## IceCube's radius\n",
"IceCube_radius = 564.189583548 #Modelling IceCube as a cylinder, we find the radius with V = h * pi*r^2 with V = 1x10^9 m^3 and h = 1x10^3 m "
]
},
{
"cell_type": "markdown",
"id": "af5c6fc2",
"metadata": {},
"source": [
"### Read the data\n",
"\n",
"#### Once we import the data, we'll make dataframes to concatenate it and make some calculations"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "921f61f8",
"metadata": {},
"outputs": [],
"source": [
"## Import data files\n",
"\n",
"#Here, it's from OSC Connolly's group project space\n",
"source = '/fs/project/PAS0654/IceCube_ARA_Coincident_Search/AraSim/outputs/Coincident_Search_Runs/20M_GZK_5km_S1_correct' \n",
"num_files = 200 # Number of files to read in from the source directory\n",
"\n",
"## Make a list of all of the paths to check \n",
"file_list = []\n",
"for i in range(1, num_files + 1):\n",
" for name in glob.glob(source + \"/\" + str(i) + \"/*.csv\"):\n",
" file_list.append(str(name))\n",
" #file_list gets paths to .csv files\n",
" \n",
"## Now read the csv files into a pandas dataframe\n",
"dfs = []\n",
"for filename in file_list:\n",
" df = pd.read_csv(filename, index_col=None, header=0) #Store each csv file into a pandas data frame\n",
" dfs.append(df) #Append the csv file to store all of them in one\n",
"frame = pd.concat(dfs, axis=0, ignore_index = True) #Concatenate pandas dataframes "
]
},
{
"cell_type": "markdown",
"id": "813fb3ee",
"metadata": {},
"source": [
"### Work with the data\n",
"\n",
"#### All the data from our coincidence simulations (made by AraRun_MultStat.sh) is now stored in a pandas data frame that we can work with"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "8b449583",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"19800000\n",
"19799802\n"
]
}
],
"source": [
"## Now let's clean up our data and calculate other relevant things\n",
"print(len(frame))\n",
"frame = frame[frame['weight'].between(0,1)] #Filter out events with ill-defined weight (should be between 0 and 1)\n",
"print(len(frame))\n",
"\n",
"frame['x-positions'] = (frame['Radius (m)'] * np.cos(frame['Azimuth (rad)']) * np.sin(frame['Zenith (rad)']))\n",
"frame['y-positions'] = (frame['Radius (m)'] * np.sin(frame['Azimuth (rad)']) * np.sin(frame['Zenith (rad)']))\n",
"frame['z-positions'] = (frame['Radius (m)'] * np.cos(frame['Zenith (rad)']))\n",
"\n",
"## The energy in eV will be 10 raised to the number in the file, multiplied by 1-y (y is inelasticity)\n",
"frame['Nu Energies (eV)'] = np.power(10, (frame['Energy (log10) (eV)']))\n",
"frame['Mu Energies (eV)'] = ((1-frame['Inelasticity']) * frame['Nu Energies (eV)']) #Energy of the produced lepton\n",
"#Here the lepton is not necessarily a muon, hence the label 'Mu Energies (eV)' may be misleading\n",
"\n",
"## Get a frame with only coincident events\n",
"coincident_frame = frame[frame['Coincident'] == 1] \n",
"\n",
"## And a frame for strictly events *detected* by ARA\n",
"detected_frame = frame[frame['Detected'] == 1]\n",
"\n",
"\n",
"## Now let's calculate the energy of the lepton when reaching IceCube (IC)\n",
"\n",
"# To do this correctly, I need to find exactly the distance traveled by the muon and apply the equation\n",
"# I need the trajectory of the muon to find the time it takes to reach IceCube, then I can find the distance it travels in that time\n",
"# I should allow events that occur inside the icecube volume to have their full energy (but pretty much will happen anyway)\n",
"## a = sin(Theta)*cos(Phi)\n",
"## b = sin(Theta)*sin(Phi)\n",
"## c = cos(Theta)\n",
"## a_0 = x-position\n",
"## b_0 = y-position\n",
"## c_0 = z-position\n",
"## x_0 = IceCube[0]\n",
"## y_0 = IceCube[1]\n",
"## z_0 = IceCube[2]\n",
"## t = (-(a*(a_0-x_0) + b*(b_0-y_0))+D**0.5)/(a**2+b**2)\n",
"## D = (a**2+b**2)*R_IC**2 - (a*(b_0-y_0)-b*(a_0-x_0))**2  # minus sign: cross term of line-circle intersection discriminant\n",
"## d = ((a*t)**2 + (b*t)**2 + (c*t)**2)**0.5\n",
"\n",
"## Trajectories\n",
"coincident_frame['a'] = (np.sin(coincident_frame['Theta (rad)'])*np.cos(coincident_frame['Phi (rad)']))\n",
"coincident_frame['b'] = (np.sin(coincident_frame['Theta (rad)'])*np.sin(coincident_frame['Phi (rad)']))\n",
"coincident_frame['c'] = (np.cos(coincident_frame['Theta (rad)']))\n",
"\n",
"## Discriminant\n",
"coincident_frame['D'] = ((coincident_frame['a']**2 + coincident_frame['b']**2)*IceCube_radius**2 - \n",
"                             (coincident_frame['a']*(coincident_frame['y-position (m)']-IceCube[station-1][1])- ## minus sign is correct: D = (a^2+b^2)*R^2 - (a*dy - b*dx)^2\n",
" coincident_frame['b']*(coincident_frame['x-position (m)']-IceCube[station-1][0]))**2)\n",
"\n",
"## Interaction time (this is actually the same as the distance traveled, at least for a straight line)\n",
"coincident_frame['t_1'] = (-(coincident_frame['a']*(coincident_frame['x-position (m)']-IceCube[station-1][0])+\n",
" coincident_frame['b']*(coincident_frame['y-position (m)']-IceCube[station-1][1]))+\n",
" np.sqrt(coincident_frame['D']))/(coincident_frame['a']**2+coincident_frame['b']**2)\n",
"coincident_frame['t_2'] = (-(coincident_frame['a']*(coincident_frame['x-position (m)']-IceCube[station-1][0])+\n",
" coincident_frame['b']*(coincident_frame['y-position (m)']-IceCube[station-1][1]))-\n",
" np.sqrt(coincident_frame['D']))/(coincident_frame['a']**2+coincident_frame['b']**2)\n",
"\n",
"## Intersection coordinates\n",
"coincident_frame['x-intersect_1'] = (coincident_frame['a'] * coincident_frame['t_1'] + coincident_frame['x-position (m)'])\n",
"coincident_frame['y-intersect_1'] = (coincident_frame['b'] * coincident_frame['t_1'] + coincident_frame['y-position (m)'])\n",
"coincident_frame['z-intersect_1'] = (coincident_frame['c'] * coincident_frame['t_1'] + coincident_frame['z-position (m)'])\n",
"\n",
"coincident_frame['x-intersect_2'] = (coincident_frame['a'] * coincident_frame['t_2'] + coincident_frame['x-position (m)'])\n",
"coincident_frame['y-intersect_2'] = (coincident_frame['b'] * coincident_frame['t_2'] + coincident_frame['y-position (m)'])\n",
"coincident_frame['z-intersect_2'] = (coincident_frame['c'] * coincident_frame['t_2'] + coincident_frame['z-position (m)'])\n",
"\n",
"## Distance traveled (same as the parametric time, at least for a straight line)\n",
"coincident_frame['d_1'] = (np.sqrt((coincident_frame['a']*coincident_frame['t_1'])**2+\n",
" (coincident_frame['b']*coincident_frame['t_1'])**2+\n",
" (coincident_frame['c']*coincident_frame['t_1'])**2))\n",
"coincident_frame['d_2'] = (np.sqrt((coincident_frame['a']*coincident_frame['t_2'])**2+\n",
" (coincident_frame['b']*coincident_frame['t_2'])**2+\n",
" (coincident_frame['c']*coincident_frame['t_2'])**2))\n",
"\n",
"## Check if it started inside and set the distance based on if it needs to travel to reach icecube or not\n",
"coincident_frame['Inside'] = (np.where((coincident_frame['t_1']/coincident_frame['t_2'] < 0) & (coincident_frame['z-position (m)'].between(-2450, -1450)), 1, 0))\n",
"coincident_frame['preliminary d'] = (np.where(coincident_frame['d_1'] <= coincident_frame['d_2'], coincident_frame['d_1'], coincident_frame['d_2']))\n",
"coincident_frame['d'] = (np.where(coincident_frame['Inside'] == 1, 0, coincident_frame['preliminary d']))\n",
"\n",
"## Check if the event lies in the cylinder\n",
"coincident_frame['In IC'] = (np.where((np.sqrt((coincident_frame['x-position (m)']-IceCube[station-1][0])**2 + (coincident_frame['y-position (m)']-IceCube[station-1][1])**2) < IceCube_radius) &\n",
" ((coincident_frame['z-position (m)']).between(-2450, -1450)) , 1, 0))\n",
"\n",
"#Correct coincident_frame to only have electron neutrinos inside IC\n",
"coincident_frame = coincident_frame[(((coincident_frame['In IC'] == 1) & (coincident_frame['flavor'] == 1)) | (coincident_frame['flavor'] == 2) | (coincident_frame['flavor'] == 3)) ]\n",
"\n",
"#Now calculate the lepton energies when they reach IC\n",
"coincident_frame['IC Mu Energies (eV)'] = (coincident_frame['Mu Energies (eV)'] * np.exp(-10**-5 * coincident_frame['d']*100)) # convert d from meters to cm\n",
"coincident_frame['weighted energies'] = (coincident_frame['weight'] * coincident_frame['Nu Energies (eV)'])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "f1757103",
"metadata": {
"scrolled": false
},
"outputs": [],
"source": [
"## Add possible Tau decay to the frame\n",
"coincident_frame['Tau decay'] = ''\n",
"# Again, the label 'Tau Decay' may be misleading because not all leptons may be taus\n",
"\n",
"## Calculate distance from the interaction point to its walls and keep the shortest (the first interaction with the volume)\n",
"\n",
"coincident_frame['distance-to-IC_1'] = np.sqrt((coincident_frame['x-positions'] - coincident_frame['x-intersect_1'])**2 + \n",
" (coincident_frame['y-positions'] - coincident_frame['y-intersect_1'])**2)\n",
"coincident_frame['distance-to-IC_2'] = np.sqrt((coincident_frame['x-positions'] - coincident_frame['x-intersect_2'])**2 + \n",
... 17889 more lines ...
|
{
"cells": [
{
"cell_type": "markdown",
"id": "a950b9af",
"metadata": {},
"source": [
"**This script is simply for me to calculate the location of IceCube relative to the origin of any ARA station**\n",
"\n",
"The relevant documentation to understand the definitions after the imports can be found in https://elog.phys.hawaii.edu/elog/ARA/130712_170712/doc.pdf"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "b926e2e3",
"metadata": {},
"outputs": [],
"source": [
"import numpy as np"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "901b442b",
"metadata": {},
"outputs": [],
"source": [
"#Definitions of translations in surveyor's coordinates:\n",
"\n",
"t_IC_to_ARAg = np.array([-24100, 1700, 6400])\n",
"t_ARAg_to_A1 = np.array([16401.71, -2835.37, -25.67])\n",
"t_ARAg_to_A2 = np.array([13126.7, -8519.62, -18.72])\n",
"t_ARAg_to_A3 = np.array([9848.35, -2835.19, -12.7])\n",
"\n",
"#Definitions of rotations from surveyor's axes to the ARA Station's coordinate systems\n",
"\n",
"R1 = np.array([[-0.598647, 0.801013, -0.000332979], [-0.801013, -0.598647, -0.000401329], \\\n",
" [-0.000520806, 0.0000264661, 1]])\n",
"R2 = np.array([[-0.598647, 0.801013, -0.000970507], [-0.801007, -0.598646,-0.00316072 ], \\\n",
" [-0.00311277, -0.00111477, 0.999995]])\n",
"R3 = np.array([[-0.598646, 0.801011, -0.00198193],[-0.801008, -0.598649,-0.00247504], \\\n",
" [-0.00316902, 0.000105871, 0.999995]])"
]
},
{
"cell_type": "markdown",
"id": "ab2d3206",
"metadata": {},
"source": [
"**Using these definitions, I should be able to calculate the location of IceCube relative to each ARA station by:**\n",
"\n",
"$$\n",
"\\vec{r}_{A 1}^{I C}=-R_1\\left(\\vec{t}_{I C}^{A R A}+\\vec{t}_{A R A}^{A 1}\\right)\n",
"$$\n",
"\n",
"We have a write-up of how to get this. Contact salcedogomez.1@osu.edu if you need that.\n",
"\n",
"Alex had done this already, he got that \n",
"\n",
"$$\n",
"\\vec{r}_{A 1}^{I C}=-3696.99^{\\prime} \\hat{x}-6843.56^{\\prime} \\hat{y}-6378.31^{\\prime} \\hat{z}\n",
"$$\n",
"\n",
"Let me verify that I get the same"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "912163d2",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"IC coordinates relative to A1 (in): [-3696.98956579 -6843.55800868 -6378.30926681]\n",
"IC coordinates relative to A1 (m): [-1127.13096518 -2086.4506124 -1944.60648378]\n",
"Distance of IC from A1 (m): 3066.788996234438\n"
]
}
],
"source": [
"IC_A1 = -R1 @ np.add(t_ARAg_to_A1, t_IC_to_ARAg).T\n",
"print(\"IC coordinates relative to A1 (in): \", IC_A1)\n",
"print(\"IC coordinates relative to A1 (m): \", IC_A1/3.28)\n",
"print(\"Distance of IC from A1 (m): \", np.sqrt((IC_A1[0]/3.28)**2 + (IC_A1[1]/3.28)**2 + (IC_A1[2]/3.28)**2))"
]
},
{
"cell_type": "markdown",
"id": "f9c9f252",
"metadata": {},
"source": [
"Looks good!\n",
"\n",
"Now, I just get the other ones:"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "8afa27c6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"IC coordinates relative to A2 (in): [ -1100.33577313 -12852.0589083 -6423.00776043]\n",
"IC coordinates relative to A2 (m): [ -335.46822352 -3918.31064277 -1958.2340733 ]\n",
"Distance of IC from A2 (m): 4393.219537890439\n"
]
}
],
"source": [
"IC_A2 = -R2 @ np.add(t_ARAg_to_A2, t_IC_to_ARAg).T\n",
"print(\"IC coordinates relative to A2 (in): \", IC_A2)\n",
"print(\"IC coordinates relative to A2 (m): \", IC_A2/3.28)\n",
"print(\"Distance of IC from A2 (m): \", np.sqrt((IC_A2[0]/3.28)**2 + (IC_A2[1]/3.28)**2 + (IC_A2[2]/3.28)**2))"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "9959d0a4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"IC coordinates relative to A3 (in): [ -7609.73440732 -12079.45719852 -6432.31164368]\n",
"IC coordinates relative to A3 (m): [-2320.04097784 -3682.76134101 -1961.07062307]\n",
"Distance of IC from A3 (m): 4774.00452685144\n"
]
}
],
"source": [
"IC_A3 = -R3 @ np.add(t_ARAg_to_A3, t_IC_to_ARAg).T\n",
"print(\"IC coordinates relative to A3 (in): \", IC_A3)\n",
"print(\"IC coordinates relative to A3 (m): \", IC_A3/3.28)\n",
"print(\"Distance of IC from A3 (m): \", np.sqrt((IC_A3[0]/3.28)**2 + (IC_A3[1]/3.28)**2 + (IC_A3[2]/3.28)**2))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "093dff67",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.12"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
|