{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Combined analysis of J1206\n",
    "This notebook takes the MCMC chains of multiple runs with various configurations and choices of the model and combines the output of the lens modelling with the kinematics data, time-delay data and translates them together into constraints on the angular diameter distances."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pickle\n",
    "import copy\n",
    "import os\n",
    "import numpy as np\n",
    "from astropy.io import fits\n",
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import rc\n",
    "import matplotlib.patches as mpatches\n",
    "from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
    "\n",
    "rc('xtick', labelsize=12) \n",
    "rc('ytick', labelsize=12)\n",
    "\n",
    "%matplotlib inline\n",
    "\n",
    "import lenstronomy.Util.util as util\n",
    "from lenstronomy.ImSim.image_model import ImageModel\n",
    "from lenstronomy.Workflow.fitting import Fitting\n",
    "from lenstronomy.Sampling.parameters import Param\n",
    "from lenstronomy.Analysis.lens_properties import LensProp\n",
    "from lenstronomy.Analysis.lens_analysis import LensAnalysis\n",
    "from lenstronomy.LensModel.lens_model_extensions import LensModelExtensions\n",
    "\n",
    "import lenstronomy.Plots.output_plots as out_plot\n",
    "import lenstronomy.Util.class_creator as class_creator\n",
    "\n",
    "cwd = os.getcwd()\n",
    "base_path, _ = os.path.split(cwd)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## data sets and priors"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYkAAAD/CAYAAAAE0SrVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAGRlJREFUeJzt3X+QZWV95/H3xxkFZJgIoUXEzPT6K5ixgKyzMbsuimL8ua6UmBT+WEZNHFdrVkvcGNwacBS0JEY3MejqpIQRf+wq2RE1qCkp8Ve5KTMkGXTirBsCKCJmQDIwAwLqd/+4p8310qfnTt9zu29Pv19Vt/qe5zn3PN8+fe799vM8556TqkKSpNk8YLEDkCRNLpOEJKmVSUKS1MokIUlqZZKQJLUySUiSWpkkJEmtTBKSpFYmCUlSq5WLHcBsjj322Jqenl7sMCRpSbnmmmturaqpLrc5kUlienqaHTt2LHYYkrSkJLmx62063CRJamWSkCS1MklIklqZJCRJrUwSkqRWJglJUiuThCSplUlCktTKJCFJamWSkMZs+twrFzsEad5MEtKEmD73ShOKJo5JQpLUyiQhSWplkpAktRoqSSTZlGRHknuSbBuoe3CS9yW5NcneJF/pq0uSi5Lc1jwuSpKOfwdJ0pgMez+Jm4ELgWcCRwzUbW228zjgR8ApfXUbgTOAk4ECvgBcD7x//iFLkhbKUEmiqrYDJFkPPGKmPMmJwH8EHlFVdzTF1/S9dAPwrqq6qVn/XcArMUlI0pIw6pzEbwA3Am9phpu+meTMvvp1wM6+5Z1N2f0k2dgMae3Ys2fPiGFJk8/TXbUUjJokHgE8HtgLPBzYBHwoyeOa+lVN3Yy9wKrZ5iWqamtVra+q9VNTnd6iVZI0T6MmibuB+4ALq+reqvoycDXwjKZ+H7C6b/3VwL6qqhHblSQtgGEnrttcO0tZfwLYRW/S+hvN8slNmXTIczhJh4JhT4FdmeRwYAWwIsnhSVYCXwG+C7ypWedJwFOBv2xeehlwTpITkjwceAOwretfQpI0HsMON22mN7R0LvDS5vnmqroPeD7wHHrzDX8GnF1Vu5vXfQD4DPBN4FvAlU2ZtGQs5jWV7I1osQ17CuwWYEtL3S7g37bUFfDG5iFJWmK8LIckqZVJQpLUyiQhSWplkpAktTJJSJJamSQkSa1MEtKE897XWkwmCUlSK5OEJKmVSUKS1MokIUlqZZKQhjQ4eeyEspYDk4QkqZVJQloA9jq0VJkkpIPgh72Wm2HvTLcpyY4k9yTZ1rLO+UkqydP7yg5LckmSO5LckuScjuKWJC2AYe9xfTNwIfBM4IjByiSPAn4b+MFA1RbgMcBa4GHA1Un+vqo+P9+ApUk007u44R3PXeRIpG4N1ZOoqu1VdQVwW8sq7wX+ALh3oHwDcEFV3V5V36Z3e9OXzTNWacnzDCktNcP2JFol+W3gnqr6bJL+8qOB44GdfavvBM5o2c5GYCPAmjVrRg1LWjR+6OtQMtLEdZKjgLcDr5ulelXzc29f2V7gqNm2VVVbq2p9Va2fmpoaJSxpKJP8YW4PQ5Ni1LObtgAfrqobZqnb1/xc3Ve2GrhzxDYlSQtk1CRxOvDa5sylW4BfAT6R5A+q6nZ6E9kn961/MrBrxDYlSQtkqDmJJCubdVcAK5IcDvyEXpJ4YN+qfw2cA3yuWb4M2JxkB3Ac8Erg5d2ELkkat2F7EpuBu4FzgZc2zzdX1W1VdcvMA/gpcHtVzQw1vRm4DrgR+DLwTk9/laSlY9hTYLdUVQYeW2ZZb7qqrupbvqeqXlFVq6vquKp6d4exS4tmsSeVF7t9LR9elkOS1MokIUlqZZKQJLUySUiSWpkkJEmtTBKSpFYjX+BP0sHx9FUtJSYJaRbz/SA3AehQ43CTNAFMLppUJglpiTCRaDGYJCRJrUwSkqRWJglpwjispElikpAktRoq
SSTZlGRHknuSbOsr/80kX0jyoyR7klye5Pi++iS5KMltzeOiJBnD7yFJGoNhexI3AxcClwyUHw1sBaaBtfTuX31pX/1G4Ax6ty09CXge8Kr5hytJWkjD3nRoe1VdAdw2UP65qrq8qu6oqruAi4En9a2yAXhXVd1UVd8H3gW8rJvQpcmwWHMI0+deOWfbzm2oC13PSTwZ2NW3vA7Y2be8symTJC0BnV2WI8lJwPnA8/uKVwF7+5b3AquSpKpq4PUb6Q1PsWbNmq7CkiSNoJOeRJJHA58DXldVX+2r2ges7lteDewbTBAAVbW1qtZX1fqpqakuwpKG5tCMNLuRexJJ1gJXARdU1YcHqnfRm7T+RrN8Mr84HCUtKpODNLdhT4FdmeRwYAWwIsnhTdkJwBeBi6vq/bO89DLgnCQnJHk48AZgW0exS5LGbNjhps3A3cC5wEub55uB3wMeCWxJsm/m0fe6DwCfAb4JfAu4simT1JEDneFkb0mjGPYU2C1VlYHHlqp6S/N8Vf+j73VVVW+sqmOaxxtnm4+Q1A0TgrrmZTm0rAzzITrJH7T2DLTQTBKSpFYmCUlSK+9xrWXBIRppfuxJ6JDl+L00OnsS0iHGxKgumSS07LR9iPrhKt2fw02SpFYmCUlSK5OEJKmVSUKHPOcaZud+0TBMEpIATxnW7EwSkqRWJgktKf63Ky0sk4QkqdWwd6bblGRHknuSbBuoOz3J7iR3Jbm6uZ3pTN1hSS5JckeSW5Kc03H8krCHpfEZtidxM3AhcEl/YZJjge3AecAxwA7g432rbAEeA6wFngq8McmzRgtZkrRQhr0z3faqugK4baDqBcCuqrq8qn5MLymcnOTEpn4DcEFV3V5V3wb+DHhZJ5FLksZu1DmJdcDOmYWq2g9cB6xLcjRwfH9983zdiG1KkhbIqEliFbB3oGwvcFRTx0D9TN39JNnYzHvs2LNnz4hhablybF7q1qhJYh+weqBsNXBnU8dA/Uzd/VTV1qpaX1Xrp6amRgxLktSFUZPELuDkmYUkRwKPojdPcTvwg/765vmuEdvUMmKvQFpcw54CuzLJ4cAKYEWSw5OsBD4JPD7JmU39+cC1VbW7eellwOYkRzeT2a8EtnX+W0iSxmLYnsRm4G7gXOClzfPNVbUHOBN4G3A78ETgrL7XvZneRPaNwJeBd1bV57sJXZI0bkPdma6qttA7vXW2uquAE1vq7gFe0TwkSUuMl+WQljHPBtOBmCQkSa1MElqSDvTfr/8dS90wSUiSWpkkJEmtTBLSMuAEteZrqFNgpUnnB6A0HvYkJEmtTBKSpFYmCWkZcVhOB8skIUlqZZKQZA9DrUwSkqRWJglJUiuThKQ5+UW85a2TJJFkOslnk9ye5JYkFzd3riPJKUmuSXJX8/OULtqUJI1fVz2J9wH/BBwPnAI8BXhNkgcBnwI+AhwNfAj4VFMudcL/cqXx6SpJ/CvgE1X146q6Bfg8sA44jd6lP/64qu6pqvcAAZ7WUbuSxsgErK6SxB8DZyV5cJITgGfzL4ni2qqqvnWvbcolSROuqwv8fQXYCNwBrKA3rHQFsBnYO7DuXuCowQ0k2dhsgzVr1nQUlqRB9g50MEbuSSR5AL1ew3bgSOBYevMPFwH7gNUDL1kN3Dm4naraWlXrq2r91NTUqGFJkjrQxXDTMcAa4OJm3uE24FLgOcAu4KQk6Vv/pKZc+rlh/rv1P2Bp4Y2cJKrqVuB64NVJViZ5CLCB3tzDl4CfAq9NcliSTc3Lvjhqu5LGw2Ssfl1NXL8AeBawB/gH4D7g9VV1L3AGcDbwz8ArgDOacknShOtk4rqq/o7e6a6z1f0t8IQu2pEkLSwvyyFJauU9rrVkOXYujZ89CUlSK5OEJKmVSUITw0tSS5PHJCFJamWSkCS18uwmTTyHoKTFY09CE8eksPicH9IMk4QkqZVJQpLUyiQhSWplkpAktTJJaME5KSotHSYJSVKrzpJEkrOSfDvJ/iTX
JTm1KT89ye4kdyW5OsnartrU0jBXr8FexdLh32l56iRJJPkt4CLg5cBRwJOBf0xyLLAdOI/evbB3AB/vok1J0vh19Y3rtwBvraq/apa/D5BkI7Crqi5vlrcAtyY5sap2d9S2JGlMRu5JJFkBrAemkvxDkpuSXJzkCGAdsHNm3araD1zXlA9uZ2OSHUl27NmzZ9SwNGEcVpKWpi6Gm44DHgi8EDgVOAX4dWAzsArYO7D+XnpDUr+gqrZW1fqqWj81NdVBWJKkUXWRJO5ufv5pVf2gqm4F3g08B9gHrB5YfzVwZwftSpoA9hAPbSMniaq6HbgJqP7i5ucu4OSZwiRHAo9qyrXM+eEiTb6uJq4vBf5Lks8D9wGvB/4C+CTwziRnAlcC5wPXOmm9fA2bGEwg0mTo6nsSFwB/DXwH+Dbwt8DbqmoPcCbwNuB24InAWR21KWmRmMSXj056ElV1H/Ca5jFYdxVwYhftSJIWlpflkCS18valGhuHJKSlz56EJKmVSUKS1MokIUlq5ZyEpHlxzml5MElIGpqJYflxuEmd8QNE/TweDg0mCUlSK5OEJKmVSUKS1MqJa43MsWfp0GVPQmNh4pAODSYJSVIrk4QkqVWnSSLJY5L8OMlH+spenOTGJPuTXJHkmC7blCSNT9cT1++ld4c6AJKsAz4APBf4G2Ar8D68O90hy7kI6dDSWZJIchbwz8DXgUc3xS8BPlNVX2nWOQ/4dpKjqurOrtrW4jAhaND0uVdywzueu9hhqEOdJIkkq4G3Ak8Dfq+vah29pAFAVV2X5F7gscA1A9vYCGwEWLNmTRdhSVog/sNw6OpqTuIC4INVddNA+Spg70DZXuCowQ1U1daqWl9V66empjoKS5I0ipF7EklOAZ4O/Pos1fuA1QNlqwGHmiRpCehiuOk0YBr4bhLo9R5WJPk14PPAyTMrJnkkcBjwnQ7alSSNWRdJYivwv/qW/yu9pPFq4KHA/0lyKr2zm94KbHfSeulzDFpt2o6NmXIntpeWkZNEVd0F3DWznGQf8OOq2gPsSfKfgY8CvwxcBbx81Da1sHxzS8tX5xf4q6otA8sfAz7WdTuSpPHzshySpFYmCUlSK5OEJKmVNx3SrNomqz2rSQfDkx6WPnsSy9D0uVf6Ya+J5jE6OUwSkqRWJglJUiuThCSplUlCc3JcWFreTBLLWH8CMBlonGY7vjz+lgaThCSplUlCktTKJCFJauU3rvVzfjtWC8H5h6Vl5J5EksOSfDDJjUnuTPJ3SZ7dV396kt1J7kpydZK1o7YpSVoYXQw3rQS+BzwF+CVgM/CJJNNJjgW2A+cBxwA7gI930KYkaQF0cWe6/cCWvqK/SHI98AR6d6PbVVWXAyTZAtya5MSq2j1q21pYDhNIy0/ncxJJjgMeC+yid5/rnTN1VbU/yXXAOsAkMQH84Jc0l07PbkryQHr3s/5Q01NYBewdWG0vcNQsr92YZEeSHXv27OkyLEnSPHXWk0jyAODDwL3ApqZ4H7B6YNXVwJ2Dr6+qrcBWgPXr11dXcUmaXAf6JrYWXydJIkmADwLHAc+pqvuaql3Ahr71jgQe1ZRrQvkm1WLwuJtMXQ03/Q/gccDzquruvvJPAo9PcmaSw4HzgWudtJakpaGL70msBV4FnALckmRf83hJVe0BzgTeBtwOPBE4a9Q2JUkLo4tTYG8EMkf9VcCJo7YjSVp4XrtpGXCsV0vV4LHrva8XnklimfDNJWk+TBKSpFYmiSWurXdgr0GHGo/pxeGlwg9hvql0qBo8tr28/fjYkziEmBR0KHI+bXHZk1gCDuZmQAd6M/lmk3Qw7ElIklrZk5hw/f/59z/v71XYO5D+hbfh7ZY9iQnlOKw0vFHfL77X2pkkJB2SxvHBvxz/eXO4aYFNn3tlazf4YA6+5XagSqOY632nudmTkCS1sifRgQNNlPlfjLQwZrsg4FzrtJ0AcjCnmx/q7217EiMa9bIYy3GMU5oUbe+//vLl
/h4de5JIckySTybZn+TGJC8ed5uLZdjLGi/3g05aKsZxxtRSe+8vxHDTe4F76d3/+hTgyiQ7q2os97le7C7gwRwYbd+BkNS9cb3HBoeT55scFvuzq81YexJJjqR3+9LzqmpfVX0N+DTwn8bZriSpG+PuSTwW+ElVfaevbCfwlDG3O+9JqMH1/ZazJJhfz/9QuJZaqmp8G09OBS6vqof1lb0SeElVnTaw7kZgY7P4q8D/7as+Frh1bIF2Y9JjNL7RTXqMxje6SY/xQPGtraqpLhscd09iH7B6oGw1cOfgilW1Fdg620aS7Kiq9d2H151Jj9H4RjfpMRrf6CY9xsWIb9xnN30HWJnkMX1lJwNjmbSWJHVrrEmiqvYD24G3JjkyyZOA5wMfHme7kqRuLMSX6V4DHAH8E/A/gVfP4/TXWYehJsykx2h8o5v0GI1vdJMe44LHN9aJa0nS0uZlOSRJrUwSkqRWi5IkkmxKsiPJPUm2DdT9ZpIvJPlRkj1JLk9yfF/97yf5VpI7k1yf5PfnaGc6SSXZ1/c4bwFiTJKLktzWPC5KkjnaenFzXav9Sa5IcsyI8T0oyZ8nuaH5/U8bqP/cwD65N8k3W9qZ1z4cMb4tSe4baPORc7R10PuvgxjHfhyOGN+iHoNN/elJdie5K8nVSdb21e0a2B8/SfKZlnZOS/KzgfU3HCi+DmLc1rw3+ttdMUdbr09yS5I7klyS5LAxx/dHSf5fcwzuTnL2HO3Mex8uVk/iZuBC4JJZ6o6mNzkzDayl952KS/vqA5zdrPcsYFOSsw7Q3kOqalXzuGABYtwInEHvdN+TgOcBr5qtkSTrgA/Qu1TJccBdwPtGjA/ga8BLgVsGK6rq2X37YxXwdeDyA7R3sPtw3vE1Pt4fY1X942wrjbD/Ro1xIY7DUeJb1GMwybH0zmw8DzgG2AF8fKa+qtb1HX9HAd9j7mPw5oHj4UNDxDdSjI0/HGj3p7M1kuSZwLnA6fQ+Ex4JvGXM8e2n93f9JWAD8CdJ/t1cbc1rH1bVoj2anbPtAOv8a+DOOerfA/xpS900UMDKhYyR3ofuxr7l3wX+quW1bwc+1rf8KHoXRDyqi/iAm4DT5qifBn4KTI9jH84nPmAL8JEhtz/S/utiH477OJznPlzUY5Bekvp63/KRwN3AibO8/in0/tE6smX7pwE3zWffjRIjsA24cMjtfwx4e9/y6cAt44xvlm18GnhD1/twKcxJPJmWL9813edT2+r73JjkpiSXNtm5a4MxrqN3jaoZO5uy2fzCulV1Hb036GM7jrHN2cBXq+qGA6w37n046HnpDeftSvLqOdZb7P03Scdhv8U+Bge3uR+4riWGDcD/btZp89AkP2yG9v57ehcPHdUwMb6mOQ6vSXLmsNtqnh+X5JfHHB8ASY4A/g1zH4Pz2ocTnSSSnAScD7SN926h9ztc2lJ/K70dtxZ4Ar1u7UcXIMZVwN6+5b3AqpYx4cF1Z9Y/qss453A2vf+Y2ox9H87iE8DjgCnglcD5SV7Usu5i7z+YgONwFot9DA61zSQPBl7I3Mfgbnq3GTgeeBq9ffjuEWIbNsb3AI8BHkpvyGdbel8IHmZbM8/Hvg8b76eXUP6yZVvz3oedJ4kkX0pvIm22x9cOYjuPBj4HvK6qvjpL/SZ6H3DPrap7ZttG9S5PvqOqflJVPwQ2Ac9I8tUxxzh4zarVwL5q+n0DZru+1XHA1aPGN0T8/x54GPDnbevMsQ9H3n9ztPn3VXVzVf20qr4O/Am9D5LZtF0f7I/GGeOMEY/Dcca32MfgsNdtewHwI+DLbRuqqluaY+JnVXU98EbgzA4+a+aMsar+pqpua/5un6WX2F8w5LZmnl8/7n2Y5J3A44Hfafn7tu7DIWLo/gJ/NXB11/lIbwb/KuCCqrrfJTySvILeJNGTq+qmgwmv+fkfqmowQ3cZ4y56E4bfaJbnul7VzLoz230k8DPg6Kq634UQO7YB2F5V+w7iNTP7
8CGj7sODbLPtzJzZ9t9hwFPHvf86OA7HuQ8X+xjcRe/4mtnmkfTmOgZj2ABc1vbh1qKAB3TwWTNsjP3tHug4/ESzfDLww+q7AvY44kvyFuDZwFOq6o6D2HYxZCdhsU6BXZnkcGAFsCLJ4UlWNnUnAF8ELq6q98/y2pfQm2j7rWo546Vv3Scm+dUkD2jGBt8DfGmYN+YoMQKXAeckOSHJw4E30N6d/ii98fdTm4PgrfQ+uOd8c84VX1N/WFMP8KCmPn31RwC/M0dcM+vNax+OEl+S5yc5Oj2/AbwW+FRLU/Pafx3EOPbjcMS/8WIfg58EHp/kzGad84Frq2p33+sfATwVmPMsmyRPTbK2OR5+BXgH7cdDZzEmeWGSVc3f7Rn0ziT7dEtTlwG/m+TXkjwE2MwB3lsdxPcm4MXA06vqtgO0M+99OO+zBUZ50BvDrYHHlqbuzc3yvv5H32uvB+4bqH9/X/0ueverAHhRs/5+4Af0/pAPW4AYA/whvW70j5rn6avfB5zat/xi4LtNnJ8Cjhklvqb+hlnqp/vqXwTc2B9Xl/twlPjoXePrtmY/7QZeO7DtkfdfBzGO/TgcMb5JOAaf3vz97ga+xMAZdMCb6J00Mdu2fx4fcA7wfXqn5n6PXpId9syreccIfJXeHMAd9Mb7z+qrW9PEuKav7Bzgh836lwKHjTm+Au4ZOAb/W9f70Gs3SZJaTfTZTZKkxWWSkCS1MklIklqZJCRJrUwSkqRWJglJUiuThCSplUlCktTKJCFJavX/AYOY/7Na8zyIAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# =======================================\n",
    "# LENS MODEL POSTERIORS FROM IMAGING DATA\n",
    "# =======================================\n",
    "\n",
    "# read in list of MCMC samples from the modelling of the imaging data (see appropriate notebook)\n",
     "# these are the job names executed and this notebook can handle an arbitrary list of job names corresponding to different model outputs\n",
    "# the model outputs should be stored in the conventions of the modelling notebooks and will be loaded in this notebook with pickle.\n",
     "# Attention: pickle is not platform (and python version) independent and should be avoided for publicly available data.\n",
     "# Parameter conventions may vary between different lenstronomy versions. Be advised to run your own chains to analyse them with this notebook.\n",
    "job_name_loaded_list = ['test']\n",
    "num_models = len(job_name_loaded_list)\n",
    "\n",
    "# ============================================\n",
    "# VELOCITY DISPERSION MEASUREMENT AND ANALYSIS\n",
    "# ============================================\n",
    "\n",
     "# velocity dispersion measurement in km/s\n",
    "vel_disp = 290  # from Agnello et al.\n",
    "sigma_vel_disp = 30  # from Agnello et al.\n",
     "R_slit = 1.0 # 3.8  # slit length in arc sec (we only use part of the slit length in this analysis)\n",
    "dR_slit = 1.  # slit width in arc sec\n",
     "psf_fwhm = 1.0  # seeing conditions of the observations\n",
    "\n",
    "kwargs_aperture = {'length': R_slit, 'width': dR_slit, 'center_ra': 0, 'center_dec': 0, 'angle': 0}\n",
    "anisotropy_model = 'OsipkovMerritt'  # anisotropy model applied\n",
    "aperture_type = 'slit'  # type of aperture used\n",
    "# numerical options to perform the numerical integrals\n",
    "kwargs_galkin_numerics = {'sampling_number': 1000, 'interpol_grid_num': 100, 'log_integration': True,\n",
    "                           'max_integrate': 100, 'min_integrate': 0.001}\n",
    "MGE_light = True  # we use a multi-Gaussian expansion to perform the luminous light de-projection\n",
    "MGE_mass = True # we use a multi-Gaussian expansion to perform the mass distribution de-projection\n",
    "\n",
    "\n",
    "\n",
    "# anisotropy priors\n",
    "aniso_param_min = 0.5\n",
    "aniso_param_max = 5\n",
    "def draw_anisotropy():\n",
    "    # draw of the anisotropy prior distribution\n",
    "    return np.random.uniform(aniso_param_min, aniso_param_max)\n",
    "\n",
    "def draw_vel_disp():\n",
    "    # sampling of the measured velocity dispersion distribution\n",
    "    return vel_disp + np.random.randn(1)[0]*sigma_vel_disp\n",
    "\n",
    "\n",
    "# ====================================\n",
    "# LENS REDSHIFTS AND DEFAULT COSMOLOGY\n",
    "# ====================================\n",
    "\n",
    "# redshifts\n",
    "z_d = 0.745\n",
    "z_s = 1.789\n",
    "# instance of the LCDM cosmology class\n",
    "from lenstronomy.Cosmo.lens_cosmo import LCDM\n",
    "cosmoProp = LCDM(z_lens=z_d, z_source=z_s)\n",
    "\n",
    "\n",
    "# ===========================\n",
    "# TIME DELAY AND MICROLENSING\n",
    "# ===========================\n",
    "\n",
    "# time delay\n",
    "time_delay_path = os.path.join(base_path, 'Data', 'TimeDelay', \"time_delay.npy\")\n",
    "[days, probas] = np.load(time_delay_path)\n",
    "# we compute the CDF of the time-delay distribution\n",
    "from lenstronomy.Util.prob_density import Approx\n",
    "dt_draw = Approx(days, probas)\n",
    "\n",
    "def draw_dt_measured(n=1):\n",
    "    # sampling from the time-delay measured posteriors\n",
    "    return dt_draw.draw(n=n)\n",
    "# here a quick test of the distribution\n",
    "dt_sample = draw_dt_measured(n=10000)\n",
    "plt.hist(dt_sample, bins=200)\n",
    "plt.show()\n",
    "\n",
    "\n",
    "# micro-lensing time-delays\n",
    "# see Birrer et al. 2018 for details on the assumption that went into this computation\n",
    "# micro-lensing specs:\n",
    "# M_bh, kappa, gamma, accretion size\n",
    "\n",
    "#Image A\n",
    "micro_file_A = os.path.join(base_path, 'Data', 'TimeDelay', 'MicroLensing', 'ImgA', 'TDmap_1.0R0_incl0.0_pa0.0.fits')\n",
    "micro_lensing_A = fits.open(micro_file_A)\n",
    "micro_lensing_A = micro_lensing_A[0].data\n",
    "A_micro_pdf = micro_lensing_A.flatten()\n",
    "\n",
    "#Image B\n",
    "micro_file_B = os.path.join(base_path, 'Data', 'TimeDelay', 'MicroLensing', 'ImgB', 'TDmap_1.0R0_incl0.0_pa0.0.fits')\n",
    "micro_lensing_B = fits.open(micro_file_B)\n",
    "micro_lensing_B = micro_lensing_B[0].data\n",
    "B_micro_pdf = micro_lensing_B.flatten()\n",
    "\n",
    "\n",
    "def draw_micro_lensing(num=1):\n",
    "    \"\"\"\n",
    "    return a realisation of the micro-lensing delay for image A and B\n",
    "    \"\"\"\n",
    "    nA = len(A_micro_pdf)\n",
    "    iA_random = np.random.uniform(low=0, high=nA, size=num)\n",
    "    dt_A = A_micro_pdf[iA_random.astype(int)]\n",
    "    \n",
    "    nB = len(B_micro_pdf)\n",
    "    iB_random = np.random.uniform(low=0, high=nB, size=num)\n",
    "    dt_B = B_micro_pdf[iB_random.astype(int)]\n",
    "    return dt_A, dt_B\n",
    "\n",
    "def draw_dt():\n",
    "    \"\"\"\n",
     "    joint draw of the measured time delay PDF and microlensing PDFs of the two images\n",
    "    \"\"\"\n",
    "    dt_A_micro, dt_B_micro = draw_micro_lensing()\n",
    "    return draw_dt_measured() + dt_A_micro - dt_B_micro\n",
    "\n",
    "def BIC_compute(num_data, num_model, max_logL):\n",
    "    \"\"\"\n",
    "    Bayesian information criterion\n",
    "    \"\"\"\n",
    "    return np.log(num_data)*num_model - 2 * max_logL"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## read in lens model output\n",
    "Re-computes one parameter position in each sample"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "7589.441109423646 47 -3598.1327232613885 4296.0 BIC num_model logL num_data\n",
      "0.34578935813584555 r_eff\n",
      "test\n",
      "=====\n"
     ]
    }
   ],
   "source": [
    "num_samples_read = 1000  # depending on the number of different chains/models, we do not need all of the\n",
    "# posteriors to post-process for computational reasons. As long as the sample is representative of the full\n",
     "# posteriors, everything should be fine.\n",
    "\n",
    "# evaluate goodness of fit with shared mask.\n",
    "mask_shared_bool = True\n",
    "\n",
     "# a list of attributes that are collected for all the different samples.\n",
    "kwargs_list = []\n",
    "source_result_list = []\n",
    "lens_light_result_list = []\n",
    "r_eff_list = []\n",
    "lensProp_list = []\n",
    "lensAnalysis_list = []\n",
    "\n",
    "chi2_list = []\n",
    "BIC_list = []\n",
    "source_type_list = []\n",
    "lens_type_list = []\n",
    "numerics_list = []\n",
    "mask_list = []\n",
    "perturber_number_list = []\n",
    "\n",
    "#sample_folder = 'Samples'\n",
    "sample_folder = os.path.join(base_path, 'Temp')\n",
    "#sample_folder = '/Users/sibirrer/mount/external/TDSL/SDSSJ1206'\n",
    "for i, job_name in enumerate(job_name_loaded_list):\n",
    "    # read in posterior sampling file\n",
    "    output_temp = job_name +'_out.txt'\n",
    "    output_temp = os.path.join(sample_folder, output_temp)\n",
    "    f = open(output_temp, 'rb')\n",
    "    [input_, output_] = pickle.load(f)\n",
    "    f.close()\n",
    " \n",
     "    # get back all the kwargs and files in the same format as being pickled\n",
    "    fitting_kwargs_list, multi_band_list, kwargs_model, kwargs_constraints, kwargs_likelihood, kwargs_params, init_samples = input_\n",
    "    lens_result, source_result, lens_light_result, ps_result, cosmo_result, multi_band_list_out, chain_list, param_list, samples_mcmc, param_mcmc, dist_mcmc, kwargs_params_out = output_\n",
    "    # make an instance of the LensProp class with the configuration of the model being read in\n",
    "    lensProp_list.append(LensProp(z_d, z_s, kwargs_model))\n",
    "    # make an instance of the LensProp class with the configuration of the model being read in\n",
    "    lensAnalysis = LensAnalysis(kwargs_model)\n",
    "    lensAnalysis_list.append(lensAnalysis)\n",
    "    # read in the parameter configurations as passed to the FittingSequence class\n",
    "    lens_params = kwargs_params_out['lens_model']\n",
    "    source_params = kwargs_params_out['source_model']\n",
    "    lens_light_params = kwargs_params_out['lens_light_model']\n",
    "    ps_params = kwargs_params_out['point_source_model']\n",
    "    cosmo_params = kwargs_params_out['cosmography']\n",
    "    \n",
    "    # make an instance of the Param() class with the same configuration as being sampled by the MCMC\n",
    "    kwargs_fixed_lens, kwargs_fixed_source, kwargs_fixed_lens_light, kwargs_fixed_ps, kwargs_fixed_cosmo = lens_params[2], source_params[2], lens_light_params[2], ps_params[2], cosmo_params[2]\n",
    "    param_class = Param(kwargs_model, kwargs_constraints, kwargs_fixed_lens, kwargs_fixed_source, kwargs_fixed_lens_light, kwargs_fixed_ps, kwargs_fixed_cosmo, kwargs_lens_init=lens_result)\n",
    "    \n",
    "    # now we loop through the posterior samples and expand the sampled parameters to recover the full lenstronomy keyword arguments\n",
    "    # we append those full list of keyword arguments for later use\n",
    "    kwargs_mcmc_list = []    \n",
    "    for k in range(num_samples_read):\n",
    "        j = len(samples_mcmc) - num_samples_read + k - 1\n",
    "        kwargs_mcmc_list.append(param_class.args2kwargs(samples_mcmc[j,:]))\n",
    "    kwargs_list.append(kwargs_mcmc_list)\n",
    "    # we select the zeroth band (well, we only did a single band modelling here)\n",
    "    band_i = 0\n",
    "    [kwargs_data, kwargs_psf, kwargs_numerics] = multi_band_list[band_i]\n",
    "    # we extract the mask. This will be used later to assess the goodness of fit\n",
    "    mask = kwargs_numerics['mask']\n",
    "    mask_list.append(np.sum(mask))\n",
    "        \n",
    "    # to evaluate the goodness of fit, we make sure to do so by using always the same mask\n",
    "    if i == 0:\n",
    "        mask_shared = mask\n",
    "    if mask_shared_bool is True:\n",
    "        mask = mask_shared\n",
    "    kwargs_numerics['mask'] = mask_shared\n",
     "    # we find the posterior sample index of the maximum likelihood\n",
    "    i_plot = np.argmax(dist_mcmc)\n",
    "    lens_result, source_result, lens_light_result, ps_result, cosmo_result = param_class.args2kwargs(samples_mcmc[i_plot,:])\n",
     "    # we create an image modelling class based on our options chosen\n",
    "    imageModel = class_creator.create_image_model(kwargs_data, kwargs_psf, kwargs_numerics, kwargs_model)\n",
    "    # we compute the logL for the given model\n",
    "    max_logL = imageModel.likelihood_data_given_model(lens_result, source_result, lens_light_result, ps_result, source_marg=False)\n",
    "    # here we compute the full linear inversion\n",
    "    model, error_map, cov_param, param = imageModel.image_linear_solve(lens_result, source_result, lens_light_result, ps_result, inv_bool=True)\n",
    "    # we compute some goodness of fit quantities\n",
    "    chi2_list.append(imageModel.reduced_chi2(model, error_map=error_map))    \n",
    "    num_data = np.sum(mask)\n",
    "    num, param_list = param_class.num_param()\n",
    "    num_model = num + param_class.num_param_linear()\n",
    "    BIC = BIC_compute(num_data, num_model, max_logL)\n",
    "    BIC_list.append(BIC)\n",
    "    print(BIC, num_model, max_logL,  num_data, 'BIC', 'num_model', 'logL', 'num_data')\n",
    "    \n",
    "    # we want to keep track of the source type, here specified by n_max of the shapelet order\n",
    "    n_max = -1\n",
    "    for kwargs in source_result:\n",
    "        if 'n_max' in kwargs:\n",
    "            n_max = kwargs['n_max']\n",
    "    source_type_list.append(n_max)\n",
    "    \n",
    "    # we want to keep track of the number of perturbers modelled for a specific model\n",
    "    lens_model_temp = kwargs_model['lens_model_list']\n",
    "    num_pert = 0\n",
    "    for model in lens_model_temp:\n",
    "        if model in ['SIS', 'NFW']:\n",
    "            num_pert += 1\n",
    "    perturber_number_list.append(num_pert)\n",
    "    lens_type_list.append(kwargs_model['lens_model_list'])\n",
    "    numerics_list.append(kwargs_numerics.get('subgrid_res', 1))\n",
    "    # here we keep track of the parameters of the source and lens light of the best fit model, this includes the linear amplitude parameters that were not tracked in the original MCMC output\n",
     "    # this is important when using the light models to infer the kinematics or other light quantity related properties.\n",
    "    source_result_list.append(source_result)\n",
    "    lens_light_result_list.append(lens_light_result)\n",
    "    # here we compute the projected half-light radius of the model. We specify the center and only include light models that describe part of the main deflector\n",
    "    r_eff = lensAnalysis.half_light_radius_lens(lens_light_result,\n",
    "                                                    center_x=lens_light_result[0]['center_x'],\n",
    "                                                    center_y=lens_light_result[0]['center_x'],\n",
    "                                                    model_bool_list=kwargs_model['light_model_deflector_bool'], deltaPix=0.01, numPix=500)\n",
    "    print(r_eff, 'r_eff')\n",
    "    r_eff_list.append(r_eff) \n",
    "    print(job_name)\n",
    "    print('=====')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD/CAYAAAD2Qb01AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAD+1JREFUeJzt3X+o3fV9x/Hna6ZGmxg7zSWd2jR2VWgDTSihtIbAmIx2K0Od/wytbWlHVkUq7SaIVGad9IfQDuZEF6hKpXODzXRK6I9BN5pQBqaDtAZpCrO2WmJS42Ju8MfSvffHuRlnl5Pc7835nntMPs8HHLjfz31/z3l/vPe+8j2f7/d8TVUhSWrHb0y7AUnS0jL4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY1ZNu0GAFavXl3r1q2bdhuSdFr54Q9/+Kuqmlnsfm+I4F+3bh27d++edhuSdFpJ8uyp7OdSjyQ1xuCXpMYY/JLUGINfkhpj8EtSYzoFf5Kbk+xO8lqShxeo/UyS/UleTvJgkuW9dCpJ6kXXI/5fAncDD56sKMkHgduAK4G3A+8APj9Og5KkfnUK/qp6rKq+Cby4QOnHgK9V1d6qegn4S+Dj47UoSepT32v864E9Q9t7gDVJLuz5dSRJp6jvT+6uBA4PbR//+jzmvVtIshXYCrB27dqe25D6s+62HVN53Z996cNTeV2d+fo+4p8FVg1tH//6yPzCqtpWVZuqatPMzKJvNSFJOkV9B/9eYMPQ9gbghapa6NyAJGmJdL2cc1mSc4CzgLOSnJNk1DLR14FPJnl3krcAnwMe7q1bSdLYuh7xfw54hcGlmh+Z+/pzSdYmmU2yFqCqvg3cA/wr8HPgWeAveu9aknTKOp3crao7gTtP8O2V82q/Cnx1rK4kSRPjLRskqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWpMp+BPckGS7UmOJnk2yXUnqFue5IEkLyQ5lOSJJBf327IkaRxdj/jvA14H1gDXA/cnWT+i7hbgA8B7gIuAl4B7e+hTktSTBYM/yQrgWuCOqpqtql3A48ANI8ovBb5TVS9U1avAPwCj/oGQJE1JlyP+y4FjVbVvaGwPowP9a8DmJBcleTODdwffGr9NSVJflnWoWQm8PG/sMHDeiNqfAr8Angd+DfwYuHnUkybZCmwFWLt2bcd2JUnj6nLEPwusmje2CjgyovY+YDlwIbACeIwTHPFX1baq2lRVm2ZmZrp3LEkaS5fg3wcsS3LZ0NgGYO+I2o3Aw1V1qKpeY3Bi931JVo/fqiSpDwsGf1UdZXDkfleSFUk2A1cBj4wofxL4aJLzk7wJuAn4ZVX9qs+mJUmnruvlnDcB5wIHgEeBG6tqb5ItSWaH6v4ceJXBWv9B4A+Aa3rsV5I0pi4nd6mqQ8DVI8Z3Mjj5e3z7RQZX8kiS3qC8ZYMkNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMZ2CP8kFSbYnOZrk2STXnaT2vUm+n2Q2yQtJbumvXUnSuJZ1rLsPeB1YA2wEdiTZU1V7h4uSrAa+DXwG+EfgbOCS/tqVJI1rwSP+JCuAa4E7qmq2qnYBjwM3jCj/LPCdqvpGVb1WVUeq6ul+W5YkjaPLUs/l
wLGq2jc0tgdYP6L2/cChJD9IciDJE0nW9tGoJKkfXYJ/JfDyvLHDwHkjai8BPgbcAqwFngEeHfWkSbYm2Z1k98GDB7t3LEkaS5fgnwVWzRtbBRwZUfsKsL2qnqyqV4HPA1ckOX9+YVVtq6pNVbVpZmZmsX1Lkk5Rl+DfByxLctnQ2AZg74jaHwE1tF0jaiRJU7Rg8FfVUeAx4K4kK5JsBq4CHhlR/hBwTZKNSd4E3AHsqqrDfTYtSTp1XT/AdRNwLnCAwZr9jVW1N8mWJLPHi6rqe8DtwI652ncCJ7zmX5K09Dpdx19Vh4CrR4zvZHDyd3jsfuD+XrqTJPXOWzZIUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktSYTsGf5IIk25McTfJskusWqD87ydNJnuunTUlSX5Z1rLsPeB1YA2wEdiTZU1V7T1B/K3AQOG/8FiVJfVrwiD/JCuBa4I6qmq2qXcDjwA0nqL8U+AjwxT4blST1o8tSz+XAsaraNzS2B1h/gvp7gduBV8bsTZI0AV2CfyXw8ryxw4xYxklyDXBWVW1f6EmTbE2yO8nugwcPdmpWkjS+LsE/C6yaN7YKODI8MLckdA/w6S4vXFXbqmpTVW2amZnpsoskqQddTu7uA5Yluayqfjo3tgGYf2L3MmAdsDMJwNnA+Un2A++vqp/10rEkaSwLBn9VHU3yGHBXkj9hcFXPVcAV80qfAt42tH0F8DfAexlc4SNJegPo+gGum4BzgQPAo8CNVbU3yZYkswBVdayq9h9/AIeA/5nb/vVEupckLVqn6/ir6hBw9YjxnQxO/o7a59+AS8ZpTpLUP2/ZIEmNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWpMp+BPckGS7UmOJnk2yXUnqLs1yVNJjiR5Jsmt/bYrSRrXso519wGvA2uAjcCOJHuqau+8ugAfBX4E/Dbw3SS/qKq/76thSdJ4FjziT7ICuBa4o6pmq2oX8Dhww/zaqrqnqv6jqo5V1U+AfwY29920JOnUdVnquRw4VlX7hsb2AOtPtlOSAFuA+e8Kjn9/a5LdSXYfPHiwa7+SpDF1Cf6VwMvzxg4D5y2w351zz//QqG9W1baq2lRVm2ZmZjq0IUnqQ5c1/llg1byxVcCRE+2Q5GYGa/1bquq1U29PktS3Lkf8+4BlSS4bGtvAiZdwPgHcBlxZVc+N36IkqU8LBn9VHQUeA+5KsiLJZuAq4JH5tUmuB74A/F5V/WffzUqSxtf1A1w3AecCB4BHgRuram+SLUlmh+ruBi4EnkwyO/d4oN+WJUnj6HQdf1UdAq4eMb6Twcnf49uX9teaJGkSvGWDJDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mN6RT8SS5Isj3J0STPJrnuBHVJ8uUkL849vpwk/bYsSRrHso519wGvA2uAjcCOJHuqau+8uq3A1cAGoIB/AZ4BHuinXUnSuBY84k+yArgWuKOqZqtqF/A4cMOI8o8BX6mq56rqeeArwMd77FeSNKYuSz2XA8eqat/Q
2B5g/Yja9XPfW6hOkjQlXZZ6VgIvzxs7DJx3gtrD8+pWJklV1XBhkq0MloYAZpP8pFvLvVkN/GqJX3PaWpvzaT3ffPmUdjut53yKWp7z209l5y7BPwusmje2CjjSoXYVMDs/9AGqahuwrWOfvUuyu6o2Tev1p6G1Obc2X3DOrRh3zl2WevYBy5JcNjS2AZh/Ype5sQ0d6iRJU7Jg8FfVUeAx4K4kK5JsBq4CHhlR/nXgs0kuTnIR8GfAwz32K0kaU9cPcN0EnAscAB4FbqyqvUm2JJkdqvtb4Angx8BTwI65sTeiqS0zTVFrc25tvuCcWzHWnDNi+V2SdAbzlg2S1BiDX5Iac8YGf4v3F1rEnG9N8lSSI0meSXLrUvfal65zHqo/O8nTSZ5bqh77tpg5J3lvku8nmU3yQpJblrLXPizi93p5kgfm5nkoyRNJLl7qfvuQ5OYku5O8luThBWo/k2R/kpeTPJhk+ULPf8YGP////kLXA/cnGfUp4uH7C70H+EPgT5eqyZ51nXOAjwK/CXwIuDnJHy9Zl/3qOufjbgUOLkVjE9RpzklWA99mcIHFhcA7ge8uYZ996fozvgX4AIO/44uAl4B7l6rJnv0SuBt48GRFST4I3AZcyeDDXO8APr/gs1fVGfcAVjD4Rbl8aOwR4Esjan8AbB3a/iTw79OewyTnPGLfvwbunfYcJj1n4FLgaeD3geem3f+k5wx8AXhk2j0v4XzvB+4Z2v4w8JNpz2HM+d8NPHyS7/8d8IWh7SuB/Qs975l6xN/i/YUWM+f/M7estYXT84N2i53zvcDtwCuTbmyCFjPn9wOHkvwgyYG5pY+1S9JlfxYz368Bm5NclOTNDN4dfGsJepymUfm1JsmFJ9vpTA3+Xu4vNKHeJmUxcx52J4Pfg4cm0NOkdZ5zkmuAs6pq+1I0NkGL+TlfwuCOubcAaxncIv3RiXbXv8XM96fAL4Dn5/Z5F3DXRLubvlH5BQv83Z+pwT+R+wu9wS1mzsDgBBKDtf4PV9VrE+xtUjrNee7W4vcAn16iviZpMT/nV4DtVfVkVb3KYO33iiTnT7jHPi1mvvcByxmcz1jB4I4DZ/oR/6j8gpP83cOZG/wt3l9oMXMmySeYOylUVafrFS5d53wZsA7YmWQ/g0D4rbkrIdYtQZ99WszP+UcM/odIx51uBzOwuPluZLAefmjuQOZe4H1zJ7nPVKPy64WqevGke0375MUET4r8PYO3tSuAzQzeAq0fUfcpBif8LmZwJcBe4FPT7n/Cc74e2A+8a9o9L8WcGdyF9q1Djz9icNXEWxks/0x9HhP6Of8ugytbNgJvAv4K2Dnt/ic434eAfwLOn5vv7cDz0+7/FOe8DDgH+CKDk9nnAMtG1H1o7m/53cBbgO/R5YKOaU9wgv/hLgC+CRwFfg5cNze+hcFSzvG6MFgGODT3uIe5W1mcbo9FzPkZ4L8ZvE08/nhg2v1Pcs7z9vkdTtOrehY7Z+BGBmveLzG4j9bbpt3/pObLYInnGwzuKfZfwC7gfdPu/xTnfCeDd2jDjzsZnKuZBdYO1X4WeIHBeY2HgOULPb/36pGkxpypa/ySpBMw+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mN+V9QmRUjopbNLAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# computing the half-light radius of the reconstructed source\n",
     "# this is simply a statistic that ensures that the properties of the source galaxy are not deviating strongly.\n",
    "R_h_list = []\n",
    "for i, source_result in enumerate(source_result_list):\n",
    "    deltaPix_source_norm = 0.01\n",
    "    R_h = lensAnalysis_list[i].half_light_radius_source(source_result_list[i], deltaPix=0.01, numPix=100)\n",
    "    R_h_list.append(R_h)\n",
    "plt.hist(R_h_list)\n",
    "plt.show()\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test\n"
     ]
    }
   ],
   "source": [
    "\"\"\"\n",
    "Now we compute derived quantities from the lens model, such as log power-law slope at the Einstein radius etc.\n",
    "This computation is mainly to compare different lens models and for presenting modelling results \n",
    "\"\"\"\n",
    "\n",
    "mcmc_file_name = '_derived_lens_model.txt'\n",
     "new_compute = False   # if filename is already there, read out existing file.\n",
    "\n",
    "# here are the parameters being saved in a new file\n",
    "labels_compressed = [r\"$\\theta_E$\", r\"$\\gamma'$\", r\"$\\gamma_{ext}$\", r\"$\\phi_{ext}$\", r\"$\\gamma_{ext foreground}$\", r\"$\\phi_{ext foreground}$\",r\"$\\Delta\\phi_{fermat}$\",  r\"$\\theta_{pert}$\", r\"$\\kappa_{G1+G2}$\"]\n",
    "num_param_compressed = len(labels_compressed)\n",
    "mcmc_compressed_list = []\n",
    "\n",
    "# we loop through all the model choices\n",
    "for k, job_name in enumerate(job_name_loaded_list):\n",
    "    mcmc_out_temp = job_name + mcmc_file_name\n",
    "    mcmc_out_temp = os.path.join(base_path, 'Temp', mcmc_out_temp)\n",
    "    if new_compute is True or not os.path.exists(mcmc_out_temp):\n",
    "        # we get the instances of the correct lensAnalysis and LensModel classes to perform the computations\n",
    "        lensAnalysis = lensAnalysis_list[k]\n",
    "        lensModelExtensions = LensModelExtensions(lensAnalysis.LensModel)\n",
    "        mcmc_compressed = np.zeros((num_samples_read, num_param_compressed))\n",
    "        # we loop through all samples selected\n",
    "        for l in range(num_samples_read):\n",
    "            # read out the full keyword arguments from the previous block\n",
    "            kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps, kwargs_cosmo = kwargs_list[k][l]\n",
    "            # Fermat potential at the quasar image positions\n",
    "            fermat_pot = lensAnalysis.fermat_potential(kwargs_lens, kwargs_ps)\n",
    "            delta_fermat = fermat_pot[1] - fermat_pot[0]  # relative Fermat potential (B-A)\n",
    "            # external shear parameters in angle and shear strength\n",
    "            phi_ext, gamma_ext = lensModelExtensions.external_shear(kwargs_lens)\n",
    "            # same for foreground shear\n",
    "            phi_ext_foreground, gamma_ext_foreground = lensModelExtensions.external_shear(kwargs_lens, foreground=True)\n",
    "            # Einstein radius, as defined as the spherical averaged region with <kappa> = 1 centered at main profile center\n",
    "            theta_E = lensModelExtensions.effective_einstein_radius(kwargs_lens)\n",
    "            # log power-law slope at the Einstein radius\n",
    "            gamma = lensModelExtensions.profile_slope(kwargs_lens)  # local logarithmic slope at Einstein radius\n",
    "            gamma = np.mean(gamma)\n",
    "            num_pert = perturber_number_list[k]\n",
    "            \n",
    "            # now we compute the sum of Einstein radii of the galaxy triplet\n",
    "            theta_E_pert = 0\n",
    "            num_lens_models = len(kwargs_lens)\n",
    "            for i in range(3):\n",
    "                theta_E_pert += kwargs_lens[num_lens_models-num_pert-1+1]['theta_E']  # we used the convention that the perturber models are the last models in the list\n",
    "            if num_pert == 6:  # if we add other perturber models G3 and G4\n",
    "                i_G3 = len(kwargs_lens)-3\n",
    "                i_G4 = len(kwargs_lens)-2\n",
    "                # we compute the convergence of G3 and G4 induced at the lens center\n",
    "                kappa_g3_g4 = lensAnalysis.LensModel.kappa(0, 0, kwargs_lens, k=[i_G3, i_G4])\n",
    "            else:\n",
    "                kappa_g3_g4 = 0\n",
    "            # we add the values to the parameter array\n",
    "            mcmc_compressed[l] = [theta_E, gamma, gamma_ext, phi_ext, gamma_ext_foreground, phi_ext_foreground, delta_fermat, theta_E_pert, kappa_g3_g4]\n",
    " \n",
    "        print(job_name_loaded_list[k])\n",
    "        f = open(mcmc_out_temp,'wb')\n",
    "        pickle.dump(mcmc_compressed, f)\n",
    "        f.close()\n",
    "    try:\n",
    "        f = open(mcmc_out_temp, 'rb')\n",
    "        mcmc_compressed = pickle.load(f)\n",
    "        f.close()\n",
    "    except:\n",
    "        print(mcmc_out_temp)\n",
    "        raise ValueError(mcmc_out_temp)\n",
    "    mcmc_compressed_list.append(mcmc_compressed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "test\n"
     ]
    }
   ],
   "source": [
    "\"\"\"\n",
    "Now we compute the angular diameter distance posteriors by combining the image model posteriors with predictions\n",
    "of the velocity dispersion and by simple sampling the measured time delay (incl. microlensing) and the velocity dispersion\n",
    "\"\"\"\n",
    "\n",
    "# file name for each model that contains this output\n",
    "mcmc_file_name = '_diameter_distances.txt'\n",
    "new_compute = False  # if file exists, we do not need to re-compute this (optional)\n",
    "\n",
    "# Each posterior can be re-simple sampled by drawing other values of the PDFs of the data. This requires less computations \n",
    "# of the kinematics (expensive). This is valid as long as the smallest sample is representative of its PDF.\n",
    "num_resampling = 20\n",
    "\n",
    "# parameter names in computed chain\n",
    "labels_new = [ r\"$D_d$\", r\"$D_{\\Delta t}$\", r\"$r_{ani} / r_{eff}$\"]\n",
    "\n",
    "num_param_new = len(labels_new)\n",
    "i_Dd = 0 # index of Dd parameter\n",
    "i_DdDs_Dds = 1 # index of DdDs/Dds parameter (almost Ddt without (1+z))\n",
    "mcmc_new_list = []\n",
    "\n",
    "for k, job_name in enumerate(job_name_loaded_list):\n",
    "    mcmc_out_temp = job_name + mcmc_file_name\n",
    "    mcmc_out_temp = os.path.join(sample_folder, mcmc_out_temp)\n",
    "    if new_compute is True or not os.path.exists(mcmc_out_temp):\n",
    "        # get the right analysis classes for the kinematic computation\n",
    "        lensProp = lensProp_list[k]\n",
    "        # bug fix: this assignment used to be commented out, so the loop silently re-used whatever\n",
    "        # lensAnalysis instance was left over in the kernel from the previous cell (wrong model for k != last)\n",
    "        lensAnalysis = lensAnalysis_list[k]\n",
    "        mcmc_new = np.zeros((num_samples_read*num_resampling, num_param_new))\n",
    "        r_eff = r_eff_list[k]\n",
    "        \n",
    "        i = 0\n",
    "        for l in range(num_samples_read):\n",
    "            # read model parameters in lenstronomy conventions\n",
    "            kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps, kwargs_cosmo = kwargs_list[k][l]\n",
    "            # compute Fermat potential at the positions of the point sources\n",
    "            fermat_pot = lensAnalysis.fermat_potential(kwargs_lens, kwargs_ps)\n",
    "            # compute relative Fermat potential between images A and B (attention to the sign convention)\n",
    "            delta_fermat = fermat_pot[1] - fermat_pot[0]\n",
    "            # draw an anisotropy radius from the PDF specified\n",
    "            aniso_param = draw_anisotropy()\n",
    "            # set the anisotropy radius. r_eff is pre-computed half-light radius of the lens light\n",
    "            kwargs_anisotropy = {'r_ani': r_eff * aniso_param}\n",
    "            # compute the velocity dispersion in a pre-specified cosmology (see lenstronomy function)\n",
    "            # We read out the lens light model kwargs that has previously been solved for the linear amplitude parameters.\n",
    "            # This is necessary to perform an accurate kinematic estimate as we are using a superposition of different light profiles and their relative amplitudes matter.\n",
    "            vel_disp_temp = lensProp.velocity_dispersion_numerical(kwargs_lens, lens_light_result_list[k], kwargs_anisotropy, kwargs_aperture,\n",
    "                                                                       psf_fwhm, aperture_type, anisotropy_model, MGE_light=MGE_light, MGE_mass=MGE_mass, r_eff=r_eff,\n",
    "                                                                       kwargs_numerics=kwargs_galkin_numerics, lens_model_kinematics_bool=lensProp.kwargs_options['lens_model_deflector_bool'],\n",
    "                                                                       light_model_kinematics_bool=lensProp.kwargs_options['light_model_deflector_bool'])\n",
    "            # compute the predicted time delays in the SAME cosmology as the kinematic prediction\n",
    "            time_delay_temp = lensProp.time_delays(kwargs_lens, kwargs_ps, kappa_ext=0)\n",
    "            time_delay_temp = time_delay_temp[1] - time_delay_temp[0]\n",
    "            \n",
    "            # now we draw values of the time delay and velocity dispersion from the uncertainties given by the data\n",
    "            # we can do this multiple times to enhance the sampling\n",
    "            for m in range(num_resampling):\n",
    "                # the distance ratios that can be measured. Inputs are a realisation of measured time delays and kinematics and their model predictions \n",
    "                Ds_Dds, DdDs_Dds = lensProp.angular_distances(sigma_v_measured=draw_vel_disp(), time_delay_measured=draw_dt(), kappa_ext=0, sigma_v_modeled=vel_disp_temp, fermat_pot=delta_fermat)\n",
    "                # algebra to get D_d\n",
    "                Dd = DdDs_Dds/Ds_Dds\n",
    "                mcmc_new[i] = [Dd, DdDs_Dds, aniso_param]\n",
    "                i += 1\n",
    "        print(job_name_loaded_list[k])\n",
    "        # cache the distance chain so subsequent runs can skip the expensive kinematics\n",
    "        f = open(mcmc_out_temp,'wb')\n",
    "        pickle.dump(mcmc_new, f)\n",
    "        f.close()\n",
    "    f = open(mcmc_out_temp, 'rb')\n",
    "    mcmc_new = pickle.load(f)\n",
    "    f.close()\n",
    "    mcmc_new_list.append(mcmc_new)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## sub-selection of posteriors\n",
    "Previously we have computed the cosmographic posteriors of all the models separately.\n",
    "In the next section, we merge the posteriors in various ways (e.g. sorting by same model options, BIC selection etc)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "# reduced chi^2 thresholds used for the sub-selections below\n",
    "chi2_cut = 1.1\n",
    "chi2_cut_minimal = 2.5  # whenever a model is above this reduced chi2 value, we ignore it. This is just to make sure\n",
    "# that runs where something went wrong in the sampling are excluded.\n",
    "\n",
    "# boolean masks over the loaded models, one per selection criterion\n",
    "select_all = [True] * len(lens_type_list)\n",
    "select_chi2_list = [False] * len(lens_type_list)\n",
    "\n",
    "#select_BIC_power_law_list = np.zeros(len(lens_type_list), dtype=bool)\n",
    "#select_BIC_composite_list = np.zeros(len(lens_type_list), dtype=bool)\n",
    "\n",
    "# NOTE(review): this mask is a numpy bool array while the masks below are Python lists; both support\n",
    "# boolean indexing of numpy arrays, but `is True` identity checks only work on the Python lists\n",
    "select_composite_list = np.zeros(len(lens_type_list), dtype=bool)# [False] * len(lens_type_list)\n",
    "select_power_law_list = [False] * len(lens_type_list)\n",
    "select_foreground_list = [False] * len(lens_type_list)\n",
    "select_noforeground_list = [False] * len(lens_type_list)\n",
    "select_3_pert_list = [False] * len(lens_type_list)\n",
    "select_5_pert_list = [False] * len(lens_type_list)\n",
    "#select_8_pert_list = [False] * len(lens_type_list)\n",
    "select_source_0_list = [False] * len(lens_type_list)\n",
    "select_source_2_list = [False] * len(lens_type_list)\n",
    "select_source_5_list = [False] * len(lens_type_list)\n",
    "select_source_8_list = [False] * len(lens_type_list)\n",
    "select_mask_1_list = [False] * len(lens_type_list)\n",
    "select_mask_2_list = [False] * len(lens_type_list)\n",
    "select_power_law_5_list = [False] * len(lens_type_list)\n",
    "select_power_law_8_list = [False] * len(lens_type_list)\n",
    "select_composite_5_list = [False] * len(lens_type_list)\n",
    "select_composite_8_list = [False] * len(lens_type_list)\n",
    "\n",
    "\n",
    "\n",
    "# classify each model by its configuration options (deflector type, source basis, shear, perturbers, mask)\n",
    "for i, _ in enumerate(lens_type_list):\n",
    "    if chi2_list[i] < chi2_cut_minimal:\n",
    "        if lens_type_list[i][0] == 'SPEMD':\n",
    "            select_power_law_list[i] = True\n",
    "            if source_type_list[i] == 5:\n",
    "                select_power_law_5_list[i] = True\n",
    "            elif source_type_list[i] == 8:\n",
    "                select_power_law_8_list[i] = True\n",
    "        else:\n",
    "            select_composite_list[i] = True\n",
    "            if source_type_list[i] == 5:\n",
    "                select_composite_5_list[i] = True\n",
    "            elif source_type_list[i] == 8:\n",
    "                select_composite_8_list[i] = True\n",
    "        if chi2_list[i] < chi2_cut:\n",
    "            select_chi2_list[i] = True\n",
    "        if 'FOREGROUND_SHEAR' in lens_type_list[i]:\n",
    "            select_foreground_list[i] = True\n",
    "        else:\n",
    "            select_noforeground_list[i] = True\n",
    "        if perturber_number_list[i] == 4:\n",
    "            select_3_pert_list[i] = True\n",
    "        if perturber_number_list[i] == 6:\n",
    "            select_5_pert_list[i] = True\n",
    "        #if perturber_number_list[i] == 9:\n",
    "        #    select_8_pert_list[i] = True    \n",
    "        if source_type_list[i] == -1:\n",
    "            select_source_0_list[i] = True\n",
    "        if source_type_list[i] == 2:\n",
    "            select_source_2_list[i] = True\n",
    "        if source_type_list[i] == 5:\n",
    "            select_source_5_list[i] = True\n",
    "        if source_type_list[i] == 8:\n",
    "            select_source_8_list[i] = True\n",
    "        if mask_list[i] == 4296:  # 4296 identifies the 3.0\" mask (cf. selection_name_list below)\n",
    "            select_mask_1_list[i] = True\n",
    "        else:\n",
    "            select_mask_2_list[i] = True\n",
    "            \n",
    "            \n",
    "# list of all selection masks and human-readable names (the two lists are kept index-aligned)\n",
    "selection_list = [select_all, select_chi2_list, select_composite_list, select_power_law_list, select_foreground_list, select_noforeground_list, select_3_pert_list, select_5_pert_list, select_source_0_list, select_source_2_list, select_source_5_list, select_source_8_list, select_mask_1_list, select_mask_2_list, select_power_law_5_list, select_power_law_8_list, select_composite_5_list, select_composite_8_list]        \n",
    "selection_name_list = ['all samples', r'$\\chi^2$ cut', 'composite', 'power-law', 'foreground shear', 'simple shear','triplet', 'triplet+2', r'double Sersic', r'+ $n_{max} = 2$', r'+ $n_{max} = 5$', r'+ $n_{max} = 8$', '3.0\" mask', '3.2\" mask', r'PL $n_{max}=5$', r'PL $n_{max}=9$', r'composite $n_{max}=5$', r'composite $n_{max}=8$']   \n",
    "         \n",
    "            \n",
    "delta_BIC_cut = 0\n",
    "# BIC-ranked cuts: keep the best-N models of each deflector type for increasing N\n",
    "sample_size_cut_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 20, 50]   \n",
    "#select_BIC_list_list = [np.zeros(len(lens_type_list), dtype=bool)] * len(sample_size_cut_list)\n",
    "\n",
    "BIC_list = np.array(BIC_list)\n",
    "BIC_min_composite = np.min(BIC_list[select_composite_list])\n",
    "BIC_composite_sorted = np.sort(BIC_list[select_composite_list])\n",
    "#BIC_composite_cut = np.maximum(BIC_min_composite + delta_BIC_cut, BIC_composite_sorted[sample_size_cut])\n",
    "\n",
    "BIC_min_power_law = np.min(BIC_list[select_power_law_list])\n",
    "BIC_power_law_sorted = np.sort(BIC_list[select_power_law_list])\n",
    "#BIC_power_law_cut = np.maximum(BIC_min_power_law + delta_BIC_cut, BIC_power_law_sorted[sample_size_cut])\n",
    "print(BIC_min_power_law, BIC_min_composite)\n",
    "#print(BIC_power_law_cut, BIC_composite_cut)\n",
    "for k, sample_size_cut in enumerate(sample_size_cut_list):\n",
    "    select_BIC_list = np.zeros(len(lens_type_list), dtype=bool)\n",
    "    select_BIC_power_law_list = np.zeros(len(lens_type_list), dtype=bool)\n",
    "    select_BIC_composite_list = np.zeros(len(lens_type_list), dtype=bool)\n",
    "    # NOTE(review): direct indexing of the sorted BIC arrays raises an IndexError when sample_size_cut\n",
    "    # is >= the number of models of that type (e.g. the cut of 50) -- confirm enough models are loaded\n",
    "    BIC_power_law_cut = BIC_power_law_sorted[sample_size_cut]\n",
    "    BIC_composite_cut = BIC_composite_sorted[sample_size_cut]\n",
    "    print(BIC_power_law_cut, BIC_composite_cut)\n",
    "    for i, _ in enumerate(lens_type_list):\n",
    "        if select_power_law_list[i] is True:\n",
    "            if BIC_list[i] < BIC_power_law_cut:\n",
    "                select_BIC_list[i] = True\n",
    "                select_BIC_power_law_list[i] = True\n",
    "        else:\n",
    "            if BIC_list[i] < BIC_composite_cut:\n",
    "                select_BIC_list[i] = True \n",
    "                select_BIC_composite_list[i] = True\n",
    "    selection_list.append(select_BIC_list)\n",
    "    selection_name_list.append('BIC cut ' +str(sample_size_cut))\n",
    "    selection_list.append(select_BIC_composite_list)\n",
    "    selection_name_list.append('BIC composite cut ' +str(sample_size_cut))\n",
    "    selection_list.append(select_BIC_power_law_list)\n",
    "    selection_name_list.append('BIC power-law cut ' +str(sample_size_cut))\n",
    "# names for the BIC-weighted posterior samples; the matching sample arrays are appended to the\n",
    "# corresponding sample lists in a later cell, which keeps name and sample lists index-aligned\n",
    "selection_name_list.append('BIC power-law')\n",
    "selection_name_list.append('BIC composite')\n",
    "selection_name_list.append('BIC combined')\n",
    "\n",
    "selection_name_list.append('BIC nmax=5 power-law')\n",
    "selection_name_list.append('BIC nmax=5 composite')\n",
    "selection_name_list.append('BIC nmax=5 combined')\n",
    "\n",
    "selection_name_list.append('BIC nmax=8 power-law')\n",
    "selection_name_list.append('BIC nmax=8 composite')\n",
    "selection_name_list.append('BIC nmax=8 combined')\n",
    "\n",
    "\n",
    "    \n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def find_pairs_of_equal_models():\n",
    "    \"\"\"\n",
    "    find identical selection criteria and group them accordingly in lists\n",
    "    [[i_select, j_select], [...], ...]\n",
    "    the selection criteria are:\n",
    "    - mask (2)\n",
    "    - main deflector type (2)\n",
    "    - perturber number (3)\n",
    "    - source type (4)\n",
    "    - shear model (2)\n",
    "    leading to 84 distinct pairs\n",
    "    \n",
    "    Uses the module-level selection masks (select_all, select_mask_1_list, ...) defined in the previous cell.\n",
    "    \"\"\"\n",
    "    twin_list = []\n",
    "    _selected_list = []  # indices already assigned to a twin group\n",
    "    for i in range(len(select_all)):\n",
    "        if i in _selected_list:  # the i'th model is already assigned as a twin\n",
    "            pass\n",
    "        else:\n",
    "            twins = [i]\n",
    "            _selected_list.append(i)\n",
    "            for j in range(i+1, len(select_all)):\n",
    "                if select_mask_1_list[i] == select_mask_1_list[j] and select_power_law_list[i] == select_power_law_list[j] and perturber_number_list[i] == perturber_number_list[j] and source_type_list[i] == source_type_list[j] and select_foreground_list[i] == select_foreground_list[j]:\n",
    "                    # NOTE(review): only composite models with BIC < 5500 are grouped as twins;\n",
    "                    # the magic number 5500 and the power-law exclusion look like a deliberate quality cut -- confirm\n",
    "                    if BIC_list[i] < 5500 and select_power_law_list[i] is False:\n",
    "                        twins.append(j)\n",
    "                        _selected_list.append(j)\n",
    "            twin_list.append(twins)\n",
    "    return twin_list\n",
    "\n",
    "twin_list = find_pairs_of_equal_models()\n",
    "#print twin_list\n",
    "\n",
    "def delta_BIC_twin_compute(twin_list, BIC_list):\n",
    "    \"\"\"\n",
    "    if a pair is listed, reads the BIC values and computes the difference\n",
    "    Only exact pairs (twin groups of size 2) contribute.\n",
    "    \"\"\"\n",
    "    delta_BIC_list = []\n",
    "    for twin in twin_list:\n",
    "        if len(twin) == 2:\n",
    "            delta_BIC = BIC_list[twin[0]] - BIC_list[twin[1]]\n",
    "            delta_BIC_list.append(delta_BIC)\n",
    "    return delta_BIC_list\n",
    "    \n",
    "delta_BIC_twin = delta_BIC_twin_compute(twin_list, BIC_list)\n",
    "#print delta_BIC_twin\n",
    "# clip outliers and symmetrize the distribution before estimating the BIC scatter\n",
    "delta_BIC_twin = np.array(delta_BIC_twin)\n",
    "delta_BIC_twin = delta_BIC_twin[np.abs(delta_BIC_twin) < 1000]\n",
    "delta_BIC_twin = np.append(delta_BIC_twin, -delta_BIC_twin)\n",
    "plt.hist(delta_BIC_twin)\n",
    "plt.show()\n",
    "\n",
    "print(np.mean(delta_BIC_twin), np.std(delta_BIC_twin))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# BIC scatter between twin models; /sqrt(2) because the measured quantity is the difference of two equally scattered values\n",
    "sigma_BIC = np.std(delta_BIC_twin) / np.sqrt(2)\n",
    "# Python 3 print function (the original used the Python 2 print statement, a SyntaxError under Python 3)\n",
    "print(sigma_BIC, 'sigma_BIC')\n",
    "\n",
    "def BIC_weighted_posteriors(BIC_list, mcmc_list, sigma_BIC, split_models=True, bool_list=None):\n",
    "    \"\"\"\n",
    "    weights samples according to BIC_criteria\n",
    "    \n",
    "    :param BIC_list: array of BIC values, one per model\n",
    "    :param mcmc_list: list of distance posterior sample arrays, one per model (same ordering as BIC_list)\n",
    "    :param sigma_BIC: Gaussian scatter assigned to the BIC values (numerical convolution)\n",
    "    :param split_models: if True, the reference BIC minimum is taken separately for power-law and composite models\n",
    "    :param bool_list: optional boolean mask restricting which models enter; default: all models\n",
    "    :return: BIC-weighted Dd, DdDs/Dds and kappa_ext samples for power-law and composite models (resampled to equal size), and the list of acceptance ratios\n",
    "    \n",
    "    Relies on module-level state: i_Dd, i_DdDs_Dds, select_power_law_list, select_composite_list,\n",
    "    mcmc_compressed_list and num_resampling.\n",
    "    \"\"\"\n",
    "    if bool_list is None:\n",
    "        bool_list = [True]*len(BIC_list)\n",
    "    num = 4000  # number of draws to compute the numerical convolution\n",
    "    delta_BIC_norm = np.random.normal(loc=0, scale=sigma_BIC, size=num)\n",
    "    p_norm = (np.sum(np.exp(-delta_BIC_norm[delta_BIC_norm > 0]/2.)) + len(delta_BIC_norm[delta_BIC_norm <= 0])) / num\n",
    "    Dd_list_power_law = []\n",
    "    Dd_ds_dds_list_power_law = []\n",
    "    Dd_list_composite = []\n",
    "    Dd_ds_dds_list_composite = []\n",
    "    Dd_ds_dds_kappa_ext_weight_list_pl = []\n",
    "    Dd_ds_dds_kappa_ext_weight_list_comp = []\n",
    "    acceptance_ratio_list = []\n",
    "    # bug fix: iterate over the mcmc_list argument instead of the global mcmc_new_list\n",
    "    # (the previous code only worked because the callers happened to pass mcmc_new_list)\n",
    "    for i, samples in enumerate(mcmc_list):\n",
    "        if bool_list[i] is True:\n",
    "            Dd = samples[:,i_Dd]\n",
    "            DdDs_Dds = samples[:,i_DdDs_Dds]\n",
    "            num = len(Dd)  # sample size of this model (intentionally shadows the draw count above)\n",
    "            if split_models is True:  # we apply different BIC weights for composite and SPEMD models\n",
    "                if select_power_law_list[i] is True:\n",
    "                    BIC_min = np.min(BIC_list[np.array(bool_list) & np.array(select_power_law_list)])\n",
    "                else:\n",
    "                    BIC_min = np.min(BIC_list[np.array(bool_list) & np.array(select_composite_list)])\n",
    "            else:\n",
    "                BIC_min = np.min(BIC_list)\n",
    "            # BIC difference with Gaussian scatter; acceptance probability exp(-delta_BIC/2), capped at 1\n",
    "            delta_BIC = BIC_list[i] - BIC_min + np.random.normal(loc=0, scale=sigma_BIC, size=num)\n",
    "            p = np.exp(-delta_BIC[delta_BIC > 0]/2.)\n",
    "            n_select =  int((np.sum(p) + len(delta_BIC[delta_BIC <= 0])) / p_norm)\n",
    "            acceptance_ratio_list.append(n_select/float(num))\n",
    "            idex = np.random.choice(num, n_select)\n",
    "            # external convergence of the perturbers, repeated to match the resampled chain length\n",
    "            kappa_ext_pert = mcmc_compressed_list[i][:,8]\n",
    "            kappa_ext_pert = np.repeat(kappa_ext_pert, num_resampling)\n",
    "            if select_power_law_list[i] is True:\n",
    "                Dd_list_power_law = np.append(Dd_list_power_law, Dd[idex])\n",
    "                Dd_ds_dds_list_power_law = np.append(Dd_ds_dds_list_power_law, DdDs_Dds[idex])\n",
    "                Dd_ds_dds_kappa_ext_weight_list_pl = np.append(Dd_ds_dds_kappa_ext_weight_list_pl, kappa_ext_pert[idex])\n",
    "            else:\n",
    "                Dd_list_composite = np.append(Dd_list_composite, Dd[idex])\n",
    "                Dd_ds_dds_list_composite = np.append(Dd_ds_dds_list_composite, DdDs_Dds[idex])\n",
    "                Dd_ds_dds_kappa_ext_weight_list_comp = np.append(Dd_ds_dds_kappa_ext_weight_list_comp, kappa_ext_pert[idex])\n",
    "    # resample both model families to the size of the smaller one (equal weight between families)\n",
    "    num_pl = len(Dd_list_power_law)\n",
    "    num_comp = len(Dd_list_composite)\n",
    "    num_min = np.minimum(num_pl, num_comp)\n",
    "    idex_pl = np.random.choice(num_pl, num_min)\n",
    "    idex_comp = np.random.choice(num_comp, num_min)\n",
    "    return Dd_list_power_law[idex_pl], Dd_ds_dds_list_power_law[idex_pl], Dd_ds_dds_kappa_ext_weight_list_pl[idex_pl], Dd_list_composite[idex_comp], Dd_ds_dds_list_composite[idex_comp], Dd_ds_dds_kappa_ext_weight_list_comp[idex_comp], acceptance_ratio_list\n",
    "\n",
    "Dd_weight_list_pl, Dd_ds_dds_weight_list_pl, Dd_ds_dds_kappa_ext_weight_list_pl, Dd_weight_list_comp, Dd_ds_dds_weight_list_comp, Dd_ds_dds_kappa_ext_weight_list_comp, acceptance_ratio_list = BIC_weighted_posteriors(BIC_list, mcmc_new_list, sigma_BIC=sigma_BIC)\n",
    "Dd_weight_list = np.append(Dd_weight_list_pl, Dd_weight_list_comp)\n",
    "Dd_ds_dds_weight_list = np.append(Dd_ds_dds_weight_list_pl, Dd_ds_dds_weight_list_comp)\n",
    "Dd_ds_dds_kappa_ext_weight_list = np.append(Dd_ds_dds_kappa_ext_weight_list_pl, Dd_ds_dds_kappa_ext_weight_list_comp)\n",
    "print(len(Dd_weight_list))\n",
    "\n",
    "Dd_samples_list = []\n",
    "Dd_ds_dds_samples_list = []\n",
    "Dd_ds_dds_samples_kappa_ext_list = []\n",
    "for i, selection in enumerate(selection_list):\n",
    "    Dd_list = []\n",
    "    Dd_ds_dds_list = []\n",
    "    kappa_pert_list = []\n",
    "    for k, samples in enumerate(mcmc_new_list):\n",
    "        if selection[k] == True:\n",
    "            Dd_list = np.append(Dd_list, samples[:,i_Dd])\n",
    "            mcmc_param = mcmc_compressed_list[k]\n",
    "            Dd_ds_dds_list = np.append(Dd_ds_dds_list, samples[:,i_DdDs_Dds])\n",
    "            kappa_pert_list = np.append(kappa_pert_list, mcmc_param[:,8])\n",
    "    Dd_samples_list.append(Dd_list)\n",
    "    Dd_ds_dds_samples_list.append(Dd_ds_dds_list)\n",
    "    Dd_ds_dds_samples_kappa_ext_list.append(kappa_pert_list)\n",
    "    \n",
    "    \n",
    "Dd_samples_list.append(Dd_weight_list_pl)\n",
    "Dd_samples_list.append(Dd_weight_list_comp)\n",
    "Dd_samples_list.append(Dd_weight_list)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_pl)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_comp)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_pl)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_comp)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list)\n",
    "\n",
    "\n",
    "Dd_weight_list_pl_5, Dd_ds_dds_weight_list_pl_5, Dd_ds_dds_kappa_ext_weight_list_pl_5, Dd_weight_list_comp_5, Dd_ds_dds_weight_list_comp_5, Dd_ds_dds_kappa_ext_weight_list_comp_5, acceptance_ratio_list_5 = BIC_weighted_posteriors(BIC_list, mcmc_new_list, sigma_BIC=sigma_BIC, bool_list=select_source_5_list)\n",
    "Dd_weight_list_pl_8, Dd_ds_dds_weight_list_pl_8, Dd_ds_dds_kappa_ext_weight_list_pl_8, Dd_weight_list_comp_8, Dd_ds_dds_weight_list_comp_8, Dd_ds_dds_kappa_ext_weight_list_comp_8, acceptance_ratio_list_8 = BIC_weighted_posteriors(BIC_list, mcmc_new_list, sigma_BIC=sigma_BIC, bool_list=select_source_8_list)\n",
    "\n",
    "Dd_ds_dds_weight_list_5 = np.append(Dd_ds_dds_weight_list_pl_5, Dd_ds_dds_weight_list_comp_5)\n",
    "Dd_ds_dds_weight_list_8 = np.append(Dd_ds_dds_weight_list_pl_8, Dd_ds_dds_weight_list_comp_8)\n",
    "Dd_weight_list_5 = np.append(Dd_weight_list_pl_5, Dd_weight_list_comp_5)\n",
    "Dd_weight_list_8 = np.append(Dd_weight_list_pl_8, Dd_weight_list_comp_8)\n",
    "Dd_ds_dds_kappa_ext_weight_list_5 = np.append(Dd_ds_dds_kappa_ext_weight_list_pl_5, Dd_ds_dds_kappa_ext_weight_list_comp_5)\n",
    "Dd_ds_dds_kappa_ext_weight_list_8 = np.append(Dd_ds_dds_kappa_ext_weight_list_pl_8, Dd_ds_dds_kappa_ext_weight_list_comp_8)\n",
    "\n",
    "\n",
    "\n",
    "Dd_samples_list.append(Dd_weight_list_pl_5)\n",
    "Dd_samples_list.append(Dd_weight_list_comp_5)\n",
    "Dd_samples_list.append(Dd_weight_list_5)\n",
    "Dd_samples_list.append(Dd_weight_list_pl_8)\n",
    "Dd_samples_list.append(Dd_weight_list_comp_8)\n",
    "Dd_samples_list.append(Dd_weight_list_8)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_pl_5)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_comp_5)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_5)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_pl_8)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_comp_8)\n",
    "Dd_ds_dds_samples_list.append(Dd_ds_dds_weight_list_8)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_pl_5)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_comp_5)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_5)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_pl_8)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_comp_8)\n",
    "Dd_ds_dds_samples_kappa_ext_list.append(Dd_ds_dds_kappa_ext_weight_list_8)\n",
    "\n",
    "\n",
    "\n",
    "mcmc_joint_list = []\n",
    "plotting_undersampling = 1\n",
    "print(len(mcmc_compressed_list), 'test')\n",
    "for i, selection in enumerate(selection_list):\n",
    "    mcmc_list = []\n",
    "    for k, samples in enumerate(mcmc_compressed_list):  #mcmc_new_list\n",
    "        if selection[k] == True:\n",
    "            if mcmc_list == []:\n",
    "                mcmc_list = samples[::plotting_undersampling]\n",
    "            else:\n",
    "                mcmc_list = np.append(mcmc_list, samples[::plotting_undersampling], axis=0)\n",
    "    mcmc_joint_list.append(mcmc_list)\n",
    "\n",
    "\n",
    "# and now we add the big weighted samples\n",
    "def BIC_select_param(bool_list, split_models=True):\n",
    "    \"\"\"\n",
    "    BIC-weighted resampling of the compressed lens model parameter chains (mcmc_compressed_list),\n",
    "    analogous to BIC_weighted_posteriors but for the derived lens model parameters.\n",
    "    \n",
    "    :param bool_list: boolean mask of which models enter the weighting\n",
    "    :param split_models: if True, use separate reference BIC minima for power-law and composite models\n",
    "    :return: (power-law samples, composite samples), resampled to equal size\n",
    "    \n",
    "    Relies on module-level state: sigma_BIC, BIC_list, mcmc_compressed_list,\n",
    "    plotting_undersampling, select_power_law_list and select_composite_list.\n",
    "    \"\"\"\n",
    "    mcmc_list_pl = []\n",
    "    mcmc_list_comp = []\n",
    "    num = 4000  # number of draws for the numerical convolution of the BIC scatter\n",
    "    delta_BIC_norm = np.random.normal(loc=0, scale=sigma_BIC, size=num)\n",
    "    p_norm = (np.sum(np.exp(-delta_BIC_norm[delta_BIC_norm > 0]/2.)) + len(delta_BIC_norm[delta_BIC_norm <= 0])) / num\n",
    "    for i, samples in enumerate(mcmc_compressed_list):\n",
    "        if bool_list[i] is True:\n",
    "            samples_small = samples[::plotting_undersampling]\n",
    "            num = len(samples_small)  # sample size of this model (intentionally shadows the draw count above)\n",
    "            if split_models is True:\n",
    "                if select_power_law_list[i] is True:\n",
    "                    BIC_min = np.min(BIC_list[np.array(bool_list) & np.array(select_power_law_list)])\n",
    "                else:\n",
    "                    BIC_min = np.min(BIC_list[np.array(bool_list) & np.array(select_composite_list)])\n",
    "            else:\n",
    "                BIC_min = np.min(BIC_list)\n",
    "            # BIC difference with Gaussian scatter; acceptance probability exp(-delta_BIC/2), capped at 1\n",
    "            delta_BIC = BIC_list[i] - BIC_min + np.random.normal(loc=0, scale=sigma_BIC, size=num)\n",
    "            p = np.exp(-delta_BIC[delta_BIC > 0]/2.)\n",
    "            n_select =  int((np.sum(p) + len(delta_BIC[delta_BIC <= 0])) / p_norm)\n",
    "            idex = np.random.choice(num, n_select)\n",
    "            if select_power_law_list[i] is True:\n",
    "                if len(mcmc_list_pl) == 0:\n",
    "                    mcmc_list_pl = samples_small[idex]\n",
    "                else:\n",
    "                    mcmc_list_pl = np.append(mcmc_list_pl, samples_small[idex], axis=0)\n",
    "            else:\n",
    "                if len(mcmc_list_comp) == 0:\n",
    "                    mcmc_list_comp = samples_small[idex]\n",
    "                else:\n",
    "                    mcmc_list_comp = np.append(mcmc_list_comp, samples_small[idex], axis=0)\n",
    "    # resample both model families to the size of the smaller one (equal weight between families)\n",
    "    num_pl = len(mcmc_list_pl)\n",
    "    num_comp = len(mcmc_list_comp)\n",
    "    num_min = np.minimum(num_pl, num_comp)\n",
    "    print(num_min, 'test')  # Python 3 print (was a Python 2 print statement)\n",
    "    idex_pl = np.random.choice(num_pl, num_min)\n",
    "    idex_comp = np.random.choice(num_comp, num_min)\n",
    "    return mcmc_list_pl[idex_pl], mcmc_list_comp[idex_comp]\n",
    "   \n",
    "mcmc_list_pl, mcmc_list_comp = BIC_select_param(select_all)\n",
    "mcmc_list_bic_all = np.append(mcmc_list_comp, mcmc_list_pl, axis=0)\n",
    "mcmc_joint_list.append(mcmc_list_pl)\n",
    "mcmc_joint_list.append(mcmc_list_comp)\n",
    "mcmc_joint_list.append(mcmc_list_bic_all)\n",
    "\"\"\"\n",
    "\n",
    "54 BIC power-law\n",
    "55 BIC composite\n",
    "56 BIC combined\n",
    "57 BIC nmax=5 power-law\n",
    "58 BIC nmax=5 composite\n",
    "59 BIC nmax=5 combined\n",
    "60 BIC nmax=8 power-law\n",
    "61 BIC nmax=8 composite\n",
    "62 BIC nmax=8 combined\n",
    "\"\"\"\n",
    "\n",
    "# joint posterior samples of the distance chains (mcmc_new_list), thinned for plotting\n",
    "mcmc_joint_list_old = []\n",
    "plotting_undersampling = 100\n",
    "for i, selection in enumerate(selection_list):\n",
    "    mcmc_list = []\n",
    "    for k, samples in enumerate(mcmc_new_list):\n",
    "        if selection[k]:  # truthiness check: works for both Python bools and numpy bools\n",
    "            if len(mcmc_list) == 0:\n",
    "                mcmc_list = samples[::plotting_undersampling]\n",
    "            else:\n",
    "                mcmc_list = np.append(mcmc_list, samples[::plotting_undersampling], axis=0)\n",
    "    mcmc_joint_list_old.append(mcmc_list)\n",
    "\n",
    "# overview of the joint sample size per selection\n",
    "for i, sample in enumerate(mcmc_joint_list):\n",
    "    print(len(sample), selection_name_list[i])  # Python 3 print (was a Python 2 print statement)\n",
    "# color contours\n",
    "color_scale_list=[\"Blues\", \"Greens\", \"Purples\", \"Oranges\", \"Reds\", \"BuPu\", \"Greys\"]\n",
    "color_list = ['b', 'g', 'purple', 'orange', 'r', 'k']\n",
    "\n",
    "# blind the angular diameter distance constraints for plotting\n",
    "i_Ddt = 1\n",
    "i_Dd = 0\n",
    "Dd_sub = np.median(Dd_samples_list[0])\n",
    "Dd_ds_dds_sub = np.median(Dd_ds_dds_samples_list[0])\n",
    "\n",
    "# express the blinded distances as fractional deviations from the median of the 'all samples' selection\n",
    "for mcmc_list in mcmc_joint_list_old:\n",
    "    mcmc_list[:, i_Ddt] = (mcmc_list[:, i_Ddt] - Dd_ds_dds_sub) / Dd_ds_dds_sub\n",
    "    mcmc_list[:, i_Dd] = (mcmc_list[:, i_Dd] - Dd_sub) / Dd_sub\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# specific combinations of the posteriors we save for plotting and analysis purposes\n",
    "\n",
    "def save_posterior(obj, folder, filename):\n",
    "    \"\"\"Pickle `obj` to <base_path>/<folder>/<filename>.\n",
    "\n",
    "    The context manager guarantees the file handle is closed even if pickling fails.\n",
    "    \"\"\"\n",
    "    with open(os.path.join(base_path, folder, filename), 'wb') as f:\n",
    "        pickle.dump(obj, f)\n",
    "\n",
    "# for those selections we save the lens model posteriors\n",
    "# (keys are output file names, values index mcmc_joint_list; see the index key above)\n",
    "lens_model_exports = {\n",
    "    'mcmc_all.txt': 0,\n",
    "    'mcmc_power_law.txt': 3,\n",
    "    'mcmc_composite.txt': 2,\n",
    "    'mcmc_bic.txt': 56,\n",
    "}\n",
    "for filename, i_save in lens_model_exports.items():\n",
    "    save_posterior(mcmc_joint_list[i_save], 'Plotting', filename)\n",
    "\n",
    "# for those selections we save the cosmographic posteriors\n",
    "# (keys are output file names, values index the Dd / Dd*Ds/Dds sample lists)\n",
    "cosmo_exports = {\n",
    "    'Dd_Ddt_all': 0,\n",
    "    'Dd_Ddt_bic': 56,\n",
    "    'Dd_Ddt_bic_power_law': 54,\n",
    "    'Dd_Ddt_bic_composite': 55,\n",
    "    'Dd_Ddt_composite': 2,\n",
    "    'Dd_Ddt_power_law': 3,\n",
    "    'Dd_Ddt_foreground_shear': 4,\n",
    "    'Dd_Ddt_simple_shear': 5,\n",
    "    'Dd_Ddt_triplet': 6,\n",
    "    'Dd_Ddt_triplet_2': 7,\n",
    "    'Dd_Ddt_double_sersic': 8,\n",
    "    'Dd_Ddt_double_sersic_2': 9,\n",
    "    'Dd_Ddt_double_sersic_5': 10,\n",
    "    'Dd_Ddt_double_sersic_8': 11,\n",
    "    'Dd_Ddt_mask_30': 12,\n",
    "    'Dd_Ddt_mask_32': 13,\n",
    "}\n",
    "for filename, i_save in cosmo_exports.items():\n",
    "    save_posterior([Dd_samples_list[i_save], Dd_ds_dds_samples_list[i_save]], 'Plotting', filename)\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# and specific samples we save as \"final versions\" of the selected posteriors, including their external convergence corrections\n",
    "\n",
    "i_final = 51  # index of the chain selected as the final posterior\n",
    "print(selection_name_list[i_final])\n",
    "\n",
    "Dd_ds_dds_samples_final = Dd_ds_dds_samples_list[i_final]\n",
    "Dd_samples_final = Dd_samples_list[i_final]\n",
    "kappa_pert_final = Dd_ds_dds_samples_kappa_ext_list[i_final]\n",
    "file_name = 'test.txt'\n",
    "# context manager closes the file even if pickling raises\n",
    "with open(os.path.join(base_path, 'Posteriors', file_name), 'wb') as f:\n",
    "    pickle.dump([Dd_ds_dds_samples_final, Dd_samples_final, kappa_pert_final], f)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# make sorted list according to BIC value as plotted in the appendix of Birrer et al. 2018\n",
    "idex_BIC_sorted = np.argsort(BIC_list)\n",
    "# split the BIC-sorted model indices by deflector model family\n",
    "idex_BIC_pl_sorted = []  # power-law models, best BIC first\n",
    "idex_BIC_comp_sorted = []  # composite models, best BIC first\n",
    "for idex in idex_BIC_sorted:\n",
    "    if select_power_law_list[idex]:\n",
    "        idex_BIC_pl_sorted.append(idex)\n",
    "    else:\n",
    "        idex_BIC_comp_sorted.append(idex)\n",
    "\n",
    "print(idex_BIC_pl_sorted)\n",
    "print(len(Dd_ds_dds_samples_list))\n",
    "\n",
    "# print quantities\n",
    "def print_model_type(i_model, table_style=False, i_min_BIC=0):\n",
    "    \"\"\"Print the model configuration and BIC statistics of one MCMC chain.\n",
    "\n",
    "    :param i_model: index of the mcmc chain in the model configuration lists\n",
    "    :param table_style: if True, print a LaTeX table row; otherwise plain text\n",
    "    :param i_min_BIC: index of the reference chain used for the fractional D_dt offset\n",
    "    \"\"\"\n",
    "    if select_power_law_list[i_model]:\n",
    "        lens_type = 'SPEMD\\\\_SERSIC'  # underscore escaped for LaTeX\n",
    "        BIC_min = BIC_min_power_law\n",
    "    else:\n",
    "        lens_type = 'COMPOSITE'\n",
    "        BIC_min = BIC_min_composite\n",
    "    if select_3_pert_list[i_model]:\n",
    "        pert_type = 'TRIPLET'\n",
    "    else:\n",
    "        pert_type = 'TRIPLET+2'\n",
    "    if select_mask_1_list[i_model]:\n",
    "        mask_type = '3.0\"'\n",
    "    else:\n",
    "        mask_type = '3.2\"'\n",
    "    # n_max of the shapelet source expansion; -1 means Sersic-only source\n",
    "    n_max = source_type_list[i_model]\n",
    "    if n_max > -1:\n",
    "        source_type = 'SERSIC + n$_{max}$=' + str(n_max)\n",
    "    else:\n",
    "        source_type = 'SERSIC'\n",
    "    if select_foreground_list[i_model]:\n",
    "        shear_type = 'FOREGROUND'\n",
    "    else:\n",
    "        shear_type = 'SIMPLE'\n",
    "    delta_BIC = BIC_list[i_model] - BIC_min  # BIC relative to the best model of the same family\n",
    "    acceptance_ratio = acceptance_ratio_list[i_model]\n",
    "    # fractional D_dt offset of this chain relative to the reference chain\n",
    "    Ddt_mean = np.mean(mcmc_new_list[i_min_BIC][:, i_Ddt])\n",
    "    delta_Ddt = (np.mean(mcmc_new_list[i_model][:, i_Ddt]) - Ddt_mean) / Ddt_mean\n",
    "    if table_style is True:\n",
    "        # LaTeX table row (terminated by a double backslash); the stray\n",
    "        # `% delta_Ddt` on the final literal was removed — it raised TypeError\n",
    "        print(lens_type + ' & ' + source_type + ' & ' + pert_type + ' & ' + shear_type + ' & ' + mask_type + ' & ' + str(int(BIC_list[i_model])) + ' & ' + str(int(delta_BIC)) + ' & ' + str(acceptance_ratio) + ' & ' + '%.3f' % delta_Ddt + ' \\\\\\\\')\n",
    "    else:\n",
    "        print(lens_type, source_type, pert_type, shear_type, mask_type, int(BIC_list[i_model]), int(delta_BIC), acceptance_ratio)\n",
    "\n",
    "# LaTeX table of the power-law models, sorted by BIC (best first);\n",
    "# assumes 64 power-law configurations — TODO confirm len(idex_BIC_pl_sorted)\n",
    "for i in range(64):\n",
    "    print_model_type(idex_BIC_pl_sorted[i], table_style=True, i_min_BIC=idex_BIC_pl_sorted[0])\n",
    "print('=======')\n",
    "#for i in range(64):\n",
    "#    print_model_type(idex_BIC_comp_sorted[i], table_style=True)\n",
    "\n",
    "print(np.sum(acceptance_ratio_list), \"effective number of samples\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
