Merge branch 'master' into component_tests
* master:
  fix pandas to 0.23.4 for the time being (cta-observatory#937)
  slimmed down some notebooks and removed poor-quality ones (cta-observatory#933)
  clean up environment.yml (cta-observatory#932)
  Print duration of notebooks on travis (cta-observatory#928)
watsonjj committed Jan 28, 2019
2 parents f6614c2 + 5d57cb1 commit 3539a3d
Showing 18 changed files with 511 additions and 3,552 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -81,3 +81,6 @@ distribute-*.tar.gz
*.pstats
target
.mypy_cache

examples/notebooks/*.html
examples/notebooks/*.png
12 changes: 6 additions & 6 deletions environment.yml
@@ -1,15 +1,16 @@
# A conda environment with all useful package for ctapipe developers
name: cta-dev
channels:
- default
- cta-observatory
- conda-forge
dependencies:
- ctapipe-extra=0.2.16
- zeromq
- pyzmq>=17 # needed for correct function of notebooks on OSX
- astropy
- bokeh
- cython
- gammapy
- conda-forge::gammapy
- graphviz
- h5py
- iminuit
@@ -20,15 +21,14 @@ dependencies:
- numba
- numpy>=1.15.4
- numpydoc
- pandas
- pyhessio
- pandas=0.23.4
- pytest
- pytest-cov
- psutil
- scikit-learn
- scipy
- setuptools
- sphinx=1.7
- sphinx
- sphinx_rtd_theme
- sphinx-automodapi
- traitlets
@@ -42,4 +42,4 @@ dependencies:
- pip:
- pytest_runner
- eventio==0.11.0
- https://github.com/cta-observatory/ctapipe-extra/archive/v0.2.16.tar.gz
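For context: this merge pins pandas to 0.23.4 in environment.yml (cta-observatory#937). A minimal, illustrative check that an activated cta-dev environment actually picked up the pin might look like this; the snippet is not part of the commit:

```python
# Illustrative sketch: confirm the pandas pin from environment.yml (0.23.4)
# is what got installed in the active cta-dev environment.
import pandas as pd

expected = "0.23.4"
if pd.__version__ != expected:
    raise RuntimeError(f"expected pandas {expected}, found {pd.__version__}")
print(f"pandas {pd.__version__} matches the pin in environment.yml")
```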

10 changes: 2 additions & 8 deletions examples/notebooks/2018_lst_analysis_bootcamp.ipynb
@@ -1248,13 +1248,7 @@
" Hmax: 7547 m\n",
" CoreX: -308.8 m\n",
" CoreY: -1091.5 m\n",
"Id: 95, E = 1.620 TeV, Telescopes: 3\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Id: 95, E = 1.620 TeV, Telescopes: 3\n",
" Alt: 68.61°\n",
" Az: -7.34°\n",
" Hmax: 7955 m\n",
@@ -2262,7 +2256,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.7.1"
}
},
"nbformat": 4,
86 changes: 31 additions & 55 deletions examples/notebooks/HDF5 Images in TableDataset.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -21,7 +21,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -31,7 +31,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -44,7 +44,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
@@ -55,11 +55,11 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"maxevents = 30 # max number of events to save in hdf5 file\n",
"maxevents = 2 # max number of events to save in hdf5 file\n",
"imagestels = {} # tels dict where 2d images will be stored\n",
"imagesevts = {} # event dict where 2d images will be stored\n",
"count = 1"
@@ -74,19 +74,11 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"29it [01:04, 2.22s/it]"
]
}
],
"outputs": [],
"source": [
"for event in tqdm(source):\n",
"\n",
@@ -130,7 +122,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": null,
"metadata": {
"scrolled": false
},
@@ -160,7 +152,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": null,
"metadata": {
"scrolled": true
},
@@ -172,26 +164,11 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": null,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "2ae4c0b1ad3f4f15a5fdf0f1c5b4f17d",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"interactive(children=(IntSlider(value=2, description='event', max=5), Output()), _dom_classes=('widget-interac…"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"outputs": [],
"source": [
"# datacube = load_hdf5_data_from_somewhere() # assuming shape is (N, X, Y)\n",
"# no need to load fiie \n",
@@ -212,7 +189,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": null,
"metadata": {
"scrolled": true
},
@@ -224,34 +201,33 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "4f4cde501c014c029f061de96f9d2af3",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"interactive(children=(IntSlider(value=8, description='tel', max=16), Output()), _dom_classes=('widget-interact…"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"outputs": [],
"source": [
"# datacube = load_hdf5_data_from_somewhere() # assuming shape is (N, X, Y)\n",
"# no need to load fiie \n",
"datacube = np.array(imagesevts['31012'])\n",
"datacube = np.array(imagesevts['23703'])\n",
"\n",
"@interact\n",
"def displaycube(tel=(0, len(datacube)-1)):\n",
" plt.imshow(datacube[tel])\n",
" plt.show()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand All @@ -270,7 +246,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.5"
"version": "3.6.8"
},
"toc": {
"nav_menu": {
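The cells above use ipywidgets' `interact` to scrub through a stack of camera images with a slider. A self-contained sketch of that viewer pattern, with random data standing in for the `imagesevts` arrays built in the notebook, could look like this:

```python
# Standalone sketch of the notebook's interactive datacube viewer.
# Random data substitutes for the per-event image stacks (imagesevts);
# shape (N, X, Y) matches the layout the notebook assumes.
import numpy as np
import matplotlib.pyplot as plt
from ipywidgets import interact

datacube = np.random.rand(17, 40, 40)  # 17 telescope images of 40x40 pixels

@interact
def displaycube(tel=(0, len(datacube) - 1)):
    # tel becomes an IntSlider from 0 to N-1; show the selected image
    plt.imshow(datacube[tel])
    plt.show()
```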
(Diffs for the remaining changed files were not loaded.)
