all notebooks from rc4, exactly
parent
bb1e71a410
commit
903fb92341
|
@ -0,0 +1,112 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Arrows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"\n",
|
||||
"__tags__ = ['Vector data']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"def sample_data(shape=(20, 30)):\n",
|
||||
" \"\"\"\n",
|
||||
" Return ``(x, y, u, v, crs)`` of some vector data\n",
|
||||
" computed mathematically. The returned crs will be a rotated\n",
|
||||
" pole CRS, meaning that the vectors will be unevenly spaced in\n",
|
||||
" regular PlateCarree space.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" crs = ccrs.RotatedPole(pole_longitude=177.5, pole_latitude=37.5)\n",
|
||||
"\n",
|
||||
" x = np.linspace(311.9, 391.1, shape[1])\n",
|
||||
" y = np.linspace(-23.6, 24.8, shape[0])\n",
|
||||
"\n",
|
||||
" x2d, y2d = np.meshgrid(x, y)\n",
|
||||
" u = 10 * (2 * np.cos(2 * np.deg2rad(x2d) + 3 * np.deg2rad(y2d + 30)) ** 2)\n",
|
||||
" v = 20 * np.cos(6 * np.deg2rad(x2d))\n",
|
||||
"\n",
|
||||
" return x, y, u, v, crs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.Orthographic(-10, 45))\n",
|
||||
"\n",
|
||||
" ax.add_feature(cfeature.OCEAN, zorder=0)\n",
|
||||
" ax.add_feature(cfeature.LAND, zorder=0, edgecolor='black')\n",
|
||||
"\n",
|
||||
" ax.set_global()\n",
|
||||
" ax.gridlines()\n",
|
||||
"\n",
|
||||
" x, y, u, v, vector_crs = sample_data()\n",
|
||||
" ax.quiver(x, y, u, v, transform=vector_crs)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 4
|
||||
}
|
|
@ -0,0 +1,110 @@
|
|||
2019 aug; osgeolive 13 edition
|
||||
|
||||
These example notebooks include code snippets from the Cartopy documentation,
|
||||
and are reproduced here, with the following Cartopy v0.17 LICENSE.txt
|
||||
|
||||
|
||||
--
|
||||
Cartopy documentation and examples
|
||||
|
||||
All documentation, examples and sample data found on this website
|
||||
and in source repository are licensed under the
|
||||
UK’s Open Government Licence:
|
||||
|
||||
-----------------------------------
|
||||
© British Crown copyright, 2016.
|
||||
|
||||
You may use and re-use the information featured on this website
|
||||
(not including logos) free of charge in any format or medium, under the terms
|
||||
of the Open Government Licence. We encourage users to establish hypertext links
|
||||
to this website.
|
||||
|
||||
Any email enquiries regarding the use and re-use of this information resource
|
||||
should be sent to: psi@nationalarchives.gsi.gov.uk.
|
||||
|
||||
|
||||
Open Government License for public sector information
|
||||
delivered by The National Archives
|
||||
|
||||
|
||||
You are encouraged to use and re-use the Information that is available under this licence freely and flexibly, with only a few conditions.
|
||||
|
||||
Using Information under this licence
|
||||
|
||||
Use of copyright and database right material expressly made available under this licence (the ‘Information’) indicates your acceptance of the terms and conditions below.
|
||||
|
||||
The Licensor grants you a worldwide, royalty-free, perpetual, non-exclusive licence to use the Information subject to the conditions below.
|
||||
|
||||
This licence does not affect your freedom under fair dealing or fair use or any other copyright or database right exceptions and limitations.
|
||||
You are free to:
|
||||
|
||||
copy, publish, distribute and transmit the Information;
|
||||
adapt the Information;
|
||||
exploit the Information commercially and non-commercially for example, by combining it with other Information, or by including it in your own product or application.
|
||||
|
||||
You must, where you do any of the above:
|
||||
|
||||
acknowledge the source of the Information by including any attribution statement specified by the Information Provider(s) and, where possible, provide a link to this licence;
|
||||
|
||||
If the Information Provider does not provide a specific attribution statement, or if you are using Information from several Information Providers and multiple attributions are not practical in your product or application, you may use the following:
|
||||
|
||||
Contains public sector information licensed under the Open Government Licence v2.0.
|
||||
|
||||
These are important conditions of this licence and if you fail to comply with them the rights granted to you under this licence, or any similar licence granted by the Licensor, will end automatically.
|
||||
Exemptions
|
||||
|
||||
This licence does not cover:
|
||||
|
||||
personal data in the Information;
|
||||
information that has neither been published nor disclosed under information access legislation (including the Freedom of Information Acts for the UK and Scotland) by or with the consent of the Information Provider;
|
||||
departmental or public sector organisation logos, crests and the Royal Arms except where they form an integral part of a document or dataset;
|
||||
military insignia;
|
||||
third party rights the Information Provider is not authorised to license;
|
||||
other intellectual property rights, including patents, trade marks, and design rights; and
|
||||
identity documents such as the British Passport
|
||||
|
||||
Non-endorsement
|
||||
|
||||
This licence does not grant you any right to use the Information in a way that suggests any official status or that the Information Provider endorses you or your use of the Information.
|
||||
Non warranty
|
||||
|
||||
The Information is licensed ‘as is’ and the Information Provider excludes all representations, warranties, obligations and liabilities in relation to the Information to the maximum extent permitted by law.
|
||||
|
||||
The Information Provider is not liable for any errors or omissions in the Information and shall not be liable for any loss, injury or damage of any kind caused by its use. The Information Provider does not guarantee the continued supply of the Information.
|
||||
Governing Law
|
||||
|
||||
This licence is governed by the laws of the jurisdiction in which the Information Provider has its principal place of business, unless otherwise specified by the Information Provider.
|
||||
Definitions
|
||||
|
||||
In this licence, the terms below have the following meanings:
|
||||
|
||||
‘Information’
|
||||
means information protected by copyright or by database right (for example, literary and artistic works, content, data and source code) offered for use under the terms of this licence.
|
||||
|
||||
‘Information Provider’
|
||||
means the person or organisation providing the Information under this licence.
|
||||
|
||||
‘Licensor’
|
||||
means any Information Provider who has the authority to offer Information under the terms of this licence. It includes the Controller of Her Majesty’s Stationery Office, who has the authority to offer Information subject to Crown copyright and Crown database rights, and Information subject to copyright and database rights which have been assigned to or acquired by the Crown, under the terms of this licence.
|
||||
|
||||
‘Use’
|
||||
means doing any act which is restricted by copyright or database right, whether in the original medium or in any other medium, and includes without limitation distributing, copying, adapting, modifying as may be technically necessary to use it in a different mode or format.
|
||||
|
||||
‘You’
|
||||
means the natural or legal person, or body of persons corporate or incorporate, acquiring rights under this licence.
|
||||
About the Open Government Licence
|
||||
|
||||
The Controller of Her Majesty’s Stationery Office (HMSO) has developed this licence as a tool to enable Information Providers in the public sector to license the use and re-use of their Information under a common open licence. The Controller invites public sector bodies owning their own copyright and database rights to permit the use of their Information under this licence.
|
||||
|
||||
The Controller of HMSO has authority to license Information subject to copyright and database right owned by the Crown. The extent of the Controller’s offer to license this Information under the terms of this licence is set out on The National Archives website.
|
||||
|
||||
This is version 2.0 of the Open Government Licence. The Controller of HMSO may, from time to time, issue new versions of the Open Government Licence. If you are already using Information under a previous version of the Open Government Licence, the terms of that licence will continue to apply.
|
||||
|
||||
These terms are compatible with the Creative Commons Attribution License 4.0 and the Open Data Commons Attribution License, both of which license copyright and database rights. This means that when the Information is adapted and licensed under either of those licences, you automatically satisfy the conditions of the OGL when you comply with the other licence. The OGLv2.0 is Open Definition compliant.
|
||||
|
||||
Further context, best practice and guidance can be found in the UK Government Licensing Framework section on The National Archives website.
|
||||
Open Government License for public sector information
|
||||
|
||||
--
|
||||
|
||||
https://scitools.org.uk/cartopy/docs/v0.17/copyright.html
|
|
@ -0,0 +1,108 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Custom Boundary Shape\n",
|
||||
"\n",
|
||||
"This example demonstrates how a custom shape geometry may be used\n",
|
||||
"instead of the projection's default boundary.\n",
|
||||
"\n",
|
||||
"In this instance, we define the boundary as a circle in axes coordinates. This means that no matter the extent of the map itself, the boundary will always be a circle.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.path as mpath\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"\n",
|
||||
"__tags__ = ['Lines and polygons']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=[10, 5])\n",
|
||||
" ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.SouthPolarStereo())\n",
|
||||
" ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.SouthPolarStereo(),\n",
|
||||
" sharex=ax1, sharey=ax1)\n",
|
||||
" fig.subplots_adjust(bottom=0.05, top=0.95,\n",
|
||||
" left=0.04, right=0.95, wspace=0.02)\n",
|
||||
"\n",
|
||||
" # Limit the map to -60 degrees latitude and below.\n",
|
||||
" ax1.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" ax1.add_feature(cfeature.LAND)\n",
|
||||
" ax1.add_feature(cfeature.OCEAN)\n",
|
||||
"\n",
|
||||
" ax1.gridlines()\n",
|
||||
" ax2.gridlines()\n",
|
||||
"\n",
|
||||
" ax2.add_feature(cfeature.LAND)\n",
|
||||
" ax2.add_feature(cfeature.OCEAN)\n",
|
||||
"\n",
|
||||
" # Compute a circle in axes coordinates, which we can use as a boundary\n",
|
||||
" # for the map. We can pan/zoom as much as we like - the boundary will be\n",
|
||||
" # permanently circular.\n",
|
||||
" theta = np.linspace(0, 2*np.pi, 100)\n",
|
||||
" center, radius = [0.5, 0.5], 0.5\n",
|
||||
" verts = np.vstack([np.sin(theta), np.cos(theta)]).T\n",
|
||||
" circle = mpath.Path(verts * radius + center)\n",
|
||||
"\n",
|
||||
" ax2.set_boundary(circle, transform=ax2.transAxes)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 4
|
||||
}
|
|
@ -0,0 +1,145 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Interactive WMTS (Web Map Tile Service)\n",
|
||||
"---------------------------------------\n",
|
||||
"\n",
|
||||
"This example demonstrates the interactive pan and zoom capability\n",
|
||||
"supported by an OGC web services Web Map Tile Service (WMTS) aware axes.\n",
|
||||
"\n",
|
||||
"The example WMTS layer is a single composite of data sampled over nine days\n",
|
||||
"in April 2012 and thirteen days in October 2012 showing the Earth at night.\n",
|
||||
"It does not vary over time.\n",
|
||||
"\n",
|
||||
"The imagery was collected by the Suomi National Polar-orbiting Partnership\n",
|
||||
"(Suomi NPP) weather satellite operated by the United States National Oceanic\n",
|
||||
"and Atmospheric Administration (NOAA)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import cartopy.crs as ccrs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" url = 'https://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'\n",
|
||||
" layer = 'VIIRS_CityLights_2012'\n",
|
||||
"\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())\n",
|
||||
" ax.add_wmts(url, layer)\n",
|
||||
" ax.set_extent([-15, 25, 35, 60], crs=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" ax.set_title('Suomi NPP Earth at night April/October 2012')\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"ename": "SSLError",
|
||||
"evalue": "HTTPSConnectionPool(host='map1c.vis.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts-geo/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||||
"\u001b[0;31mError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/contrib/pyopenssl.py\u001b[0m in \u001b[0;36mwrap_socket\u001b[0;34m(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)\u001b[0m\n\u001b[1;32m 484\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 485\u001b[0;31m \u001b[0mcnx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_handshake\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 486\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOpenSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mWantReadError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/SSL.py\u001b[0m in \u001b[0;36mdo_handshake\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1914\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_lib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL_do_handshake\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ssl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1915\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_raise_ssl_error\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ssl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1916\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/SSL.py\u001b[0m in \u001b[0;36m_raise_ssl_error\u001b[0;34m(self, ssl, result)\u001b[0m\n\u001b[1;32m 1646\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1647\u001b[0;31m \u001b[0m_raise_current_error\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1648\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/_util.py\u001b[0m in \u001b[0;36mexception_from_error_queue\u001b[0;34m(exception_type)\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mexception_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0merrors\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mError\u001b[0m: [('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')]",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36murlopen\u001b[0;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[1;32m 664\u001b[0m \u001b[0;31m# Make the request on the httplib connection object.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 665\u001b[0;31m httplib_response = self._make_request(\n\u001b[0m\u001b[1;32m 666\u001b[0m \u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36m_make_request\u001b[0;34m(self, conn, method, url, timeout, chunked, **httplib_request_kw)\u001b[0m\n\u001b[1;32m 375\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 376\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_validate_conn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 377\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mSocketTimeout\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mBaseSSLError\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36m_validate_conn\u001b[0;34m(self, conn)\u001b[0m\n\u001b[1;32m 995\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"sock\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# AppEngine might not have `.sock`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 996\u001b[0;31m \u001b[0mconn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconnect\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 997\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connection.py\u001b[0m in \u001b[0;36mconnect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 365\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 366\u001b[0;31m self.sock = ssl_wrap_socket(\n\u001b[0m\u001b[1;32m 367\u001b[0m \u001b[0msock\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/util/ssl_.py\u001b[0m in \u001b[0;36mssl_wrap_socket\u001b[0;34m(sock, keyfile, certfile, cert_reqs, ca_certs, server_hostname, ssl_version, ciphers, ssl_context, ca_cert_dir, key_password)\u001b[0m\n\u001b[1;32m 369\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mHAS_SNI\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mserver_hostname\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 370\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwrap_socket\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msock\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mserver_hostname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mserver_hostname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 371\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/contrib/pyopenssl.py\u001b[0m in \u001b[0;36mwrap_socket\u001b[0;34m(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOpenSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mssl\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSLError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"bad handshake: %r\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m: (\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\",)",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mMaxRetryError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/adapters.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 438\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunked\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 439\u001b[0;31m resp = conn.urlopen(\n\u001b[0m\u001b[1;32m 440\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36murlopen\u001b[0;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[1;32m 718\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 719\u001b[0;31m retries = retries.increment(\n\u001b[0m\u001b[1;32m 720\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0merror\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_pool\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_stacktrace\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_info\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/util/retry.py\u001b[0m in \u001b[0;36mincrement\u001b[0;34m(self, method, url, response, error, _pool, _stacktrace)\u001b[0m\n\u001b[1;32m 435\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnew_retry\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_exhausted\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 436\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mMaxRetryError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_pool\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0merror\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mResponseError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcause\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 437\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mMaxRetryError\u001b[0m: HTTPSConnectionPool(host='map1c.vis.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts-geo/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m<ipython-input-3-263240bbee7e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
|
||||
"\u001b[0;32m<ipython-input-2-cfef7875dd8a>\u001b[0m in \u001b[0;36mmain\u001b[0;34m()\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mfig\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mplt\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfigure\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0max\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_subplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprojection\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mccrs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mPlateCarree\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_wmts\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlayer\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0max\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_extent\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m15\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m25\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m35\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m60\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcrs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mccrs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mPlateCarree\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/cartopy/mpl/geoaxes.py\u001b[0m in \u001b[0;36madd_wmts\u001b[0;34m(self, wmts, layer_name, wmts_kwargs, **kwargs)\u001b[0m\n\u001b[1;32m 2013\u001b[0m \"\"\"\n\u001b[1;32m 2014\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mcartopy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mio\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mogc_clients\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mWMTSRasterSource\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2015\u001b[0;31m wmts = WMTSRasterSource(wmts, layer_name,\n\u001b[0m\u001b[1;32m 2016\u001b[0m gettile_extra_kwargs=wmts_kwargs)\n\u001b[1;32m 2017\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madd_raster\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwmts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/cartopy/io/ogc_clients.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, wmts, layer_name, gettile_extra_kwargs)\u001b[0m\n\u001b[1;32m 370\u001b[0m \u001b[0mhasattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwmts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'contents'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mand\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 371\u001b[0m hasattr(wmts, 'gettile')):\n\u001b[0;32m--> 372\u001b[0;31m \u001b[0mwmts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mowslib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwmts\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mWebMapTileService\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwmts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 373\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 374\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/wmts.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, url, version, xml, username, password, parse_remote_metadata, vendor_kwargs, headers, auth, timeout)\u001b[0m\n\u001b[1;32m 175\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_capabilities\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreadString\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mxml\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 176\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# read from server\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 177\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_capabilities\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvendor_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 178\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 179\u001b[0m \u001b[0;31m# Avoid building capabilities metadata if the response is a\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/wmts.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, service_url, vendor_kwargs)\u001b[0m\n\u001b[1;32m 827\u001b[0m \u001b[0;31m# now split it up again to use the generic openURL function...\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 828\u001b[0m \u001b[0mspliturl\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetcaprequest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'?'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 829\u001b[0;31m \u001b[0mu\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mopenURL\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mspliturl\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mspliturl\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Get'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauth\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 830\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0metree\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfromstring\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mu\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 831\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/util.py\u001b[0m in \u001b[0;36mopenURL\u001b[0;34m(url_base, data, method, cookies, username, password, timeout, headers, verify, cert, auth)\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0mrkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'cookies'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcookies\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 204\u001b[0;31m \u001b[0mreq\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrequests\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl_base\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mrkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 205\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 206\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mreq\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstatus_code\u001b[0m \u001b[0;32min\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m400\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m401\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/api.py\u001b[0m in \u001b[0;36mrequest\u001b[0;34m(method, url, **kwargs)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0;31m# cases, and look like a memory leak in others.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0msessions\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSession\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 60\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36mrequest\u001b[0;34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[0m\n\u001b[1;32m 531\u001b[0m }\n\u001b[1;32m 532\u001b[0m \u001b[0msend_kwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msettings\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 533\u001b[0;31m \u001b[0mresp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprep\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0msend_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 534\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 535\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, **kwargs)\u001b[0m\n\u001b[1;32m 644\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 645\u001b[0m \u001b[0;31m# Send the request\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 646\u001b[0;31m \u001b[0mr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0madapter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 647\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 648\u001b[0m \u001b[0;31m# Total elapsed time of the request (approximately)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/adapters.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 512\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreason\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_SSLError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 513\u001b[0m \u001b[0;31m# This branch is for urllib3 v1.22 and later.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 514\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mSSLError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrequest\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 515\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 516\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mConnectionError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrequest\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m: HTTPSConnectionPool(host='map1c.vis.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts-geo/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAAC1CAYAAAD86CzsAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAACvklEQVR4nO3YsVEDQRBFQS1FCAR1KZPDXSabw5CAJEs8qkS3+52xnjFrZm4AND7++gCA/0R0AUKiCxASXYCQ6AKEPp+Nx3HM3ru6BeAtXNf1PTPHve1pdPfet/M8f+cqgDe11vp6tHkvAIREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQqILEBJdgJDoAoREFyAkugAh0QUIiS5ASHQBQmtmHo9rneEtAO9iz8xxb3gaXQBey3sBICS6ACHRBQiJLkBIdAFCP0aIH6LDexPPAAAAAElFTkSuQmCC\n",
|
||||
"text/plain": [
|
||||
"<Figure size 432x288 with 1 Axes>"
|
||||
]
|
||||
},
|
||||
"metadata": {
|
||||
"needs_background": "light"
|
||||
},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,171 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"from cartopy.io.img_tiles import Stamen\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"from matplotlib.lines import Line2D as Line\n",
|
||||
"from matplotlib.patheffects import Stroke\n",
|
||||
"import numpy as np\n",
|
||||
"import shapely.geometry as sgeom\n",
|
||||
"from shapely.ops import transform as geom_transform\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The effect of badly referencing an ellipse\n",
|
||||
"------------------------------------------\n",
|
||||
"\n",
|
||||
"This example demonstrates the effect of referencing your data to an incorrect\n",
|
||||
"ellipse.\n",
|
||||
"\n",
|
||||
"First we define two coordinate systems - one using the World Geodetic System\n",
|
||||
"established in 1984 and the other using a spherical globe. Next we extract\n",
|
||||
"data from the Natural Earth land dataset and convert the Geodetic\n",
|
||||
"coordinates (referenced in WGS84) into the respective coordinate systems\n",
|
||||
"that we have defined. Finally, we plot these datasets onto a map assuming\n",
|
||||
"that they are both referenced to the WGS84 ellipse and compare how the\n",
|
||||
"coastlines are shifted as a result of referencing the incorrect ellipse."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"def transform_fn_factory(target_crs, source_crs):\n",
|
||||
" \"\"\"\n",
|
||||
" Return a function which can be used by ``shapely.op.transform``\n",
|
||||
" to transform the coordinate points of a geometry.\n",
|
||||
"\n",
|
||||
" The function explicitly *does not* do any interpolation or clever\n",
|
||||
" transformation of the coordinate points, so there is no guarantee\n",
|
||||
" that the resulting geometry would make any sense.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" def transform_fn(x, y, z=None):\n",
|
||||
" new_coords = target_crs.transform_points(source_crs,\n",
|
||||
" np.asanyarray(x),\n",
|
||||
" np.asanyarray(y))\n",
|
||||
" return new_coords[:, 0], new_coords[:, 1], new_coords[:, 2]\n",
|
||||
"\n",
|
||||
" return transform_fn\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" # Define the two coordinate systems with different ellipses.\n",
|
||||
" wgs84 = ccrs.PlateCarree(globe=ccrs.Globe(datum='WGS84',\n",
|
||||
" ellipse='WGS84'))\n",
|
||||
" sphere = ccrs.PlateCarree(globe=ccrs.Globe(datum='WGS84',\n",
|
||||
" ellipse='sphere'))\n",
|
||||
"\n",
|
||||
" # Define the coordinate system of the data we have from Natural Earth and\n",
|
||||
" # acquire the 1:10m physical coastline shapefile.\n",
|
||||
" geodetic = ccrs.Geodetic(globe=ccrs.Globe(datum='WGS84'))\n",
|
||||
" dataset = cfeature.NaturalEarthFeature(category='physical',\n",
|
||||
" name='coastline',\n",
|
||||
" scale='10m')\n",
|
||||
"\n",
|
||||
" # Create a Stamen map tiler instance, and use its CRS for the GeoAxes.\n",
|
||||
" tiler = Stamen('terrain-background')\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=tiler.crs)\n",
|
||||
" ax.set_title('The effect of incorrectly referencing the Solomon Islands')\n",
|
||||
"\n",
|
||||
" # Pick the area of interest. In our case, roughly the Solomon Islands, and\n",
|
||||
" # get hold of the coastlines for that area.\n",
|
||||
" extent = [155, 163, -11.5, -6]\n",
|
||||
" ax.set_extent(extent, geodetic)\n",
|
||||
" geoms = list(dataset.intersecting_geometries(extent))\n",
|
||||
"\n",
|
||||
" # Add the Stamen aerial imagery at zoom level 7.\n",
|
||||
" ax.add_image(tiler, 7)\n",
|
||||
"\n",
|
||||
" # Transform the geodetic coordinates of the coastlines into the two\n",
|
||||
" # projections of differing ellipses.\n",
|
||||
" wgs84_geoms = [geom_transform(transform_fn_factory(wgs84, geodetic),\n",
|
||||
" geom) for geom in geoms]\n",
|
||||
" sphere_geoms = [geom_transform(transform_fn_factory(sphere, geodetic),\n",
|
||||
" geom) for geom in geoms]\n",
|
||||
"\n",
|
||||
" # Using these differently referenced geometries, assume that they are\n",
|
||||
" # both referenced to WGS84.\n",
|
||||
" ax.add_geometries(wgs84_geoms, wgs84, edgecolor='white', facecolor='none')\n",
|
||||
" ax.add_geometries(sphere_geoms, wgs84, edgecolor='gray', facecolor='none')\n",
|
||||
"\n",
|
||||
" # Create a legend for the coastlines.\n",
|
||||
" legend_artists = [Line([0], [0], color=color, linewidth=3)\n",
|
||||
" for color in ('white', 'gray')]\n",
|
||||
" legend_texts = ['Correct ellipse\\n(WGS84)', 'Incorrect ellipse\\n(sphere)']\n",
|
||||
" legend = ax.legend(legend_artists, legend_texts, fancybox=True,\n",
|
||||
" loc='lower left', framealpha=0.75)\n",
|
||||
" legend.legendPatch.set_facecolor('wheat')\n",
|
||||
"\n",
|
||||
" # Create an inset GeoAxes showing the location of the Solomon Islands.\n",
|
||||
" sub_ax = fig.add_axes([0.7, 0.625, 0.2, 0.2],\n",
|
||||
" projection=ccrs.PlateCarree())\n",
|
||||
" sub_ax.set_extent([110, 180, -50, 10], geodetic)\n",
|
||||
"\n",
|
||||
" # Make a nice border around the inset axes.\n",
|
||||
" effect = Stroke(linewidth=4, foreground='wheat', alpha=0.5)\n",
|
||||
" sub_ax.outline_patch.set_path_effects([effect])\n",
|
||||
"\n",
|
||||
" # Add the land, coastlines and the extent of the Solomon Islands.\n",
|
||||
" sub_ax.add_feature(cfeature.LAND)\n",
|
||||
" sub_ax.coastlines()\n",
|
||||
" extent_box = sgeom.box(extent[0], extent[2], extent[1], extent[3])\n",
|
||||
" sub_ax.add_geometries([extent_box], ccrs.PlateCarree(), facecolor='none',\n",
|
||||
" edgecolor='blue', linewidth=2)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,103 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import matplotlib.textpath\n",
|
||||
"import matplotlib.patches\n",
|
||||
"from matplotlib.font_manager import FontProperties\n",
|
||||
"import numpy as np\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=[8, 8])\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.SouthPolarStereo())\n",
|
||||
"\n",
|
||||
" ax.coastlines()\n",
|
||||
" ax.gridlines()\n",
|
||||
"\n",
|
||||
" im = ax.stock_img()\n",
|
||||
"\n",
|
||||
" def on_draw(event=None):\n",
|
||||
" \"\"\"\n",
|
||||
" Hook into matplotlib's event mechanism to define the clip path of the\n",
|
||||
" background image.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" # Clip the image to the current background boundary.\n",
|
||||
" im.set_clip_path(ax.background_patch.get_path(),\n",
|
||||
" transform=ax.background_patch.get_transform())\n",
|
||||
"\n",
|
||||
" # Register the on_draw method and call it once now.\n",
|
||||
" fig.canvas.mpl_connect('draw_event', on_draw)\n",
|
||||
" on_draw()\n",
|
||||
"\n",
|
||||
" # Generate a matplotlib path representing the character \"C\".\n",
|
||||
" fp = FontProperties(family='Bitstream Vera Sans', weight='bold')\n",
|
||||
" logo_path = matplotlib.textpath.TextPath((-4.5e7, -3.7e7),\n",
|
||||
" 'C', size=1, prop=fp)\n",
|
||||
"\n",
|
||||
" # Scale the letter up to an appropriate X and Y scale.\n",
|
||||
" logo_path._vertices *= np.array([103250000, 103250000])\n",
|
||||
"\n",
|
||||
" # Add the path as a patch, drawing black outlines around the text.\n",
|
||||
" patch = matplotlib.patches.PathPatch(logo_path, facecolor='white',\n",
|
||||
" edgecolor='black', linewidth=10,\n",
|
||||
" transform=ccrs.SouthPolarStereo())\n",
|
||||
" ax.add_patch(patch)\n",
|
||||
" plt.show()\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,110 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Feature Creation\n",
|
||||
"----------------\n",
|
||||
"\n",
|
||||
"This example manually instantiates a\n",
|
||||
":class:`cartopy.feature.NaturalEarthFeature` to access administrative\n",
|
||||
"boundaries (states and provinces).\n",
|
||||
"\n",
|
||||
"Note that this example is intended to illustrate the ability to construct\n",
|
||||
"Natural Earth features that cartopy does not necessarily know about\n",
|
||||
"*a priori*.\n",
|
||||
"In this instance however, it would be possible to make use of the\n",
|
||||
"pre-defined :data:`cartopy.feature.STATES` constant."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"from matplotlib.offsetbox import AnchoredText\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())\n",
|
||||
" ax.set_extent([80, 170, -45, 30], crs=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" # Put a background image on for nice sea rendering.\n",
|
||||
" ax.stock_img()\n",
|
||||
"\n",
|
||||
" # Create a feature for States/Admin 1 regions at 1:50m from Natural Earth\n",
|
||||
" states_provinces = cfeature.NaturalEarthFeature(\n",
|
||||
" category='cultural',\n",
|
||||
" name='admin_1_states_provinces_lines',\n",
|
||||
" scale='50m',\n",
|
||||
" facecolor='none')\n",
|
||||
"\n",
|
||||
" SOURCE = 'Natural Earth'\n",
|
||||
" LICENSE = 'public domain'\n",
|
||||
"\n",
|
||||
" ax.add_feature(cfeature.LAND)\n",
|
||||
" ax.add_feature(cfeature.COASTLINE)\n",
|
||||
" ax.add_feature(states_provinces, edgecolor='gray')\n",
|
||||
"\n",
|
||||
" # Add a text annotation for the license information to the\n",
|
||||
" # the bottom right corner.\n",
|
||||
" text = AnchoredText(r'$\\mathcircled{{c}}$ {}; license: {}'\n",
|
||||
" ''.format(SOURCE, LICENSE),\n",
|
||||
" loc=4, prop={'size': 12}, frameon=True)\n",
|
||||
" ax.add_artist(text)\n",
|
||||
"\n",
|
||||
" plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,77 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(10, 5))\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.Robinson())\n",
|
||||
"\n",
|
||||
" # make the map global rather than have it zoom in to\n",
|
||||
" # the extents of any plotted data\n",
|
||||
" ax.set_global()\n",
|
||||
"\n",
|
||||
" ax.stock_img()\n",
|
||||
" ax.coastlines()\n",
|
||||
"\n",
|
||||
" ax.plot(-0.08, 51.53, 'o', transform=ccrs.PlateCarree())\n",
|
||||
" ax.plot([-0.08, 132], [51.53, 43.17], transform=ccrs.PlateCarree())\n",
|
||||
" ax.plot([-0.08, 132], [51.53, 43.17], transform=ccrs.Geodetic())\n",
|
||||
"\n",
|
||||
" plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"\n",
|
||||
"from cartopy.io.img_tiles import Stamen\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" tiler = Stamen('terrain-background')\n",
|
||||
" mercator = tiler.crs\n",
|
||||
"\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=mercator)\n",
|
||||
" ax.set_extent([-90, -73, 22, 34], crs=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" ax.add_image(tiler, 6)\n",
|
||||
"\n",
|
||||
" ax.coastlines('10m')\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,68 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"## Natural Earth 2 cache test\n",
|
||||
"## Live 8.5 * darkblue-b\n",
|
||||
"##"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ax = plt.axes(projection=ccrs.PlateCarree() ) \n",
|
||||
"\n",
|
||||
"ax.add_feature(cfeature.LAND)\n",
|
||||
"ax.add_feature(cfeature.OCEAN)\n",
|
||||
"ax.add_feature(cfeature.COASTLINE)\n",
|
||||
"ax.add_feature(cfeature.BORDERS, linestyle=':')\n",
|
||||
"ax.add_feature(cfeature.LAKES, alpha=0.5)\n",
|
||||
"ax.add_feature(cfeature.RIVERS)\n",
|
||||
"\n",
|
||||
"plt.show()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,110 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Custom Boundary Shape\n",
|
||||
"---------------------\n",
|
||||
"\n",
|
||||
"This example demonstrates how a custom shape geometry may be used\n",
|
||||
"instead of the projection's default boundary.\n",
|
||||
"\n",
|
||||
"In this instance, we define the boundary as a circle in axes coordinates.\n",
|
||||
"This means that no matter the extent of the map itself, the boundary will\n",
|
||||
"always be a circle.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.path as mpath\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=[10, 5])\n",
|
||||
" ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.SouthPolarStereo())\n",
|
||||
" ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.SouthPolarStereo(),\n",
|
||||
" sharex=ax1, sharey=ax1)\n",
|
||||
" fig.subplots_adjust(bottom=0.05, top=0.95,\n",
|
||||
" left=0.04, right=0.95, wspace=0.02)\n",
|
||||
"\n",
|
||||
" # Limit the map to -60 degrees latitude and below.\n",
|
||||
" ax1.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" ax1.add_feature(cfeature.LAND)\n",
|
||||
" ax1.add_feature(cfeature.OCEAN)\n",
|
||||
"\n",
|
||||
" ax1.gridlines()\n",
|
||||
" ax2.gridlines()\n",
|
||||
"\n",
|
||||
" ax2.add_feature(cfeature.LAND)\n",
|
||||
" ax2.add_feature(cfeature.OCEAN)\n",
|
||||
"\n",
|
||||
" # Compute a circle in axes coordinates, which we can use as a boundary\n",
|
||||
" # for the map. We can pan/zoom as much as we like - the boundary will be\n",
|
||||
" # permanently circular.\n",
|
||||
" theta = np.linspace(0, 2*np.pi, 100)\n",
|
||||
" center, radius = [0.5, 0.5], 0.5\n",
|
||||
" verts = np.vstack([np.sin(theta), np.cos(theta)]).T\n",
|
||||
" circle = mpath.Path(verts * radius + center)\n",
|
||||
"\n",
|
||||
" ax2.set_boundary(circle, transform=ax2.transAxes)\n",
|
||||
"\n",
|
||||
" plt.show()\n",
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,126 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Regridding vectors with quiver\n",
|
||||
"------------------------------\n",
|
||||
"\n",
|
||||
"This example demonstrates the regridding functionality in quiver (there exists\n",
|
||||
"equivalent functionality in :meth:`cartopy.mpl.geoaxes.GeoAxes.barbs`).\n",
|
||||
"\n",
|
||||
"Regridding can be an effective way of visualising a vector field, particularly\n",
|
||||
"if the data is dense or warped.\n",
|
||||
"\n",
|
||||
"### http://scitools.org.uk/iris/docs/v1.9.0/html/gallery.html\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def sample_data(shape=(20, 30)):\n",
|
||||
" \"\"\"\n",
|
||||
" Returns ``(x, y, u, v, crs)`` of some vector data\n",
|
||||
" computed mathematically. The returned CRS will be a North Polar\n",
|
||||
" Stereographic projection, meaning that the vectors will be unevenly\n",
|
||||
" spaced in a PlateCarree projection.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" crs = ccrs.NorthPolarStereo()\n",
|
||||
" scale = 1e7\n",
|
||||
" x = np.linspace(-scale, scale, shape[1])\n",
|
||||
" y = np.linspace(-scale, scale, shape[0])\n",
|
||||
"\n",
|
||||
" x2d, y2d = np.meshgrid(x, y)\n",
|
||||
" u = 10 * np.cos(2 * x2d / scale + 3 * y2d / scale)\n",
|
||||
" v = 20 * np.cos(6 * x2d / scale)\n",
|
||||
"\n",
|
||||
" return x, y, u, v, crs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" plt.figure(figsize=(8, 10))\n",
|
||||
"\n",
|
||||
" x, y, u, v, vector_crs = sample_data(shape=(50, 50))\n",
|
||||
" ax1 = plt.subplot(2, 1, 1, projection=ccrs.PlateCarree())\n",
|
||||
" ax1.coastlines()\n",
|
||||
" ax1.set_extent([-45, 55, 20, 80], ccrs.PlateCarree())\n",
|
||||
" ax1.quiver(x, y, u, v, transform=vector_crs)\n",
|
||||
"\n",
|
||||
" ax2 = plt.subplot(2, 1, 2, projection=ccrs.PlateCarree())\n",
|
||||
" plt.title('The same vector field regridded')\n",
|
||||
" ax2.coastlines()\n",
|
||||
" ax2.set_extent([-45, 55, 20, 80], ccrs.PlateCarree())\n",
|
||||
" ax2.quiver(x, y, u, v, transform=vector_crs, regrid_shape=20)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,221 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"from osgeo import gdal\n",
|
||||
"from osgeo import gdal_array\n",
|
||||
"\n",
|
||||
"import rasterio\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"\n",
|
||||
"## DEM plot via cartopy, rasterio, pyplot imshow\n",
|
||||
"## Live 8.5 * darkblue-b\n",
|
||||
"##"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"!mkdir sample_data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"=="
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Rasterio\n",
|
||||
"## Clean and fast\n",
|
||||
"## geospatial raster I/O for Python programmers who use Numpy\n",
|
||||
"\n",
|
||||
"with rasterio.open('sample_data/SanMateo_CA.tif') as src:\n",
|
||||
" data = src.read()\n",
|
||||
" data = np.transpose( data, [1,2,0])\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"## show selected data attributes\n",
|
||||
"## -------------------------------------\n",
|
||||
"# type(data) numpy.ma.core.MaskedArray\n",
|
||||
"# data.ndim 3\n",
|
||||
"# data.shape (1080, 864, 1)\n",
|
||||
"# data.dtype dtype('float32')\n",
|
||||
"\n",
|
||||
"## NOTE: when using rasterio and matplotlib only,\n",
|
||||
"## the column order for the trivial case of lat/lon/measure\n",
|
||||
"## is NOT handled automatically.. \n",
|
||||
"## column reordering is REQUIRED np.transpose()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'driver': 'GTiff',\n",
|
||||
" 'dtype': 'float32',\n",
|
||||
" 'nodata': -3.4028234663852886e+38,\n",
|
||||
" 'width': 864,\n",
|
||||
" 'height': 1080,\n",
|
||||
" 'count': 1,\n",
|
||||
" 'crs': CRS.from_epsg(4269),\n",
|
||||
" 'transform': Affine(0.0002777777777780012, 0.0, -122.44000000003291,\n",
|
||||
" 0.0, -0.0002777777777779992, 37.69999999999724)}"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"## Rasterio supplies a simple dictionary of important GeoTIFF metadata\n",
|
||||
"src.meta"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"array([[203.4132 , 203.94624],\n",
|
||||
" [198.2123 , 197.35855]], dtype=float32)"
|
||||
]
|
||||
},
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"## numpy ndarray indexing example\n",
|
||||
"## show the measure values for a 2x2 area\n",
|
||||
"## no-data options are available using a masked array\n",
|
||||
"## http://docs.scipy.org/doc/numpy/reference/maskedarray.generic.html#what-is-a-masked-array\n",
|
||||
"\n",
|
||||
"data[0:2,0:2,0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## pyplot Image-Show \n",
|
||||
"## http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.imshow\n",
|
||||
"##\n",
|
||||
"## Example of using matplotlib to directly display a GeoTIFF\n",
|
||||
"##\n",
|
||||
"## Cartopy supplies a library of mapping transformations\n",
|
||||
"## use an idiom to calculate the correct display bounds\n",
|
||||
"##\n",
|
||||
"## data[:,:,0] refers to a numpy ndarray: all-x, all-y, 0th measure\n",
|
||||
"##\n",
|
||||
"\n",
|
||||
"xmin = src.transform[0]\n",
|
||||
"xmax = src.transform[0] + src.transform[1]*src.width\n",
|
||||
"ymin = src.transform[3] + src.transform[5]*src.height\n",
|
||||
"ymax = src.transform[3]\n",
|
||||
"\n",
|
||||
"## Mercator etc..\n",
|
||||
"crs = ccrs.PlateCarree()\n",
|
||||
"\n",
|
||||
"ax = plt.axes(projection=crs)\n",
|
||||
"plt.imshow( data[:,:,0], origin='upper', \n",
|
||||
" extent=[xmin, xmax, ymin, ymax], \n",
|
||||
" cmap=plt.get_cmap('gist_earth'),\n",
|
||||
" transform=crs, interpolation='nearest')\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"=="
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Show the same content, but with a colorbar legend for the data\n",
|
||||
"##\n",
|
||||
"plt.figure(figsize=(15, 10))\n",
|
||||
"ax = plt.subplot(111, projection=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
"#elev, crs, extent = srtm_composite(12, 47, 1, 1)\n",
|
||||
"plt.imshow( data[:,:,0], transform=ccrs.PlateCarree(),\n",
|
||||
" cmap='gist_earth')\n",
|
||||
"cb = plt.colorbar(orientation='vertical')\n",
|
||||
"cb.set_label('Altitude')\n",
|
||||
"plt.title(\"DEM\")\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%whos"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@ -0,0 +1,147 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.patches as mpatches\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import shapely.geometry as sgeom\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.io.shapereader as shpreader\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def sample_data():\n",
|
||||
" \"\"\"\n",
|
||||
" Return a list of latitudes and a list of longitudes (lons, lats)\n",
|
||||
" for Hurricane Katrina (2005).\n",
|
||||
"\n",
|
||||
" The data was originally sourced from the HURDAT2 dataset from AOML/NOAA:\n",
|
||||
" http://www.aoml.noaa.gov/hrd/hurdat/newhurdat-all.html on 14th Dec 2012.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" lons = [-75.1, -75.7, -76.2, -76.5, -76.9, -77.7, -78.4, -79.0,\n",
|
||||
" -79.6, -80.1, -80.3, -81.3, -82.0, -82.6, -83.3, -84.0,\n",
|
||||
" -84.7, -85.3, -85.9, -86.7, -87.7, -88.6, -89.2, -89.6,\n",
|
||||
" -89.6, -89.6, -89.6, -89.6, -89.1, -88.6, -88.0, -87.0,\n",
|
||||
" -85.3, -82.9]\n",
|
||||
"\n",
|
||||
" lats = [23.1, 23.4, 23.8, 24.5, 25.4, 26.0, 26.1, 26.2, 26.2, 26.0,\n",
|
||||
" 25.9, 25.4, 25.1, 24.9, 24.6, 24.4, 24.4, 24.5, 24.8, 25.2,\n",
|
||||
" 25.7, 26.3, 27.2, 28.2, 29.3, 29.5, 30.2, 31.1, 32.6, 34.1,\n",
|
||||
" 35.6, 37.0, 38.6, 40.1]\n",
|
||||
"\n",
|
||||
" return lons, lats\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.LambertConformal())\n",
|
||||
"\n",
|
||||
" ax.set_extent([-125, -66.5, 20, 50], ccrs.Geodetic())\n",
|
||||
"\n",
|
||||
" shapename = 'admin_1_states_provinces_lakes_shp'\n",
|
||||
" states_shp = shpreader.natural_earth(resolution='110m',\n",
|
||||
" category='cultural', name=shapename)\n",
|
||||
"\n",
|
||||
" lons, lats = sample_data()\n",
|
||||
"\n",
|
||||
" # to get the effect of having just the states without a map \"background\"\n",
|
||||
" # turn off the outline and background patches\n",
|
||||
" ax.background_patch.set_visible(False)\n",
|
||||
" ax.outline_patch.set_visible(False)\n",
|
||||
"\n",
|
||||
" ax.set_title('US States which intersect the track of '\n",
|
||||
" 'Hurricane Katrina (2005)')\n",
|
||||
"\n",
|
||||
" # turn the lons and lats into a shapely LineString\n",
|
||||
" track = sgeom.LineString(zip(lons, lats))\n",
|
||||
"\n",
|
||||
" # buffer the linestring by two degrees (note: this is a non-physical\n",
|
||||
" # distance)\n",
|
||||
" track_buffer = track.buffer(2)\n",
|
||||
"\n",
|
||||
" def colorize_state(geometry):\n",
|
||||
" facecolor = (0.9375, 0.9375, 0.859375)\n",
|
||||
" if geometry.intersects(track):\n",
|
||||
" facecolor = 'red'\n",
|
||||
" elif geometry.intersects(track_buffer):\n",
|
||||
" facecolor = '#FF7E00'\n",
|
||||
" return {'facecolor': facecolor, 'edgecolor': 'black'}\n",
|
||||
"\n",
|
||||
" ax.add_geometries(\n",
|
||||
" shpreader.Reader(states_shp).geometries(),\n",
|
||||
" ccrs.PlateCarree(),\n",
|
||||
" styler=colorize_state)\n",
|
||||
"\n",
|
||||
" ax.add_geometries([track_buffer], ccrs.PlateCarree(),\n",
|
||||
" facecolor='#C8A2C8', alpha=0.5)\n",
|
||||
" ax.add_geometries([track], ccrs.PlateCarree(),\n",
|
||||
" facecolor='none', edgecolor='k')\n",
|
||||
"\n",
|
||||
" # make two proxy artists to add to a legend\n",
|
||||
" direct_hit = mpatches.Rectangle((0, 0), 1, 1, facecolor=\"red\")\n",
|
||||
" within_2_deg = mpatches.Rectangle((0, 0), 1, 1, facecolor=\"#FF7E00\")\n",
|
||||
" labels = ['State directly intersects\\nwith track',\n",
|
||||
" 'State is within \\n2 degrees of track']\n",
|
||||
" ax.legend([direct_hit, within_2_deg], labels,\n",
|
||||
" loc='lower left', bbox_to_anchor=(0.025, -0.1), fancybox=True)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,90 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Modifying the boundary/neatline of a map in cartopy\n",
|
||||
"---------------------------------------------------\n",
|
||||
"\n",
|
||||
"This example demonstrates how to modify the boundary/neatline\n",
|
||||
"of an axes. We construct a star with coordinates in a Plate Carree\n",
|
||||
"coordinate system, and use the star as the outline of the map.\n",
|
||||
"\n",
|
||||
"Notice how changing the projection of the map represents a *projected*\n",
|
||||
"star shaped boundary."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.path as mpath\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure()\n",
|
||||
" ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.PlateCarree())\n",
|
||||
" ax.coastlines()\n",
|
||||
"\n",
|
||||
" # Construct a star in longitudes and latitudes.\n",
|
||||
" star_path = mpath.Path.unit_regular_star(5, 0.5)\n",
|
||||
" star_path = mpath.Path(star_path.vertices.copy() * 80,\n",
|
||||
" star_path.codes.copy())\n",
|
||||
"\n",
|
||||
" # Use the star as the boundary.\n",
|
||||
" ax.set_boundary(star_path, transform=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"from cartopy.examples.arrows import sample_data\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(10, 5))\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())\n",
|
||||
" ax.set_extent([-90, 75, 10, 85], crs=ccrs.PlateCarree())\n",
|
||||
" ax.coastlines()\n",
|
||||
"\n",
|
||||
" x, y, u, v, vector_crs = sample_data(shape=(80, 100))\n",
|
||||
" magnitude = (u ** 2 + v ** 2) ** 0.5\n",
|
||||
" ax.streamplot(x, y, u, v, transform=vector_crs,\n",
|
||||
" linewidth=2, density=2, color=magnitude)\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"def sample_data(shape=(73, 145)):\n",
|
||||
" \"\"\"Return ``lons``, ``lats`` and ``data`` of some fake data.\"\"\"\n",
|
||||
" nlats, nlons = shape\n",
|
||||
" lats = np.linspace(-np.pi / 2, np.pi / 2, nlats)\n",
|
||||
" lons = np.linspace(0, 2 * np.pi, nlons)\n",
|
||||
" lons, lats = np.meshgrid(lons, lats)\n",
|
||||
" wave = 0.75 * (np.sin(2 * lats) ** 8) * np.cos(4 * lons)\n",
|
||||
" mean = 0.5 * np.cos(2 * lats) * ((np.sin(2 * lats)) ** 2 + 2)\n",
|
||||
"\n",
|
||||
" lats = np.rad2deg(lats)\n",
|
||||
" lons = np.rad2deg(lons)\n",
|
||||
" data = wave + mean\n",
|
||||
"\n",
|
||||
" return lons, lats, data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(10, 5))\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.Mollweide())\n",
|
||||
"\n",
|
||||
" lons, lats, data = sample_data()\n",
|
||||
"\n",
|
||||
" ax.contourf(lons, lats, data,\n",
|
||||
" transform=ccrs.PlateCarree(),\n",
|
||||
" cmap='nipy_spectral')\n",
|
||||
" ax.coastlines()\n",
|
||||
" ax.set_global()\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Map tile acquisition\n",
|
||||
"--------------------\n",
|
||||
"\n",
|
||||
"Demonstrates cartopy's ability to draw map tiles which are downloaded on\n",
|
||||
"demand from the Stamen tile server. Internally these tiles are then combined\n",
|
||||
"into a single image and displayed in the cartopy GeoAxes."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"from matplotlib.transforms import offset_copy\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.io.img_tiles as cimgt"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" # Create a Stamen terrain background instance.\n",
|
||||
" stamen_terrain = cimgt.Stamen('terrain-background')\n",
|
||||
"\n",
|
||||
" fig = plt.figure()\n",
|
||||
"\n",
|
||||
" # Create a GeoAxes in the tile's projection.\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=stamen_terrain.crs)\n",
|
||||
"\n",
|
||||
" # Limit the extent of the map to a small longitude/latitude range.\n",
|
||||
" ax.set_extent([-22, -15, 63, 65], crs=ccrs.Geodetic())\n",
|
||||
"\n",
|
||||
" # Add the Stamen data at zoom level 8.\n",
|
||||
" ax.add_image(stamen_terrain, 8)\n",
|
||||
"\n",
|
||||
" # Add a marker for the Eyjafjallajökull volcano.\n",
|
||||
" ax.plot(-19.613333, 63.62, marker='o', color='red', markersize=12,\n",
|
||||
" alpha=0.7, transform=ccrs.Geodetic())\n",
|
||||
"\n",
|
||||
" # Use the cartopy interface to create a matplotlib transform object\n",
|
||||
" # for the Geodetic coordinate system. We will use this along with\n",
|
||||
" # matplotlib's offset_copy function to define a coordinate system which\n",
|
||||
" # translates the text by 25 pixels to the left.\n",
|
||||
" geodetic_transform = ccrs.Geodetic()._as_mpl_transform(ax)\n",
|
||||
" text_transform = offset_copy(geodetic_transform, units='dots', x=-25)\n",
|
||||
"\n",
|
||||
" # Add text 25 pixels to the left of the volcano.\n",
|
||||
" ax.text(-19.613333, 63.62, u'Eyjafjallajökull',\n",
|
||||
" verticalalignment='center', horizontalalignment='right',\n",
|
||||
" transform=text_transform,\n",
|
||||
" bbox=dict(facecolor='sandybrown', alpha=0.5, boxstyle='round'))\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,105 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Tick Labels\n",
|
||||
"-----------\n",
|
||||
"\n",
|
||||
"This example demonstrates adding tick labels to maps on rectangular\n",
|
||||
"projections using special tick formatters."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter\n",
|
||||
"import matplotlib.pyplot as plt\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(8, 10))\n",
|
||||
"\n",
|
||||
" # Label axes of a Plate Carree projection with a central longitude of 180:\n",
|
||||
" ax1 = fig.add_subplot(2, 1, 1,\n",
|
||||
" projection=ccrs.PlateCarree(central_longitude=180))\n",
|
||||
" ax1.set_global()\n",
|
||||
" ax1.coastlines()\n",
|
||||
" ax1.set_xticks([0, 60, 120, 180, 240, 300, 360], crs=ccrs.PlateCarree())\n",
|
||||
" ax1.set_yticks([-90, -60, -30, 0, 30, 60, 90], crs=ccrs.PlateCarree())\n",
|
||||
" lon_formatter = LongitudeFormatter(zero_direction_label=True)\n",
|
||||
" lat_formatter = LatitudeFormatter()\n",
|
||||
" ax1.xaxis.set_major_formatter(lon_formatter)\n",
|
||||
" ax1.yaxis.set_major_formatter(lat_formatter)\n",
|
||||
"\n",
|
||||
" # Label axes of a Mercator projection without degree symbols in the labels\n",
|
||||
" # and formatting labels to include 1 decimal place:\n",
|
||||
" ax2 = fig.add_subplot(2, 1, 2, projection=ccrs.Mercator())\n",
|
||||
" ax2.set_global()\n",
|
||||
" ax2.coastlines()\n",
|
||||
" ax2.set_xticks([-180, -120, -60, 0, 60, 120, 180], crs=ccrs.PlateCarree())\n",
|
||||
" ax2.set_yticks([-78.5, -60, -25.5, 25.5, 60, 80], crs=ccrs.PlateCarree())\n",
|
||||
" lon_formatter = LongitudeFormatter(number_format='.1f',\n",
|
||||
" degree_symbol='',\n",
|
||||
" dateline_direction_label=True)\n",
|
||||
" lat_formatter = LatitudeFormatter(number_format='.1f',\n",
|
||||
" degree_symbol='')\n",
|
||||
" ax2.xaxis.set_major_formatter(lon_formatter)\n",
|
||||
" ax2.yaxis.set_major_formatter(lat_formatter)\n",
|
||||
"\n",
|
||||
" plt.show()\n",
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(10, 5))\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" # make the map global rather than have it zoom in to\n",
|
||||
" # the extents of any plotted data\n",
|
||||
" ax.set_global()\n",
|
||||
"\n",
|
||||
" ax.stock_img()\n",
|
||||
" ax.coastlines()\n",
|
||||
"\n",
|
||||
" ax.tissot(facecolor='orange', alpha=0.4)\n",
|
||||
"\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,225 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"UN Flag\n",
|
||||
"-------\n",
|
||||
"\n",
|
||||
"A demonstration of the power of Matplotlib combined with cartopy's Azimuthal\n",
|
||||
"Equidistant projection to reproduce the UN flag.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import cartopy.feature as cfeature\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"from matplotlib.patches import PathPatch\n",
|
||||
"import matplotlib.path\n",
|
||||
"import matplotlib.ticker\n",
|
||||
"from matplotlib.transforms import BboxTransform, Bbox\n",
|
||||
"import numpy as np\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"# When drawing the flag, we can either use white filled land, or be a little\n",
|
||||
"# more fancy and use the Natural Earth shaded relief imagery.\n",
|
||||
"filled_land = True\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def olive_path():\n",
|
||||
" \"\"\"\n",
|
||||
" Return a Matplotlib path representing a single olive branch from the\n",
|
||||
" UN Flag. The path coordinates were extracted from the SVG at\n",
|
||||
" https://commons.wikimedia.org/wiki/File:Flag_of_the_United_Nations.svg.\n",
|
||||
"\n",
|
||||
" \"\"\"\n",
|
||||
" olives_verts = np.array(\n",
|
||||
" [[0, 2, 6, 9, 30, 55, 79, 94, 104, 117, 134, 157, 177,\n",
|
||||
" 188, 199, 207, 191, 167, 149, 129, 109, 87, 53, 22, 0, 663,\n",
|
||||
" 245, 223, 187, 158, 154, 150, 146, 149, 154, 158, 181, 184, 197,\n",
|
||||
" 181, 167, 153, 142, 129, 116, 119, 123, 127, 151, 178, 203, 220,\n",
|
||||
" 237, 245, 663, 280, 267, 232, 209, 205, 201, 196, 196, 201, 207,\n",
|
||||
" 211, 224, 219, 230, 220, 212, 207, 198, 195, 176, 197, 220, 239,\n",
|
||||
" 259, 277, 280, 663, 295, 293, 264, 250, 247, 244, 240, 240, 243,\n",
|
||||
" 244, 249, 251, 250, 248, 242, 245, 233, 236, 230, 228, 224, 222,\n",
|
||||
" 234, 249, 262, 275, 285, 291, 295, 296, 295, 663, 294, 293, 292,\n",
|
||||
" 289, 294, 277, 271, 269, 268, 265, 264, 264, 264, 272, 260, 248,\n",
|
||||
" 245, 243, 242, 240, 243, 245, 247, 252, 256, 259, 258, 257, 258,\n",
|
||||
" 267, 285, 290, 294, 297, 294, 663, 285, 285, 277, 266, 265, 265,\n",
|
||||
" 265, 277, 266, 268, 269, 269, 269, 268, 268, 267, 267, 264, 248,\n",
|
||||
" 235, 232, 229, 228, 229, 232, 236, 246, 266, 269, 271, 285, 285,\n",
|
||||
" 663, 252, 245, 238, 230, 246, 245, 250, 252, 255, 256, 256, 253,\n",
|
||||
" 249, 242, 231, 214, 208, 208, 227, 244, 252, 258, 262, 262, 261,\n",
|
||||
" 262, 264, 265, 252, 663, 185, 197, 206, 215, 223, 233, 242, 237,\n",
|
||||
" 237, 230, 220, 202, 185, 663],\n",
|
||||
" [8, 5, 3, 0, 22, 46, 46, 46, 35, 27, 16, 10, 18,\n",
|
||||
" 22, 28, 38, 27, 26, 33, 41, 52, 52, 52, 30, 8, 595,\n",
|
||||
" 77, 52, 61, 54, 53, 52, 53, 55, 55, 57, 65, 90, 106,\n",
|
||||
" 96, 81, 68, 58, 54, 51, 50, 51, 50, 44, 34, 43, 48,\n",
|
||||
" 61, 77, 595, 135, 104, 102, 83, 79, 76, 74, 74, 79, 84,\n",
|
||||
" 90, 109, 135, 156, 145, 133, 121, 100, 77, 62, 69, 67, 80,\n",
|
||||
" 92, 113, 135, 595, 198, 171, 156, 134, 129, 124, 120, 123, 126,\n",
|
||||
" 129, 138, 149, 161, 175, 188, 202, 177, 144, 116, 110, 105, 99,\n",
|
||||
" 108, 116, 126, 136, 147, 162, 173, 186, 198, 595, 249, 255, 261,\n",
|
||||
" 267, 241, 222, 200, 192, 183, 175, 175, 175, 175, 199, 221, 240,\n",
|
||||
" 245, 250, 256, 245, 233, 222, 207, 194, 180, 172, 162, 153, 154,\n",
|
||||
" 171, 184, 202, 216, 233, 249, 595, 276, 296, 312, 327, 327, 327,\n",
|
||||
" 327, 308, 284, 262, 240, 240, 239, 239, 242, 244, 247, 265, 277,\n",
|
||||
" 290, 293, 296, 300, 291, 282, 274, 253, 236, 213, 235, 252, 276,\n",
|
||||
" 595, 342, 349, 355, 357, 346, 326, 309, 303, 297, 291, 290, 297,\n",
|
||||
" 304, 310, 321, 327, 343, 321, 305, 292, 286, 278, 270, 276, 281,\n",
|
||||
" 287, 306, 328, 342, 595, 379, 369, 355, 343, 333, 326, 318, 328,\n",
|
||||
" 340, 349, 366, 373, 379, 595]]).T\n",
|
||||
" olives_codes = np.array([1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 2, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,\n",
|
||||
" 4, 4, 79], dtype=np.uint8)\n",
|
||||
"\n",
|
||||
" return matplotlib.path.Path(olives_verts, olives_codes)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" blue = '#4b92db'\n",
|
||||
"\n",
|
||||
" # We're drawing a flag with a 3:5 aspect ratio.\n",
|
||||
" fig = plt.figure(figsize=[7.5, 4.5], facecolor=blue)\n",
|
||||
" # Put a blue background on the figure.\n",
|
||||
" blue_background = PathPatch(matplotlib.path.Path.unit_rectangle(),\n",
|
||||
" transform=fig.transFigure, color=blue,\n",
|
||||
" zorder=-1)\n",
|
||||
" fig.patches.append(blue_background)\n",
|
||||
"\n",
|
||||
" # Set up the Azimuthal Equidistant and Plate Carree projections\n",
|
||||
" # for later use.\n",
|
||||
" az_eq = ccrs.AzimuthalEquidistant(central_latitude=90)\n",
|
||||
" pc = ccrs.PlateCarree()\n",
|
||||
"\n",
|
||||
" # Pick a suitable location for the map (which is in an Azimuthal\n",
|
||||
" # Equidistant projection).\n",
|
||||
" ax = fig.add_axes([0.25, 0.24, 0.5, 0.54], projection=az_eq)\n",
|
||||
"\n",
|
||||
" # The background patch and outline patch are not needed in this example.\n",
|
||||
" ax.background_patch.set_facecolor('none')\n",
|
||||
" ax.outline_patch.set_edgecolor('none')\n",
|
||||
"\n",
|
||||
" # We want the map to go down to -60 degrees latitude.\n",
|
||||
" ax.set_extent([-180, 180, -60, 90], ccrs.PlateCarree())\n",
|
||||
"\n",
|
||||
" # Importantly, we want the axes to be circular at the -60 latitude\n",
|
||||
" # rather than cartopy's default behaviour of zooming in and becoming\n",
|
||||
" # square.\n",
|
||||
" _, patch_radius = az_eq.transform_point(0, -60, pc)\n",
|
||||
" circular_path = matplotlib.path.Path.circle(0, patch_radius)\n",
|
||||
" ax.set_boundary(circular_path)\n",
|
||||
"\n",
|
||||
" if filled_land:\n",
|
||||
" ax.add_feature(\n",
|
||||
" cfeature.LAND, facecolor='white', edgecolor='none')\n",
|
||||
" else:\n",
|
||||
" ax.stock_img()\n",
|
||||
"\n",
|
||||
" gl = ax.gridlines(crs=pc, linewidth=3, color='white', linestyle='-')\n",
|
||||
" # Meridians every 45 degrees, and 5 parallels.\n",
|
||||
" gl.xlocator = matplotlib.ticker.FixedLocator(np.arange(0, 361, 45))\n",
|
||||
" parallels = np.linspace(-60, 70, 5, endpoint=True)\n",
|
||||
" gl.ylocator = matplotlib.ticker.FixedLocator(parallels)\n",
|
||||
"\n",
|
||||
" # Now add the olive branches around the axes. We do this in normalised\n",
|
||||
" # figure coordinates\n",
|
||||
" olive_leaf = olive_path()\n",
|
||||
"\n",
|
||||
" olives_bbox = Bbox.null()\n",
|
||||
" olives_bbox.update_from_path(olive_leaf)\n",
|
||||
"\n",
|
||||
" # The first olive branch goes from left to right.\n",
|
||||
" olive1_axes_bbox = Bbox([[0.45, 0.15], [0.725, 0.75]])\n",
|
||||
" olive1_trans = BboxTransform(olives_bbox, olive1_axes_bbox)\n",
|
||||
"\n",
|
||||
" # THe second olive branch goes from right to left (mirroring the first).\n",
|
||||
" olive2_axes_bbox = Bbox([[0.55, 0.15], [0.275, 0.75]])\n",
|
||||
" olive2_trans = BboxTransform(olives_bbox, olive2_axes_bbox)\n",
|
||||
"\n",
|
||||
" olive1 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',\n",
|
||||
" transform=olive1_trans + fig.transFigure)\n",
|
||||
" olive2 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',\n",
|
||||
" transform=olive2_trans + fig.transFigure)\n",
|
||||
"\n",
|
||||
" fig.patches.append(olive1)\n",
|
||||
" fig.patches.append(olive2)\n",
|
||||
"\n",
|
||||
" plt.show()\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"scrolled": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,102 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Displaying all 60 zones of the UTM projection\n",
|
||||
"---------------------------------------------\n",
|
||||
"\n",
|
||||
"This example displays all 60 zones of the Universal Transverse Mercator\n",
|
||||
"projection next to each other in a figure.\n",
|
||||
"\n",
|
||||
"First we create a figure with 60 subplots in one row.\n",
|
||||
"Next we set the projection of each axis in the figure to a specific UTM zone.\n",
|
||||
"Then we add coastlines, gridlines and the number of the zone.\n",
|
||||
"Finally we add a supertitle and display the figure."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import matplotlib.pyplot as plt\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" # Create a list of integers from 1 - 60\n",
|
||||
" zones = range(1, 61)\n",
|
||||
"\n",
|
||||
" # Create a figure\n",
|
||||
" fig = plt.figure(figsize=(18, 6))\n",
|
||||
"\n",
|
||||
" # Loop through each zone in the list\n",
|
||||
" for zone in zones:\n",
|
||||
"\n",
|
||||
" # Add GeoAxes object with specific UTM zone projection to the figure\n",
|
||||
" ax = fig.add_subplot(1, len(zones), zone,\n",
|
||||
" projection=ccrs.UTM(zone=zone,\n",
|
||||
" southern_hemisphere=True))\n",
|
||||
"\n",
|
||||
" # Add coastlines, gridlines and zone number for the subplot\n",
|
||||
" ax.coastlines(resolution='110m')\n",
|
||||
" ax.gridlines()\n",
|
||||
" ax.set_title(zone)\n",
|
||||
"\n",
|
||||
" # Add a supertitle for the figure\n",
|
||||
" fig.suptitle(\"UTM Projection - Zones\")\n",
|
||||
"\n",
|
||||
" # Display the figure\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import cartopy.crs as ccrs\n",
|
||||
"import matplotlib.pyplot as plt\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" fig = plt.figure(figsize=(10, 5))\n",
|
||||
" ax = fig.add_subplot(1, 1, 1, projection=ccrs.InterruptedGoodeHomolosine())\n",
|
||||
" ax.coastlines()\n",
|
||||
"\n",
|
||||
" ax.add_wms(wms='http://vmap0.tiles.osgeo.org/wms/vmap0',\n",
|
||||
" layers=['basic'])\n",
|
||||
"\n",
|
||||
" plt.show()\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,57 @@
|
|||
"""
|
||||
Custom Boundary Shape
|
||||
---------------------
|
||||
|
||||
This example demonstrates how a custom shape geometry may be used
|
||||
instead of the projection's default boundary.
|
||||
|
||||
In this instance, we define the boundary as a circle in axes coordinates.
|
||||
This means that no matter the extent of the map itself, the boundary will
|
||||
always be a circle.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import matplotlib.path as mpath
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=[10, 5])
|
||||
ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.SouthPolarStereo())
|
||||
ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.SouthPolarStereo(),
|
||||
sharex=ax1, sharey=ax1)
|
||||
fig.subplots_adjust(bottom=0.05, top=0.95,
|
||||
left=0.04, right=0.95, wspace=0.02)
|
||||
|
||||
# Limit the map to -60 degrees latitude and below.
|
||||
ax1.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())
|
||||
|
||||
ax1.add_feature(cfeature.LAND)
|
||||
ax1.add_feature(cfeature.OCEAN)
|
||||
|
||||
ax1.gridlines()
|
||||
ax2.gridlines()
|
||||
|
||||
ax2.add_feature(cfeature.LAND)
|
||||
ax2.add_feature(cfeature.OCEAN)
|
||||
|
||||
# Compute a circle in axes coordinates, which we can use as a boundary
|
||||
# for the map. We can pan/zoom as much as we like - the boundary will be
|
||||
# permanently circular.
|
||||
theta = np.linspace(0, 2*np.pi, 100)
|
||||
center, radius = [0.5, 0.5], 0.5
|
||||
verts = np.vstack([np.sin(theta), np.cos(theta)]).T
|
||||
circle = mpath.Path(verts * radius + center)
|
||||
|
||||
ax2.set_boundary(circle, transform=ax2.transAxes)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,54 @@
|
|||
"""
|
||||
Arrows
|
||||
------
|
||||
|
||||
Plotting arrows.
|
||||
|
||||
"""
|
||||
__tags__ = ['Vector data']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
|
||||
|
||||
def sample_data(shape=(20, 30)):
|
||||
"""
|
||||
Return ``(x, y, u, v, crs)`` of some vector data
|
||||
computed mathematically. The returned crs will be a rotated
|
||||
pole CRS, meaning that the vectors will be unevenly spaced in
|
||||
regular PlateCarree space.
|
||||
|
||||
"""
|
||||
crs = ccrs.RotatedPole(pole_longitude=177.5, pole_latitude=37.5)
|
||||
|
||||
x = np.linspace(311.9, 391.1, shape[1])
|
||||
y = np.linspace(-23.6, 24.8, shape[0])
|
||||
|
||||
x2d, y2d = np.meshgrid(x, y)
|
||||
u = 10 * (2 * np.cos(2 * np.deg2rad(x2d) + 3 * np.deg2rad(y2d + 30)) ** 2)
|
||||
v = 20 * np.cos(6 * np.deg2rad(x2d))
|
||||
|
||||
return x, y, u, v, crs
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.Orthographic(-10, 45))
|
||||
|
||||
ax.add_feature(cfeature.OCEAN, zorder=0)
|
||||
ax.add_feature(cfeature.LAND, zorder=0, edgecolor='black')
|
||||
|
||||
ax.set_global()
|
||||
ax.gridlines()
|
||||
|
||||
x, y, u, v, vector_crs = sample_data()
|
||||
ax.quiver(x, y, u, v, transform=vector_crs)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,119 @@
|
|||
"""
|
||||
Plotting the Aurora Forecast from NOAA on Orthographic Polar Projection
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
The National Oceanic and Atmospheric Administration (NOAA) monitors the
|
||||
solar wind conditions using the ACE spacecraft orbiting close to the L1
|
||||
Lagrangian point of the Sun-Earth system. This data is fed into the
|
||||
OVATION-Prime model to forecast the probability of visible aurora at
|
||||
various locations on Earth. Every five minutes a new forecast is
|
||||
published for the coming 30 minutes. The data is provided as a
|
||||
1024 by 512 grid of probabilities in percent of visible aurora. The
|
||||
data spaced equally in degrees from -180 to 180 and -90 to 90.
|
||||
|
||||
"""
|
||||
__tags__ = ["Scalar data"]
|
||||
try:
|
||||
from urllib2 import urlopen
|
||||
except ImportError:
|
||||
from urllib.request import urlopen
|
||||
|
||||
from io import StringIO
|
||||
|
||||
import numpy as np
|
||||
from datetime import datetime
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.feature.nightshade import Nightshade
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.colors import LinearSegmentedColormap
|
||||
|
||||
|
||||
def aurora_forecast():
|
||||
"""
|
||||
Get the latest Aurora Forecast from https://www.swpc.noaa.gov.
|
||||
|
||||
Returns
|
||||
-------
|
||||
img : numpy array
|
||||
The pixels of the image in a numpy array.
|
||||
img_proj : cartopy CRS
|
||||
The rectangular coordinate system of the image.
|
||||
img_extent : tuple of floats
|
||||
The extent of the image ``(x0, y0, x1, y1)`` referenced in
|
||||
the ``img_proj`` coordinate system.
|
||||
origin : str
|
||||
The origin of the image to be passed through to matplotlib's imshow.
|
||||
dt : datetime
|
||||
Time of forecast validity.
|
||||
|
||||
"""
|
||||
|
||||
# GitHub gist to download the example data from
|
||||
url = ('https://gist.githubusercontent.com/belteshassar/'
|
||||
'c7ea9e02a3e3934a9ddc/raw/aurora-nowcast-map.txt')
|
||||
# To plot the current forecast instead, uncomment the following line
|
||||
# url = 'https://services.swpc.noaa.gov/text/aurora-nowcast-map.txt'
|
||||
|
||||
response_text = StringIO(urlopen(url).read().decode('utf-8'))
|
||||
img = np.loadtxt(response_text)
|
||||
# Read forecast date and time
|
||||
response_text.seek(0)
|
||||
for line in response_text:
|
||||
if line.startswith('Product Valid At:', 2):
|
||||
dt = datetime.strptime(line[-17:-1], '%Y-%m-%d %H:%M')
|
||||
|
||||
img_proj = ccrs.PlateCarree()
|
||||
img_extent = (-180, 180, -90, 90)
|
||||
return img, img_proj, img_extent, 'lower', dt
|
||||
|
||||
|
||||
def aurora_cmap():
|
||||
"""Return a colormap with aurora like colors"""
|
||||
stops = {'red': [(0.00, 0.1725, 0.1725),
|
||||
(0.50, 0.1725, 0.1725),
|
||||
(1.00, 0.8353, 0.8353)],
|
||||
|
||||
'green': [(0.00, 0.9294, 0.9294),
|
||||
(0.50, 0.9294, 0.9294),
|
||||
(1.00, 0.8235, 0.8235)],
|
||||
|
||||
'blue': [(0.00, 0.3843, 0.3843),
|
||||
(0.50, 0.3843, 0.3843),
|
||||
(1.00, 0.6549, 0.6549)],
|
||||
|
||||
'alpha': [(0.00, 0.0, 0.0),
|
||||
(0.50, 1.0, 1.0),
|
||||
(1.00, 1.0, 1.0)]}
|
||||
|
||||
return LinearSegmentedColormap('aurora', stops)
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=[10, 5])
|
||||
|
||||
# We choose to plot in an Orthographic projection as it looks natural
|
||||
# and the distortion is relatively small around the poles where
|
||||
# the aurora is most likely.
|
||||
|
||||
# ax1 for Northern Hemisphere
|
||||
ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.Orthographic(0, 90))
|
||||
|
||||
# ax2 for Southern Hemisphere
|
||||
ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.Orthographic(180, -90))
|
||||
|
||||
img, crs, extent, origin, dt = aurora_forecast()
|
||||
|
||||
for ax in [ax1, ax2]:
|
||||
ax.coastlines(zorder=3)
|
||||
ax.stock_img()
|
||||
ax.gridlines()
|
||||
ax.add_feature(Nightshade(dt))
|
||||
ax.imshow(img, vmin=0, vmax=100, transform=crs,
|
||||
extent=extent, origin=origin, zorder=2,
|
||||
cmap=aurora_cmap())
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,77 @@
|
|||
"""
|
||||
Using Cartopy and AxesGrid toolkit
|
||||
----------------------------------
|
||||
|
||||
This example demonstrates how to use cartopy `GeoAxes` with
|
||||
`AxesGrid` from the `mpl_toolkits.axes_grid1`.
|
||||
The script constructs an `axes_class` kwarg with Plate Carree projection
|
||||
and passes it to the `AxesGrid` instance. The `AxesGrid` built-in
|
||||
labelling is switched off, and instead a standard procedure
|
||||
of creating grid lines is used. Then some fake data is plotted.
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.mpl.geoaxes import GeoAxes
|
||||
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
|
||||
import matplotlib.pyplot as plt
|
||||
from mpl_toolkits.axes_grid1 import AxesGrid
|
||||
import numpy as np
|
||||
|
||||
|
||||
def sample_data_3d(shape):
|
||||
"""Return `lons`, `lats`, `times` and fake `data`"""
|
||||
ntimes, nlats, nlons = shape
|
||||
lats = np.linspace(-np.pi / 2, np.pi / 2, nlats)
|
||||
lons = np.linspace(0, 2 * np.pi, nlons)
|
||||
lons, lats = np.meshgrid(lons, lats)
|
||||
wave = 0.75 * (np.sin(2 * lats) ** 8) * np.cos(4 * lons)
|
||||
mean = 0.5 * np.cos(2 * lats) * ((np.sin(2 * lats)) ** 2 + 2)
|
||||
|
||||
lats = np.rad2deg(lats)
|
||||
lons = np.rad2deg(lons)
|
||||
data = wave + mean
|
||||
|
||||
times = np.linspace(-1, 1, ntimes)
|
||||
new_shape = data.shape + (ntimes, )
|
||||
data = np.rollaxis(data.repeat(ntimes).reshape(new_shape), -1)
|
||||
data *= times[:, np.newaxis, np.newaxis]
|
||||
|
||||
return lons, lats, times, data
|
||||
|
||||
|
||||
def main():
|
||||
projection = ccrs.PlateCarree()
|
||||
axes_class = (GeoAxes,
|
||||
dict(map_projection=projection))
|
||||
|
||||
lons, lats, times, data = sample_data_3d((6, 73, 145))
|
||||
|
||||
fig = plt.figure()
|
||||
axgr = AxesGrid(fig, 111, axes_class=axes_class,
|
||||
nrows_ncols=(3, 2),
|
||||
axes_pad=0.6,
|
||||
cbar_location='right',
|
||||
cbar_mode='single',
|
||||
cbar_pad=0.2,
|
||||
cbar_size='3%',
|
||||
label_mode='') # note the empty label_mode
|
||||
|
||||
for i, ax in enumerate(axgr):
|
||||
ax.coastlines()
|
||||
ax.set_xticks(np.linspace(-180, 180, 5), crs=projection)
|
||||
ax.set_yticks(np.linspace(-90, 90, 5), crs=projection)
|
||||
lon_formatter = LongitudeFormatter(zero_direction_label=True)
|
||||
lat_formatter = LatitudeFormatter()
|
||||
ax.xaxis.set_major_formatter(lon_formatter)
|
||||
ax.yaxis.set_major_formatter(lat_formatter)
|
||||
|
||||
p = ax.contourf(lons, lats, data[i, ...],
|
||||
transform=projection,
|
||||
cmap='RdBu')
|
||||
|
||||
axgr.cbar_axes[0].colorbar(p)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,32 @@
|
|||
"""
|
||||
Barbs
|
||||
-----
|
||||
|
||||
Plotting barbs.
|
||||
|
||||
"""
|
||||
__tags__ = ['Vector data']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.examples.arrows import sample_data
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax.set_extent([-90, 80, 10, 85], crs=ccrs.PlateCarree())
|
||||
ax.stock_img()
|
||||
ax.coastlines()
|
||||
|
||||
x, y, u, v, vector_crs = sample_data(shape=(10, 14))
|
||||
ax.barbs(x, y, u, v, length=5,
|
||||
sizes=dict(emptybarb=0.25, spacing=0.2, height=0.5),
|
||||
linewidth=0.95, transform=vector_crs)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,56 @@
|
|||
"""
|
||||
Contour labels
|
||||
--------------
|
||||
|
||||
An example of adding contour labels to matplotlib contours.
|
||||
|
||||
"""
|
||||
__tags__ = ['Scalar data']
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
from cartopy.examples.waves import sample_data
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
|
||||
# Setup a global EckertIII map with faint coastlines.
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.EckertIII())
|
||||
ax.set_global()
|
||||
ax.coastlines('110m', alpha=0.1)
|
||||
|
||||
# Use the waves example to provide some sample data, but make it
|
||||
# more dependent on y for more interesting contours.
|
||||
x, y, z = sample_data((20, 40))
|
||||
z = z * -1.5 * y
|
||||
|
||||
# Add colourful filled contours.
|
||||
filled_c = ax.contourf(x, y, z, transform=ccrs.PlateCarree())
|
||||
|
||||
# And black line contours.
|
||||
line_c = ax.contour(x, y, z, levels=filled_c.levels,
|
||||
colors=['black'],
|
||||
transform=ccrs.PlateCarree())
|
||||
|
||||
# Uncomment to make the line contours invisible.
|
||||
# plt.setp(line_c.collections, visible=False)
|
||||
|
||||
# Add a colorbar for the filled contour.
|
||||
fig.colorbar(filled_c, orientation='horizontal')
|
||||
|
||||
# Use the line contours to place contour labels.
|
||||
ax.clabel(
|
||||
line_c, # Typically best results when labelling line contours.
|
||||
colors=['black'],
|
||||
manual=False, # Automatic placement vs manual placement.
|
||||
inline=True, # Cut the line where the label will be placed.
|
||||
fmt=' {:.0f} '.format, # Labes as integers, with some extra space.
|
||||
)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,74 @@
|
|||
"""
|
||||
Displaying data on an eccentric ellipse
|
||||
---------------------------------------
|
||||
|
||||
This example demonstrates plotting data on an eccentric ellipse. The data
|
||||
plotted is a topography map of the asteroid Vesta. The map is actually an
|
||||
image, which is defined on an equirectangluar projection relative to an
|
||||
ellipse with a semi-major axis of 285 km and a semi-minor axis of 229 km.
|
||||
The image is reprojected on-the-fly onto a geostationary projection with
|
||||
matching eccentricity.
|
||||
|
||||
"""
|
||||
try:
|
||||
from urllib2 import urlopen
|
||||
except ImportError:
|
||||
from urllib.request import urlopen
|
||||
from io import BytesIO
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
|
||||
|
||||
def vesta_image():
|
||||
"""
|
||||
Return an image of Vesta's topography.
|
||||
|
||||
Image credit: NASA/JPL-Caltech/UCLA/MPS/DLR/IDA/PSI
|
||||
|
||||
Returns
|
||||
-------
|
||||
img : numpy array
|
||||
The pixels of the image in a numpy array.
|
||||
img_proj : cartopy CRS
|
||||
The rectangular coordinate system of the image.
|
||||
img_extent : tuple of floats
|
||||
The extent of the image ``(x0, y0, x1, y1)`` referenced in
|
||||
the ``img_proj`` coordinate system.
|
||||
|
||||
"""
|
||||
url = 'https://www.nasa.gov/sites/default/files/pia17037.jpg'
|
||||
img_handle = BytesIO(urlopen(url).read())
|
||||
raw_image = Image.open(img_handle)
|
||||
# The image is extremely high-resolution, which takes a long time to
|
||||
# plot. Sub-sampling reduces the time taken to plot while not
|
||||
# significantly altering the integrity of the result.
|
||||
smaller_image = raw_image.resize([raw_image.size[0] // 10,
|
||||
raw_image.size[1] // 10])
|
||||
img = np.asarray(smaller_image)
|
||||
# We define the semimajor and semiminor axes, but must also tell the
|
||||
# globe not to use the WGS84 ellipse, which is its default behaviour.
|
||||
img_globe = ccrs.Globe(semimajor_axis=285000., semiminor_axis=229000.,
|
||||
ellipse=None)
|
||||
img_proj = ccrs.PlateCarree(globe=img_globe)
|
||||
img_extent = (-895353.906273091, 895353.906273091,
|
||||
447676.9531365455, -447676.9531365455)
|
||||
return img, img_globe, img_proj, img_extent
|
||||
|
||||
|
||||
def main():
|
||||
img, globe, crs, extent = vesta_image()
|
||||
projection = ccrs.Geostationary(globe=globe)
|
||||
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=projection)
|
||||
ax.imshow(img, transform=crs, extent=extent)
|
||||
fig.text(.075, .012, "Image credit: NASA/JPL-Caltech/UCLA/MPS/DLR/IDA/PSI",
|
||||
bbox={'facecolor': 'w', 'edgecolor': 'k'})
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,118 @@
|
|||
"""
|
||||
The effect of badly referencing an ellipse
|
||||
------------------------------------------
|
||||
|
||||
This example demonstrates the effect of referencing your data to an incorrect
|
||||
ellipse.
|
||||
|
||||
First we define two coordinate systems - one using the World Geodetic System
|
||||
established in 1984 and the other using a spherical globe. Next we extract
|
||||
data from the Natural Earth land dataset and convert the Geodetic
|
||||
coordinates (referenced in WGS84) into the respective coordinate systems
|
||||
that we have defined. Finally, we plot these datasets onto a map assuming
|
||||
that they are both referenced to the WGS84 ellipse and compare how the
|
||||
coastlines are shifted as a result of referencing the incorrect ellipse.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
from cartopy.io.img_tiles import Stamen
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.lines import Line2D as Line
|
||||
from matplotlib.patheffects import Stroke
|
||||
import numpy as np
|
||||
import shapely.geometry as sgeom
|
||||
from shapely.ops import transform as geom_transform
|
||||
|
||||
|
||||
def transform_fn_factory(target_crs, source_crs):
|
||||
"""
|
||||
Return a function which can be used by ``shapely.op.transform``
|
||||
to transform the coordinate points of a geometry.
|
||||
|
||||
The function explicitly *does not* do any interpolation or clever
|
||||
transformation of the coordinate points, so there is no guarantee
|
||||
that the resulting geometry would make any sense.
|
||||
|
||||
"""
|
||||
def transform_fn(x, y, z=None):
|
||||
new_coords = target_crs.transform_points(source_crs,
|
||||
np.asanyarray(x),
|
||||
np.asanyarray(y))
|
||||
return new_coords[:, 0], new_coords[:, 1], new_coords[:, 2]
|
||||
|
||||
return transform_fn
|
||||
|
||||
|
||||
def main():
|
||||
# Define the two coordinate systems with different ellipses.
|
||||
wgs84 = ccrs.PlateCarree(globe=ccrs.Globe(datum='WGS84',
|
||||
ellipse='WGS84'))
|
||||
sphere = ccrs.PlateCarree(globe=ccrs.Globe(datum='WGS84',
|
||||
ellipse='sphere'))
|
||||
|
||||
# Define the coordinate system of the data we have from Natural Earth and
|
||||
# acquire the 1:10m physical coastline shapefile.
|
||||
geodetic = ccrs.Geodetic(globe=ccrs.Globe(datum='WGS84'))
|
||||
dataset = cfeature.NaturalEarthFeature(category='physical',
|
||||
name='coastline',
|
||||
scale='10m')
|
||||
|
||||
# Create a Stamen map tiler instance, and use its CRS for the GeoAxes.
|
||||
tiler = Stamen('terrain-background')
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=tiler.crs)
|
||||
ax.set_title('The effect of incorrectly referencing the Solomon Islands')
|
||||
|
||||
# Pick the area of interest. In our case, roughly the Solomon Islands, and
|
||||
# get hold of the coastlines for that area.
|
||||
extent = [155, 163, -11.5, -6]
|
||||
ax.set_extent(extent, geodetic)
|
||||
geoms = list(dataset.intersecting_geometries(extent))
|
||||
|
||||
# Add the Stamen aerial imagery at zoom level 7.
|
||||
ax.add_image(tiler, 7)
|
||||
|
||||
# Transform the geodetic coordinates of the coastlines into the two
|
||||
# projections of differing ellipses.
|
||||
wgs84_geoms = [geom_transform(transform_fn_factory(wgs84, geodetic),
|
||||
geom) for geom in geoms]
|
||||
sphere_geoms = [geom_transform(transform_fn_factory(sphere, geodetic),
|
||||
geom) for geom in geoms]
|
||||
|
||||
# Using these differently referenced geometries, assume that they are
|
||||
# both referenced to WGS84.
|
||||
ax.add_geometries(wgs84_geoms, wgs84, edgecolor='white', facecolor='none')
|
||||
ax.add_geometries(sphere_geoms, wgs84, edgecolor='gray', facecolor='none')
|
||||
|
||||
# Create a legend for the coastlines.
|
||||
legend_artists = [Line([0], [0], color=color, linewidth=3)
|
||||
for color in ('white', 'gray')]
|
||||
legend_texts = ['Correct ellipse\n(WGS84)', 'Incorrect ellipse\n(sphere)']
|
||||
legend = ax.legend(legend_artists, legend_texts, fancybox=True,
|
||||
loc='lower left', framealpha=0.75)
|
||||
legend.legendPatch.set_facecolor('wheat')
|
||||
|
||||
# Create an inset GeoAxes showing the location of the Solomon Islands.
|
||||
sub_ax = fig.add_axes([0.7, 0.625, 0.2, 0.2],
|
||||
projection=ccrs.PlateCarree())
|
||||
sub_ax.set_extent([110, 180, -50, 10], geodetic)
|
||||
|
||||
# Make a nice border around the inset axes.
|
||||
effect = Stroke(linewidth=4, foreground='wheat', alpha=0.5)
|
||||
sub_ax.spines['geo'].set_path_effects([effect])
|
||||
|
||||
# Add the land, coastlines and the extent of the Solomon Islands.
|
||||
sub_ax.add_feature(cfeature.LAND)
|
||||
sub_ax.coastlines()
|
||||
extent_box = sgeom.box(extent[0], extent[2], extent[1], extent[3])
|
||||
sub_ax.add_geometries([extent_box], ccrs.PlateCarree(), facecolor='none',
|
||||
edgecolor='blue', linewidth=2)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,55 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Map tile acquisition
|
||||
--------------------
|
||||
|
||||
Demonstrates cartopy's ability to draw map tiles which are downloaded on
|
||||
demand from the Stamen tile server. Internally these tiles are then combined
|
||||
into a single image and displayed in the cartopy GeoAxes.
|
||||
|
||||
"""
|
||||
__tags__ = ["Scalar data"]
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.transforms import offset_copy
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.io.img_tiles as cimgt
|
||||
|
||||
|
||||
def main():
|
||||
# Create a Stamen terrain background instance.
|
||||
stamen_terrain = cimgt.Stamen('terrain-background')
|
||||
|
||||
fig = plt.figure()
|
||||
|
||||
# Create a GeoAxes in the tile's projection.
|
||||
ax = fig.add_subplot(1, 1, 1, projection=stamen_terrain.crs)
|
||||
|
||||
# Limit the extent of the map to a small longitude/latitude range.
|
||||
ax.set_extent([-22, -15, 63, 65], crs=ccrs.Geodetic())
|
||||
|
||||
# Add the Stamen data at zoom level 8.
|
||||
ax.add_image(stamen_terrain, 8)
|
||||
|
||||
# Add a marker for the Eyjafjallajökull volcano.
|
||||
ax.plot(-19.613333, 63.62, marker='o', color='red', markersize=12,
|
||||
alpha=0.7, transform=ccrs.Geodetic())
|
||||
|
||||
# Use the cartopy interface to create a matplotlib transform object
|
||||
# for the Geodetic coordinate system. We will use this along with
|
||||
# matplotlib's offset_copy function to define a coordinate system which
|
||||
# translates the text by 25 pixels to the left.
|
||||
geodetic_transform = ccrs.Geodetic()._as_mpl_transform(ax)
|
||||
text_transform = offset_copy(geodetic_transform, units='dots', x=-25)
|
||||
|
||||
# Add text 25 pixels to the left of the volcano.
|
||||
ax.text(-19.613333, 63.62, u'Eyjafjallajökull',
|
||||
verticalalignment='center', horizontalalignment='right',
|
||||
transform=text_transform,
|
||||
bbox=dict(facecolor='sandybrown', alpha=0.5, boxstyle='round'))
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,41 @@
|
|||
"""
|
||||
Cartopy Favicon
|
||||
---------------
|
||||
|
||||
The actual code to generate cartopy's favicon.
|
||||
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.textpath
|
||||
import matplotlib.patches
|
||||
from matplotlib.font_manager import FontProperties
|
||||
import numpy as np
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=[8, 8])
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.SouthPolarStereo())
|
||||
|
||||
ax.coastlines()
|
||||
ax.gridlines()
|
||||
ax.stock_img()
|
||||
|
||||
# Generate a matplotlib path representing the character "C".
|
||||
fp = FontProperties(family='Bitstream Vera Sans', weight='bold')
|
||||
logo_path = matplotlib.textpath.TextPath((-4.5e7, -3.7e7),
|
||||
'C', size=1, prop=fp)
|
||||
|
||||
# Scale the letter up to an appropriate X and Y scale.
|
||||
logo_path._vertices *= np.array([103250000, 103250000])
|
||||
|
||||
# Add the path as a patch, drawing black outlines around the text.
|
||||
patch = matplotlib.patches.PathPatch(logo_path, facecolor='white',
|
||||
edgecolor='black', linewidth=10,
|
||||
transform=ccrs.SouthPolarStereo())
|
||||
ax.add_patch(patch)
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,57 @@
|
|||
"""
|
||||
Feature Creation
|
||||
----------------
|
||||
|
||||
This example manually instantiates a
|
||||
:class:`cartopy.feature.NaturalEarthFeature` to access administrative
|
||||
boundaries (states and provinces).
|
||||
|
||||
Note that this example is intended to illustrate the ability to construct
|
||||
Natural Earth features that cartopy does not necessarily know about
|
||||
*a priori*.
|
||||
In this instance however, it would be possible to make use of the
|
||||
pre-defined :data:`cartopy.feature.STATES` constant.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
from matplotlib.offsetbox import AnchoredText
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax.set_extent([80, 170, -45, 30], crs=ccrs.PlateCarree())
|
||||
|
||||
# Put a background image on for nice sea rendering.
|
||||
ax.stock_img()
|
||||
|
||||
# Create a feature for States/Admin 1 regions at 1:50m from Natural Earth
|
||||
states_provinces = cfeature.NaturalEarthFeature(
|
||||
category='cultural',
|
||||
name='admin_1_states_provinces_lines',
|
||||
scale='50m',
|
||||
facecolor='none')
|
||||
|
||||
SOURCE = 'Natural Earth'
|
||||
LICENSE = 'public domain'
|
||||
|
||||
ax.add_feature(cfeature.LAND)
|
||||
ax.add_feature(cfeature.COASTLINE)
|
||||
ax.add_feature(states_provinces, edgecolor='gray')
|
||||
|
||||
# Add a text annotation for the license information to the
|
||||
# the bottom right corner.
|
||||
text = AnchoredText(r'$\mathcircled{{c}}$ {}; license: {}'
|
||||
''.format(SOURCE, LICENSE),
|
||||
loc=4, prop={'size': 12}, frameon=True)
|
||||
ax.add_artist(text)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,32 @@
|
|||
"""
|
||||
Features
|
||||
--------
|
||||
|
||||
A demonstration of some of the built-in Natural Earth features found
|
||||
in cartopy.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax.set_extent([-20, 60, -40, 45], crs=ccrs.PlateCarree())
|
||||
|
||||
ax.add_feature(cfeature.LAND)
|
||||
ax.add_feature(cfeature.OCEAN)
|
||||
ax.add_feature(cfeature.COASTLINE)
|
||||
ax.add_feature(cfeature.BORDERS, linestyle=':')
|
||||
ax.add_feature(cfeature.LAKES, alpha=0.5)
|
||||
ax.add_feature(cfeature.RIVERS)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,65 @@
|
|||
"""
|
||||
Reprojecting images from a Geostationary projection
|
||||
---------------------------------------------------
|
||||
|
||||
This example demonstrates Cartopy's ability to project images into the desired
|
||||
projection on-the-fly. The image itself is retrieved from a URL and is loaded
|
||||
directly into memory without storing it intermediately into a file. It
|
||||
represents pre-processed data from the Spinning Enhanced Visible and Infrared
|
||||
Imager onboard Meteosat Second Generation, which has been put into an image in
|
||||
the data's native Geostationary coordinate system - it is then projected by
|
||||
cartopy into a global Miller map.
|
||||
|
||||
"""
|
||||
__tags__ = ["Scalar data"]
|
||||
|
||||
try:
|
||||
from urllib2 import urlopen
|
||||
except ImportError:
|
||||
from urllib.request import urlopen
|
||||
from io import BytesIO
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
def geos_image():
|
||||
"""
|
||||
Return a specific SEVIRI image by retrieving it from a github gist URL.
|
||||
|
||||
Returns
|
||||
-------
|
||||
img : numpy array
|
||||
The pixels of the image in a numpy array.
|
||||
img_proj : cartopy CRS
|
||||
The rectangular coordinate system of the image.
|
||||
img_extent : tuple of floats
|
||||
The extent of the image ``(x0, y0, x1, y1)`` referenced in
|
||||
the ``img_proj`` coordinate system.
|
||||
origin : str
|
||||
The origin of the image to be passed through to matplotlib's imshow.
|
||||
|
||||
"""
|
||||
url = ('https://gist.github.com/pelson/5871263/raw/'
|
||||
'EIDA50_201211061300_clip2.png')
|
||||
img_handle = BytesIO(urlopen(url).read())
|
||||
img = plt.imread(img_handle)
|
||||
img_proj = ccrs.Geostationary(satellite_height=35786000)
|
||||
img_extent = [-5500000, 5500000, -5500000, 5500000]
|
||||
return img, img_proj, img_extent, 'upper'
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.Miller())
|
||||
ax.coastlines()
|
||||
ax.set_global()
|
||||
print('Retrieving image...')
|
||||
img, crs, extent, origin = geos_image()
|
||||
print('Projecting and plotting image (this may take a while)...')
|
||||
ax.imshow(img, transform=crs, extent=extent, origin=origin, cmap='gray')
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,36 @@
|
|||
"""
|
||||
Global Map
|
||||
----------
|
||||
|
||||
An example of a simple map that compares Geodetic and Plate Carree lines
|
||||
between two locations.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.Robinson())
|
||||
|
||||
# make the map global rather than have it zoom in to
|
||||
# the extents of any plotted data
|
||||
ax.set_global()
|
||||
|
||||
ax.stock_img()
|
||||
ax.coastlines()
|
||||
|
||||
ax.plot(-0.08, 51.53, 'o', transform=ccrs.PlateCarree())
|
||||
ax.plot([-0.08, 132], [51.53, 43.17], transform=ccrs.PlateCarree())
|
||||
ax.plot([-0.08, 132], [51.53, 43.17], transform=ccrs.Geodetic())
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,44 @@
|
|||
"""
|
||||
Gridlines and tick labels
|
||||
-------------------------
|
||||
|
||||
These examples demonstrate how to quickly add longitude
|
||||
and latitude gridlines and tick labels on a non-rectangular projection.
|
||||
|
||||
As you can see on the first example,
|
||||
longitude labels may be drawn on left and right sides,
|
||||
and latitude labels may be drawn on bottom and top sides.
|
||||
Thanks to the ``dms`` keyword, minutes are used when appropriate
|
||||
to display fractions of degree.
|
||||
|
||||
|
||||
In the second example, labels are still drawn at the map edges
|
||||
despite its complexity, and some others are also drawn within the map
|
||||
boundary.
|
||||
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
__tags__ = ['Gridlines', 'Tick labels', 'Lines and polygons']
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
rotated_crs = ccrs.RotatedPole(pole_longitude=120.0, pole_latitude=70.0)
|
||||
ax0 = plt.axes(projection=rotated_crs)
|
||||
ax0.set_extent([-6, 1, 47.5, 51.5], crs=ccrs.PlateCarree())
|
||||
ax0.add_feature(cfeature.LAND.with_scale('110m'))
|
||||
ax0.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False)
|
||||
|
||||
plt.figure(figsize=(6.9228, 3))
|
||||
ax1 = plt.axes(projection=ccrs.InterruptedGoodeHomolosine())
|
||||
ax1.coastlines(resolution='110m')
|
||||
ax1.gridlines(draw_labels=True)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,98 @@
|
|||
"""
|
||||
Hurricane Katrina
|
||||
-----------------
|
||||
|
||||
This example uses the power of Shapely to illustrate states that are likely to
|
||||
have been significantly impacted by Hurricane Katrina.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import matplotlib.patches as mpatches
|
||||
import matplotlib.pyplot as plt
|
||||
import shapely.geometry as sgeom
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.io.shapereader as shpreader
|
||||
|
||||
|
||||
def sample_data():
|
||||
"""
|
||||
Return a list of latitudes and a list of longitudes (lons, lats)
|
||||
for Hurricane Katrina (2005).
|
||||
|
||||
The data was originally sourced from the HURDAT2 dataset from AOML/NOAA:
|
||||
https://www.aoml.noaa.gov/hrd/hurdat/newhurdat-all.html on 14th Dec 2012.
|
||||
|
||||
"""
|
||||
lons = [-75.1, -75.7, -76.2, -76.5, -76.9, -77.7, -78.4, -79.0,
|
||||
-79.6, -80.1, -80.3, -81.3, -82.0, -82.6, -83.3, -84.0,
|
||||
-84.7, -85.3, -85.9, -86.7, -87.7, -88.6, -89.2, -89.6,
|
||||
-89.6, -89.6, -89.6, -89.6, -89.1, -88.6, -88.0, -87.0,
|
||||
-85.3, -82.9]
|
||||
|
||||
lats = [23.1, 23.4, 23.8, 24.5, 25.4, 26.0, 26.1, 26.2, 26.2, 26.0,
|
||||
25.9, 25.4, 25.1, 24.9, 24.6, 24.4, 24.4, 24.5, 24.8, 25.2,
|
||||
25.7, 26.3, 27.2, 28.2, 29.3, 29.5, 30.2, 31.1, 32.6, 34.1,
|
||||
35.6, 37.0, 38.6, 40.1]
|
||||
|
||||
return lons, lats
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
# to get the effect of having just the states without a map "background"
|
||||
# turn off the background patch and axes frame
|
||||
ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.LambertConformal(),
|
||||
frameon=False)
|
||||
ax.patch.set_visible(False)
|
||||
|
||||
ax.set_extent([-125, -66.5, 20, 50], ccrs.Geodetic())
|
||||
|
||||
shapename = 'admin_1_states_provinces_lakes_shp'
|
||||
states_shp = shpreader.natural_earth(resolution='110m',
|
||||
category='cultural', name=shapename)
|
||||
|
||||
lons, lats = sample_data()
|
||||
|
||||
ax.set_title('US States which intersect the track of '
|
||||
'Hurricane Katrina (2005)')
|
||||
|
||||
# turn the lons and lats into a shapely LineString
|
||||
track = sgeom.LineString(zip(lons, lats))
|
||||
|
||||
# buffer the linestring by two degrees (note: this is a non-physical
|
||||
# distance)
|
||||
track_buffer = track.buffer(2)
|
||||
|
||||
def colorize_state(geometry):
|
||||
facecolor = (0.9375, 0.9375, 0.859375)
|
||||
if geometry.intersects(track):
|
||||
facecolor = 'red'
|
||||
elif geometry.intersects(track_buffer):
|
||||
facecolor = '#FF7E00'
|
||||
return {'facecolor': facecolor, 'edgecolor': 'black'}
|
||||
|
||||
ax.add_geometries(
|
||||
shpreader.Reader(states_shp).geometries(),
|
||||
ccrs.PlateCarree(),
|
||||
styler=colorize_state)
|
||||
|
||||
ax.add_geometries([track_buffer], ccrs.PlateCarree(),
|
||||
facecolor='#C8A2C8', alpha=0.5)
|
||||
ax.add_geometries([track], ccrs.PlateCarree(),
|
||||
facecolor='none', edgecolor='k')
|
||||
|
||||
# make two proxy artists to add to a legend
|
||||
direct_hit = mpatches.Rectangle((0, 0), 1, 1, facecolor="red")
|
||||
within_2_deg = mpatches.Rectangle((0, 0), 1, 1, facecolor="#FF7E00")
|
||||
labels = ['State directly intersects\nwith track',
|
||||
'State is within \n2 degrees of track']
|
||||
ax.legend([direct_hit, within_2_deg], labels,
|
||||
loc='lower left', bbox_to_anchor=(0.025, -0.1), fancybox=True)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,32 @@
|
|||
"""
|
||||
Web tile imagery
|
||||
----------------
|
||||
|
||||
This example demonstrates how imagery from a tile
|
||||
providing web service can be accessed.
|
||||
|
||||
"""
|
||||
__tags__ = ['Web services']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
from cartopy.io.img_tiles import Stamen
|
||||
|
||||
|
||||
def main():
|
||||
tiler = Stamen('terrain-background')
|
||||
mercator = tiler.crs
|
||||
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=mercator)
|
||||
ax.set_extent([-90, -73, 22, 34], crs=ccrs.PlateCarree())
|
||||
|
||||
ax.add_image(tiler, 6)
|
||||
|
||||
ax.coastlines('10m')
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,48 @@
|
|||
"""
|
||||
Cartopy Logo
|
||||
------------
|
||||
|
||||
The actual code to produce cartopy's logo.
|
||||
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.textpath
|
||||
import matplotlib.patches
|
||||
from matplotlib.font_manager import FontProperties
|
||||
import numpy as np
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=[12, 6])
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.Robinson())
|
||||
|
||||
ax.coastlines()
|
||||
ax.gridlines()
|
||||
|
||||
# generate a matplotlib path representing the word "cartopy"
|
||||
fp = FontProperties(family='Bitstream Vera Sans', weight='bold')
|
||||
logo_path = matplotlib.textpath.TextPath((-175, -35), 'cartopy',
|
||||
size=1, prop=fp)
|
||||
# scale the letters up to sensible longitude and latitude sizes
|
||||
logo_path._vertices *= np.array([80, 160])
|
||||
|
||||
# add a background image
|
||||
im = ax.stock_img()
|
||||
# clip the image according to the logo_path. mpl v1.2.0 does not support
|
||||
# the transform API that cartopy makes use of, so we have to convert the
|
||||
# projection into a transform manually
|
||||
plate_carree_transform = ccrs.PlateCarree()._as_mpl_transform(ax)
|
||||
im.set_clip_path(logo_path, transform=plate_carree_transform)
|
||||
|
||||
# add the path as a patch, drawing black outlines around the text
|
||||
patch = matplotlib.patches.PathPatch(logo_path,
|
||||
facecolor='none', edgecolor='black',
|
||||
transform=ccrs.PlateCarree())
|
||||
ax.add_patch(patch)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,24 @@
|
|||
"""
|
||||
Nightshade feature
|
||||
------------------
|
||||
|
||||
Draws a polygon where there is no sunlight for the given datetime.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import datetime
|
||||
import matplotlib.pyplot as plt
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.feature.nightshade import Nightshade
|
||||
|
||||
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
|
||||
date = datetime.datetime(1999, 12, 31, 12)
|
||||
|
||||
ax.set_title('Night time shading for {}'.format(date))
|
||||
ax.stock_img()
|
||||
ax.add_feature(Nightshade(date, alpha=0.2))
|
||||
plt.show()
|
|
@ -0,0 +1,59 @@
|
|||
"""
|
||||
Regridding vectors with quiver
|
||||
------------------------------
|
||||
|
||||
This example demonstrates the regridding functionality in quiver (there exists
|
||||
equivalent functionality in :meth:`cartopy.mpl.geoaxes.GeoAxes.barbs`).
|
||||
|
||||
Regridding can be an effective way of visualising a vector field, particularly
|
||||
if the data is dense or warped.
|
||||
|
||||
"""
|
||||
__tags__ = ['Vector data']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def sample_data(shape=(20, 30)):
|
||||
"""
|
||||
Return ``(x, y, u, v, crs)`` of some vector data
|
||||
computed mathematically. The returned CRS will be a North Polar
|
||||
Stereographic projection, meaning that the vectors will be unevenly
|
||||
spaced in a PlateCarree projection.
|
||||
|
||||
"""
|
||||
crs = ccrs.NorthPolarStereo()
|
||||
scale = 1e7
|
||||
x = np.linspace(-scale, scale, shape[1])
|
||||
y = np.linspace(-scale, scale, shape[0])
|
||||
|
||||
x2d, y2d = np.meshgrid(x, y)
|
||||
u = 10 * np.cos(2 * x2d / scale + 3 * y2d / scale)
|
||||
v = 20 * np.cos(6 * x2d / scale)
|
||||
|
||||
return x, y, u, v, crs
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(8, 10))
|
||||
|
||||
x, y, u, v, vector_crs = sample_data(shape=(50, 50))
|
||||
ax1 = fig.add_subplot(2, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax1.coastlines('50m')
|
||||
ax1.set_extent([-45, 55, 20, 80], ccrs.PlateCarree())
|
||||
ax1.quiver(x, y, u, v, transform=vector_crs)
|
||||
|
||||
ax2 = fig.add_subplot(2, 1, 2, projection=ccrs.PlateCarree())
|
||||
ax2.set_title('The same vector field regridded')
|
||||
ax2.coastlines('50m')
|
||||
ax2.set_extent([-45, 55, 20, 80], ccrs.PlateCarree())
|
||||
ax2.quiver(x, y, u, v, transform=vector_crs, regrid_shape=20)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,56 @@
|
|||
"""
|
||||
Displaying WMTS tiled map data on an arbitrary projection
|
||||
---------------------------------------------------------
|
||||
|
||||
This example displays imagery from a web map tile service on two different
|
||||
projections, one of which is not provided by the service.
|
||||
|
||||
This result can also be interactively panned and zoomed.
|
||||
|
||||
The example WMTS layer is a single composite of data sampled over nine days
|
||||
in April 2012 and thirteen days in October 2012 showing the Earth at night.
|
||||
It does not vary over time.
|
||||
|
||||
The imagery was collected by the Suomi National Polar-orbiting Partnership
|
||||
(Suomi NPP) weather satellite operated by the United States National Oceanic
|
||||
and Atmospheric Administration (NOAA).
|
||||
|
||||
"""
|
||||
__tags__ = ['Web services']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def plot_city_lights():
|
||||
# Define resource for the NASA night-time illumination data.
|
||||
base_uri = 'https://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
|
||||
layer_name = 'VIIRS_CityLights_2012'
|
||||
|
||||
# Create a Cartopy crs for plain and rotated lat-lon projections.
|
||||
plain_crs = ccrs.PlateCarree()
|
||||
rotated_crs = ccrs.RotatedPole(pole_longitude=120.0, pole_latitude=45.0)
|
||||
|
||||
fig = plt.figure()
|
||||
|
||||
# Plot WMTS data in a specific region, over a plain lat-lon map.
|
||||
ax = fig.add_subplot(1, 2, 1, projection=plain_crs)
|
||||
ax.set_extent([-6, 3, 48, 58], crs=ccrs.PlateCarree())
|
||||
ax.coastlines(resolution='50m', color='yellow')
|
||||
ax.gridlines(color='lightgrey', linestyle='-')
|
||||
# Add WMTS imaging.
|
||||
ax.add_wmts(base_uri, layer_name=layer_name)
|
||||
|
||||
# Plot WMTS data on a rotated map, over the same nominal region.
|
||||
ax = fig.add_subplot(1, 2, 2, projection=rotated_crs)
|
||||
ax.set_extent([-6, 3, 48, 58], crs=ccrs.PlateCarree())
|
||||
ax.coastlines(resolution='50m', color='yellow')
|
||||
ax.gridlines(color='lightgrey', linestyle='-')
|
||||
# Add WMTS imaging.
|
||||
ax.add_wmts(base_uri, layer_name=layer_name)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
plot_city_lights()
|
|
@ -0,0 +1,45 @@
|
|||
"""
|
||||
Rotated pole boxes
|
||||
------------------
|
||||
|
||||
A demonstration of the way a box is warped when it is defined
|
||||
in a rotated pole coordinate system.
|
||||
|
||||
Try changing the ``box_top`` to ``44``, ``46`` and ``75`` to see the effect
|
||||
that including the pole in the polygon has.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
rotated_pole = ccrs.RotatedPole(pole_latitude=45, pole_longitude=180)
|
||||
|
||||
box_top = 45
|
||||
x, y = [-44, -44, 45, 45, -44], [-45, box_top, box_top, -45, -45]
|
||||
|
||||
fig = plt.figure()
|
||||
|
||||
ax = fig.add_subplot(2, 1, 1, projection=rotated_pole)
|
||||
ax.stock_img()
|
||||
ax.coastlines()
|
||||
ax.plot(x, y, marker='o', transform=rotated_pole)
|
||||
ax.fill(x, y, color='coral', transform=rotated_pole, alpha=0.4)
|
||||
ax.gridlines()
|
||||
|
||||
ax = fig.add_subplot(2, 1, 2, projection=ccrs.PlateCarree())
|
||||
ax.stock_img()
|
||||
ax.coastlines()
|
||||
ax.plot(x, y, marker='o', transform=rotated_pole)
|
||||
ax.fill(x, y, transform=rotated_pole, color='coral', alpha=0.4)
|
||||
ax.gridlines()
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,36 @@
|
|||
"""
|
||||
Modifying the boundary/neatline of a map in cartopy
|
||||
---------------------------------------------------
|
||||
|
||||
This example demonstrates how to modify the boundary/neatline
|
||||
of an axes. We construct a star with coordinates in a Plate Carree
|
||||
coordinate system, and use the star as the outline of the map.
|
||||
|
||||
Notice how changing the projection of the map represents a *projected*
|
||||
star shaped boundary.
|
||||
|
||||
"""
|
||||
import matplotlib.path as mpath
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure()
|
||||
ax = fig.add_axes([0, 0, 1, 1], projection=ccrs.PlateCarree())
|
||||
ax.coastlines()
|
||||
|
||||
# Construct a star in longitudes and latitudes.
|
||||
star_path = mpath.Path.unit_regular_star(5, 0.5)
|
||||
star_path = mpath.Path(star_path.vertices.copy() * 80,
|
||||
star_path.codes.copy())
|
||||
|
||||
# Use the star as the boundary.
|
||||
ax.set_boundary(star_path, transform=ccrs.PlateCarree())
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,30 @@
|
|||
"""
|
||||
Streamplot
|
||||
----------
|
||||
|
||||
Generating a vector-based streamplot.
|
||||
|
||||
"""
|
||||
__tags__ = ['Vector data']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.examples.arrows import sample_data
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax.set_extent([-90, 75, 10, 85], crs=ccrs.PlateCarree())
|
||||
ax.coastlines()
|
||||
|
||||
x, y, u, v, vector_crs = sample_data(shape=(80, 100))
|
||||
magnitude = (u ** 2 + v ** 2) ** 0.5
|
||||
ax.streamplot(x, y, u, v, transform=vector_crs,
|
||||
linewidth=2, density=2, color=magnitude)
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,48 @@
|
|||
"""
|
||||
Tick Labels
|
||||
-----------
|
||||
|
||||
This example demonstrates adding tick labels to maps on rectangular
|
||||
projections using special tick formatters.
|
||||
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(8, 10))
|
||||
|
||||
# Label axes of a Plate Carree projection with a central longitude of 180:
|
||||
ax1 = fig.add_subplot(2, 1, 1,
|
||||
projection=ccrs.PlateCarree(central_longitude=180))
|
||||
ax1.set_global()
|
||||
ax1.coastlines()
|
||||
ax1.set_xticks([0, 60, 120, 180, 240, 300, 360], crs=ccrs.PlateCarree())
|
||||
ax1.set_yticks([-90, -60, -30, 0, 30, 60, 90], crs=ccrs.PlateCarree())
|
||||
lon_formatter = LongitudeFormatter(zero_direction_label=True)
|
||||
lat_formatter = LatitudeFormatter()
|
||||
ax1.xaxis.set_major_formatter(lon_formatter)
|
||||
ax1.yaxis.set_major_formatter(lat_formatter)
|
||||
|
||||
# Label axes of a Mercator projection without degree symbols in the labels
|
||||
# and formatting labels to include 1 decimal place:
|
||||
ax2 = fig.add_subplot(2, 1, 2, projection=ccrs.Mercator())
|
||||
ax2.set_global()
|
||||
ax2.coastlines()
|
||||
ax2.set_xticks([-180, -120, -60, 0, 60, 120, 180], crs=ccrs.PlateCarree())
|
||||
ax2.set_yticks([-78.5, -60, -25.5, 25.5, 60, 80], crs=ccrs.PlateCarree())
|
||||
lon_formatter = LongitudeFormatter(number_format='.1f',
|
||||
degree_symbol='',
|
||||
dateline_direction_label=True)
|
||||
lat_formatter = LatitudeFormatter(number_format='.1f',
|
||||
degree_symbol='')
|
||||
ax2.xaxis.set_major_formatter(lon_formatter)
|
||||
ax2.yaxis.set_major_formatter(lat_formatter)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,32 @@
|
|||
"""
|
||||
Tissot's Indicatrix
|
||||
-------------------
|
||||
|
||||
Visualize Tissot's indicatrix on a map.
|
||||
|
||||
"""
|
||||
__tags__ = ['Lines and polygons']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
|
||||
# make the map global rather than have it zoom in to
|
||||
# the extents of any plotted data
|
||||
ax.set_global()
|
||||
|
||||
ax.stock_img()
|
||||
ax.coastlines()
|
||||
|
||||
ax.tissot(facecolor='orange', alpha=0.4)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,71 @@
|
|||
"""
|
||||
Tube Stations
|
||||
-------------
|
||||
|
||||
Produces a map showing London Underground station locations with high
|
||||
resolution background imagery provided by OpenStreetMap.
|
||||
|
||||
"""
|
||||
from matplotlib.path import Path
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
from cartopy.io.img_tiles import OSM
|
||||
|
||||
|
||||
def tube_locations():
|
||||
"""
|
||||
Return an (n, 2) array of selected London Tube locations in Ordnance
|
||||
Survey GB coordinates.
|
||||
|
||||
Source: https://www.doogal.co.uk/london_stations.php
|
||||
|
||||
"""
|
||||
return np.array([[531738., 180890.], [532379., 179734.],
|
||||
[531096., 181642.], [530234., 180492.],
|
||||
[531688., 181150.], [530242., 180982.],
|
||||
[531940., 179144.], [530406., 180380.],
|
||||
[529012., 180283.], [530553., 181488.],
|
||||
[531165., 179489.], [529987., 180812.],
|
||||
[532347., 180962.], [529102., 181227.],
|
||||
[529612., 180625.], [531566., 180025.],
|
||||
[529629., 179503.], [532105., 181261.],
|
||||
[530995., 180810.], [529774., 181354.],
|
||||
[528941., 179131.], [531050., 179933.],
|
||||
[530240., 179718.]])
|
||||
|
||||
|
||||
def main():
|
||||
imagery = OSM()
|
||||
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=imagery.crs)
|
||||
ax.set_extent([-0.14, -0.1, 51.495, 51.515], ccrs.PlateCarree())
|
||||
|
||||
# Construct concentric circles and a rectangle,
|
||||
# suitable for a London Underground logo.
|
||||
theta = np.linspace(0, 2 * np.pi, 100)
|
||||
circle_verts = np.vstack([np.sin(theta), np.cos(theta)]).T
|
||||
concentric_circle = Path.make_compound_path(Path(circle_verts[::-1]),
|
||||
Path(circle_verts * 0.6))
|
||||
|
||||
rectangle = Path([[-1.1, -0.2], [1, -0.2], [1, 0.3], [-1.1, 0.3]])
|
||||
|
||||
# Add the imagery to the map.
|
||||
ax.add_image(imagery, 14)
|
||||
|
||||
# Plot the locations twice, first with the red concentric circles,
|
||||
# then with the blue rectangle.
|
||||
xs, ys = tube_locations().T
|
||||
ax.plot(xs, ys, transform=ccrs.OSGB(approx=False),
|
||||
marker=concentric_circle, color='red', markersize=9, linestyle='')
|
||||
ax.plot(xs, ys, transform=ccrs.OSGB(approx=False),
|
||||
marker=rectangle, color='blue', markersize=11, linestyle='')
|
||||
|
||||
ax.set_title('London underground locations')
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,162 @@
|
|||
"""
|
||||
UN Flag
|
||||
-------
|
||||
|
||||
A demonstration of the power of Matplotlib combined with cartopy's Azimuthal
|
||||
Equidistant projection to reproduce the UN flag.
|
||||
|
||||
"""
|
||||
import cartopy.crs as ccrs
|
||||
import cartopy.feature as cfeature
|
||||
import matplotlib.pyplot as plt
|
||||
from matplotlib.patches import PathPatch
|
||||
import matplotlib.path
|
||||
import matplotlib.ticker
|
||||
from matplotlib.transforms import BboxTransform, Bbox
|
||||
import numpy as np
|
||||
|
||||
|
||||
# When drawing the flag, we can either use white filled land, or be a little
|
||||
# more fancy and use the Natural Earth shaded relief imagery.
|
||||
filled_land = True
|
||||
|
||||
|
||||
def olive_path():
|
||||
"""
|
||||
Return a Matplotlib path representing a single olive branch from the
|
||||
UN Flag. The path coordinates were extracted from the SVG at
|
||||
https://commons.wikimedia.org/wiki/File:Flag_of_the_United_Nations.svg.
|
||||
|
||||
"""
|
||||
olives_verts = np.array(
|
||||
[[0, 2, 6, 9, 30, 55, 79, 94, 104, 117, 134, 157, 177,
|
||||
188, 199, 207, 191, 167, 149, 129, 109, 87, 53, 22, 0, 663,
|
||||
245, 223, 187, 158, 154, 150, 146, 149, 154, 158, 181, 184, 197,
|
||||
181, 167, 153, 142, 129, 116, 119, 123, 127, 151, 178, 203, 220,
|
||||
237, 245, 663, 280, 267, 232, 209, 205, 201, 196, 196, 201, 207,
|
||||
211, 224, 219, 230, 220, 212, 207, 198, 195, 176, 197, 220, 239,
|
||||
259, 277, 280, 663, 295, 293, 264, 250, 247, 244, 240, 240, 243,
|
||||
244, 249, 251, 250, 248, 242, 245, 233, 236, 230, 228, 224, 222,
|
||||
234, 249, 262, 275, 285, 291, 295, 296, 295, 663, 294, 293, 292,
|
||||
289, 294, 277, 271, 269, 268, 265, 264, 264, 264, 272, 260, 248,
|
||||
245, 243, 242, 240, 243, 245, 247, 252, 256, 259, 258, 257, 258,
|
||||
267, 285, 290, 294, 297, 294, 663, 285, 285, 277, 266, 265, 265,
|
||||
265, 277, 266, 268, 269, 269, 269, 268, 268, 267, 267, 264, 248,
|
||||
235, 232, 229, 228, 229, 232, 236, 246, 266, 269, 271, 285, 285,
|
||||
663, 252, 245, 238, 230, 246, 245, 250, 252, 255, 256, 256, 253,
|
||||
249, 242, 231, 214, 208, 208, 227, 244, 252, 258, 262, 262, 261,
|
||||
262, 264, 265, 252, 663, 185, 197, 206, 215, 223, 233, 242, 237,
|
||||
237, 230, 220, 202, 185, 663],
|
||||
[8, 5, 3, 0, 22, 46, 46, 46, 35, 27, 16, 10, 18,
|
||||
22, 28, 38, 27, 26, 33, 41, 52, 52, 52, 30, 8, 595,
|
||||
77, 52, 61, 54, 53, 52, 53, 55, 55, 57, 65, 90, 106,
|
||||
96, 81, 68, 58, 54, 51, 50, 51, 50, 44, 34, 43, 48,
|
||||
61, 77, 595, 135, 104, 102, 83, 79, 76, 74, 74, 79, 84,
|
||||
90, 109, 135, 156, 145, 133, 121, 100, 77, 62, 69, 67, 80,
|
||||
92, 113, 135, 595, 198, 171, 156, 134, 129, 124, 120, 123, 126,
|
||||
129, 138, 149, 161, 175, 188, 202, 177, 144, 116, 110, 105, 99,
|
||||
108, 116, 126, 136, 147, 162, 173, 186, 198, 595, 249, 255, 261,
|
||||
267, 241, 222, 200, 192, 183, 175, 175, 175, 175, 199, 221, 240,
|
||||
245, 250, 256, 245, 233, 222, 207, 194, 180, 172, 162, 153, 154,
|
||||
171, 184, 202, 216, 233, 249, 595, 276, 296, 312, 327, 327, 327,
|
||||
327, 308, 284, 262, 240, 240, 239, 239, 242, 244, 247, 265, 277,
|
||||
290, 293, 296, 300, 291, 282, 274, 253, 236, 213, 235, 252, 276,
|
||||
595, 342, 349, 355, 357, 346, 326, 309, 303, 297, 291, 290, 297,
|
||||
304, 310, 321, 327, 343, 321, 305, 292, 286, 278, 270, 276, 281,
|
||||
287, 306, 328, 342, 595, 379, 369, 355, 343, 333, 326, 318, 328,
|
||||
340, 349, 366, 373, 379, 595]]).T
|
||||
olives_codes = np.array([1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 2, 4,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 4,
|
||||
4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 4, 4, 79, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
|
||||
4, 4, 79], dtype=np.uint8)
|
||||
|
||||
return matplotlib.path.Path(olives_verts, olives_codes)
|
||||
|
||||
|
||||
def main():
|
||||
blue = '#4b92db'
|
||||
|
||||
# We're drawing a flag with a 3:5 aspect ratio.
|
||||
fig = plt.figure(figsize=[7.5, 4.5], facecolor=blue)
|
||||
# Put a blue background on the figure.
|
||||
blue_background = PathPatch(matplotlib.path.Path.unit_rectangle(),
|
||||
transform=fig.transFigure, color=blue,
|
||||
zorder=-1)
|
||||
fig.patches.append(blue_background)
|
||||
|
||||
# Set up the Azimuthal Equidistant and Plate Carree projections
|
||||
# for later use.
|
||||
az_eq = ccrs.AzimuthalEquidistant(central_latitude=90)
|
||||
pc = ccrs.PlateCarree()
|
||||
|
||||
# Pick a suitable location for the map (which is in an Azimuthal
|
||||
# Equidistant projection).
|
||||
ax = fig.add_axes([0.25, 0.24, 0.5, 0.54], projection=az_eq)
|
||||
|
||||
# The background patch is not needed in this example.
|
||||
ax.patch.set_facecolor('none')
|
||||
# The Axes frame produces the outer meridian line.
|
||||
for spine in ax.spines.values():
|
||||
spine.update({'edgecolor': 'white', 'linewidth': 2})
|
||||
|
||||
# We want the map to go down to -60 degrees latitude.
|
||||
ax.set_extent([-180, 180, -60, 90], ccrs.PlateCarree())
|
||||
|
||||
# Importantly, we want the axes to be circular at the -60 latitude
|
||||
# rather than cartopy's default behaviour of zooming in and becoming
|
||||
# square.
|
||||
_, patch_radius = az_eq.transform_point(0, -60, pc)
|
||||
circular_path = matplotlib.path.Path.circle(0, patch_radius)
|
||||
ax.set_boundary(circular_path)
|
||||
|
||||
if filled_land:
|
||||
ax.add_feature(
|
||||
cfeature.LAND, facecolor='white', edgecolor='none')
|
||||
else:
|
||||
ax.stock_img()
|
||||
|
||||
gl = ax.gridlines(crs=pc, linewidth=2, color='white', linestyle='-')
|
||||
# Meridians every 45 degrees, and 4 parallels.
|
||||
gl.xlocator = matplotlib.ticker.FixedLocator(np.arange(-180, 181, 45))
|
||||
parallels = np.arange(-30, 70, 30)
|
||||
gl.ylocator = matplotlib.ticker.FixedLocator(parallels)
|
||||
|
||||
# Now add the olive branches around the axes. We do this in normalised
|
||||
# figure coordinates
|
||||
olive_leaf = olive_path()
|
||||
|
||||
olives_bbox = Bbox.null()
|
||||
olives_bbox.update_from_path(olive_leaf)
|
||||
|
||||
# The first olive branch goes from left to right.
|
||||
olive1_axes_bbox = Bbox([[0.45, 0.15], [0.725, 0.75]])
|
||||
olive1_trans = BboxTransform(olives_bbox, olive1_axes_bbox)
|
||||
|
||||
# THe second olive branch goes from right to left (mirroring the first).
|
||||
olive2_axes_bbox = Bbox([[0.55, 0.15], [0.275, 0.75]])
|
||||
olive2_trans = BboxTransform(olives_bbox, olive2_axes_bbox)
|
||||
|
||||
olive1 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',
|
||||
transform=olive1_trans + fig.transFigure)
|
||||
olive2 = PathPatch(olive_leaf, facecolor='white', edgecolor='none',
|
||||
transform=olive2_trans + fig.transFigure)
|
||||
|
||||
fig.patches.append(olive1)
|
||||
fig.patches.append(olive2)
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,46 @@
|
|||
"""
|
||||
Displaying all 60 zones of the UTM projection
|
||||
---------------------------------------------
|
||||
|
||||
This example displays all 60 zones of the Universal Transverse Mercator
|
||||
projection next to each other in a figure.
|
||||
|
||||
First we create a figure with 60 subplots in one row.
|
||||
Next we set the projection of each axis in the figure to a specific UTM zone.
|
||||
Then we add coastlines, gridlines and the number of the zone.
|
||||
Finally we add a supertitle and display the figure.
|
||||
"""
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
def main():
|
||||
# Create a list of integers from 1 - 60
|
||||
zones = range(1, 61)
|
||||
|
||||
# Create a figure
|
||||
fig = plt.figure(figsize=(18, 6))
|
||||
|
||||
# Loop through each zone in the list
|
||||
for zone in zones:
|
||||
|
||||
# Add GeoAxes object with specific UTM zone projection to the figure
|
||||
ax = fig.add_subplot(1, len(zones), zone,
|
||||
projection=ccrs.UTM(zone=zone,
|
||||
southern_hemisphere=True))
|
||||
|
||||
# Add coastlines, gridlines and zone number for the subplot
|
||||
ax.coastlines(resolution='110m')
|
||||
ax.gridlines()
|
||||
ax.set_title(zone)
|
||||
|
||||
# Add a supertitle for the figure
|
||||
fig.suptitle("UTM Projection - Zones")
|
||||
|
||||
# Display the figure
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,47 @@
|
|||
"""
|
||||
Filled contours
|
||||
---------------
|
||||
|
||||
An example of contourf on manufactured data.
|
||||
|
||||
"""
|
||||
__tags__ = ['Scalar data']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def sample_data(shape=(73, 145)):
|
||||
"""Return ``lons``, ``lats`` and ``data`` of some fake data."""
|
||||
nlats, nlons = shape
|
||||
lats = np.linspace(-np.pi / 2, np.pi / 2, nlats)
|
||||
lons = np.linspace(0, 2 * np.pi, nlons)
|
||||
lons, lats = np.meshgrid(lons, lats)
|
||||
wave = 0.75 * (np.sin(2 * lats) ** 8) * np.cos(4 * lons)
|
||||
mean = 0.5 * np.cos(2 * lats) * ((np.sin(2 * lats)) ** 2 + 2)
|
||||
|
||||
lats = np.rad2deg(lats)
|
||||
lons = np.rad2deg(lons)
|
||||
data = wave + mean
|
||||
|
||||
return lons, lats, data
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.Mollweide())
|
||||
|
||||
lons, lats, data = sample_data()
|
||||
|
||||
ax.contourf(lons, lats, data,
|
||||
transform=ccrs.PlateCarree(),
|
||||
cmap='nipy_spectral')
|
||||
ax.coastlines()
|
||||
ax.set_global()
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,27 @@
|
|||
"""
|
||||
Interactive WMS (Web Map Service)
|
||||
---------------------------------
|
||||
|
||||
This example demonstrates the interactive pan and zoom capability
|
||||
supported by an OGC web services Web Map Service (WMS) aware axes.
|
||||
|
||||
"""
|
||||
__tags__ = ['Web services']
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
def main():
|
||||
fig = plt.figure(figsize=(10, 5))
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.InterruptedGoodeHomolosine())
|
||||
ax.coastlines()
|
||||
|
||||
ax.add_wms(wms='http://vmap0.tiles.osgeo.org/wms/vmap0',
|
||||
layers=['basic'])
|
||||
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,37 @@
|
|||
"""
|
||||
Interactive WMTS (Web Map Tile Service)
|
||||
---------------------------------------
|
||||
|
||||
This example demonstrates the interactive pan and zoom capability
|
||||
supported by an OGC web services Web Map Tile Service (WMTS) aware axes.
|
||||
|
||||
The example WMTS layer is a single composite of data sampled over nine days
|
||||
in April 2012 and thirteen days in October 2012 showing the Earth at night.
|
||||
It does not vary over time.
|
||||
|
||||
The imagery was collected by the Suomi National Polar-orbiting Partnership
|
||||
(Suomi NPP) weather satellite operated by the United States National Oceanic
|
||||
and Atmospheric Administration (NOAA).
|
||||
|
||||
"""
|
||||
__tags__ = ['Web services']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
url = 'https://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
|
||||
layer = 'VIIRS_CityLights_2012'
|
||||
|
||||
fig = plt.figure()
|
||||
ax = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree())
|
||||
ax.add_wmts(url, layer)
|
||||
ax.set_extent([-15, 25, 35, 60], crs=ccrs.PlateCarree())
|
||||
|
||||
ax.set_title('Suomi NPP Earth at night April/October 2012')
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -0,0 +1,57 @@
|
|||
"""
|
||||
Web Map Tile Service time dimension demonstration
|
||||
-------------------------------------------------
|
||||
|
||||
This example further demonstrates WMTS support within cartopy. Optional
|
||||
keyword arguments can be supplied to the OGC WMTS 'gettile' method. This
|
||||
allows for the specification of the 'time' dimension for a WMTS layer
|
||||
which supports it.
|
||||
|
||||
The example shows satellite imagery retrieved from NASA's Global Imagery
|
||||
Browse Services for 5th Feb 2016. A true color MODIS image is shown on
|
||||
the left, with the MODIS false color 'snow RGB' shown on the right.
|
||||
|
||||
"""
|
||||
__tags__ = ['Web services']
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib.patheffects as PathEffects
|
||||
from owslib.wmts import WebMapTileService
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
|
||||
|
||||
def main():
|
||||
# URL of NASA GIBS
|
||||
URL = 'https://gibs.earthdata.nasa.gov/wmts/epsg4326/best/wmts.cgi'
|
||||
wmts = WebMapTileService(URL)
|
||||
|
||||
# Layers for MODIS true color and snow RGB
|
||||
layers = ['MODIS_Terra_SurfaceReflectance_Bands143',
|
||||
'MODIS_Terra_CorrectedReflectance_Bands367']
|
||||
|
||||
date_str = '2016-02-05'
|
||||
|
||||
# Plot setup
|
||||
plot_CRS = ccrs.Mercator()
|
||||
geodetic_CRS = ccrs.Geodetic()
|
||||
x0, y0 = plot_CRS.transform_point(4.6, 43.1, geodetic_CRS)
|
||||
x1, y1 = plot_CRS.transform_point(11.0, 47.4, geodetic_CRS)
|
||||
ysize = 8
|
||||
xsize = 2 * ysize * (x1 - x0) / (y1 - y0)
|
||||
fig = plt.figure(figsize=(xsize, ysize), dpi=100)
|
||||
|
||||
for layer, offset in zip(layers, [0, 0.5]):
|
||||
ax = fig.add_axes([offset, 0, 0.5, 1], projection=plot_CRS)
|
||||
ax.set_xlim((x0, x1))
|
||||
ax.set_ylim((y0, y1))
|
||||
ax.add_wmts(wmts, layer, wmts_kwargs={'time': date_str})
|
||||
txt = ax.text(4.7, 43.2, wmts[layer].title, fontsize=18, color='wheat',
|
||||
transform=geodetic_CRS)
|
||||
txt.set_path_effects([PathEffects.withStroke(linewidth=5,
|
||||
foreground='black')])
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,157 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Web Map Tile Service time dimension demonstration\n",
|
||||
"-------------------------------------------------\n",
|
||||
"\n",
|
||||
"This example further demonstrates WMTS support within cartopy. Optional\n",
|
||||
"keyword arguments can be supplied to the OGC WMTS 'gettile' method. This\n",
|
||||
"allows for the specification of the 'time' dimension for a WMTS layer\n",
|
||||
"which supports it.\n",
|
||||
"\n",
|
||||
"The example shows satellite imagery retrieved from NASA's Global Imagery\n",
|
||||
"Browse Services for 5th Feb 2016. A true color MODIS image is shown on\n",
|
||||
"the left, with the MODIS false color 'snow RGB' shown on the right."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"import matplotlib.patheffects as PathEffects\n",
|
||||
"from owslib.wmts import WebMapTileService\n",
|
||||
"\n",
|
||||
"import cartopy.crs as ccrs\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"def main():\n",
|
||||
" # URL of NASA GIBS\n",
|
||||
" URL = 'http://gibs.earthdata.nasa.gov/wmts/epsg4326/best/wmts.cgi'\n",
|
||||
" wmts = WebMapTileService(URL)\n",
|
||||
"\n",
|
||||
" # Layers for MODIS true color and snow RGB\n",
|
||||
" layers = ['MODIS_Terra_SurfaceReflectance_Bands143',\n",
|
||||
" 'MODIS_Terra_CorrectedReflectance_Bands367']\n",
|
||||
"\n",
|
||||
" date_str = '2016-02-05'\n",
|
||||
"\n",
|
||||
" # Plot setup\n",
|
||||
" plot_CRS = ccrs.Mercator()\n",
|
||||
" geodetic_CRS = ccrs.Geodetic()\n",
|
||||
" x0, y0 = plot_CRS.transform_point(4.6, 43.1, geodetic_CRS)\n",
|
||||
" x1, y1 = plot_CRS.transform_point(11.0, 47.4, geodetic_CRS)\n",
|
||||
" ysize = 8\n",
|
||||
" xsize = 2 * ysize * (x1 - x0) / (y1 - y0)\n",
|
||||
" fig = plt.figure(figsize=(xsize, ysize), dpi=100)\n",
|
||||
"\n",
|
||||
" for layer, offset in zip(layers, [0, 0.5]):\n",
|
||||
" ax = fig.add_axes([offset, 0, 0.5, 1], projection=plot_CRS)\n",
|
||||
" ax.set_xlim((x0, x1))\n",
|
||||
" ax.set_ylim((y0, y1))\n",
|
||||
" ax.add_wmts(wmts, layer, wmts_kwargs={'time': date_str})\n",
|
||||
" txt = ax.text(4.7, 43.2, wmts[layer].title, fontsize=18, color='wheat',\n",
|
||||
" transform=geodetic_CRS)\n",
|
||||
" txt.set_path_effects([PathEffects.withStroke(linewidth=5,\n",
|
||||
" foreground='black')])\n",
|
||||
" plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"ename": "SSLError",
|
||||
"evalue": "HTTPSConnectionPool(host='gibs.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts/epsg4326/best/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))",
|
||||
"output_type": "error",
|
||||
"traceback": [
|
||||
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
||||
"\u001b[0;31mError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/contrib/pyopenssl.py\u001b[0m in \u001b[0;36mwrap_socket\u001b[0;34m(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)\u001b[0m\n\u001b[1;32m 484\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 485\u001b[0;31m \u001b[0mcnx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdo_handshake\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 486\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOpenSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mWantReadError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/SSL.py\u001b[0m in \u001b[0;36mdo_handshake\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1914\u001b[0m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_lib\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL_do_handshake\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ssl\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1915\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_raise_ssl_error\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ssl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1916\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/SSL.py\u001b[0m in \u001b[0;36m_raise_ssl_error\u001b[0;34m(self, ssl, result)\u001b[0m\n\u001b[1;32m 1646\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1647\u001b[0;31m \u001b[0m_raise_current_error\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1648\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/OpenSSL/_util.py\u001b[0m in \u001b[0;36mexception_from_error_queue\u001b[0;34m(exception_type)\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mexception_type\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0merrors\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mError\u001b[0m: [('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')]",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36murlopen\u001b[0;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[1;32m 664\u001b[0m \u001b[0;31m# Make the request on the httplib connection object.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 665\u001b[0;31m httplib_response = self._make_request(\n\u001b[0m\u001b[1;32m 666\u001b[0m \u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36m_make_request\u001b[0;34m(self, conn, method, url, timeout, chunked, **httplib_request_kw)\u001b[0m\n\u001b[1;32m 375\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 376\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_validate_conn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 377\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mSocketTimeout\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mBaseSSLError\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36m_validate_conn\u001b[0;34m(self, conn)\u001b[0m\n\u001b[1;32m 995\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mgetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"sock\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# AppEngine might not have `.sock`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 996\u001b[0;31m \u001b[0mconn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mconnect\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 997\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connection.py\u001b[0m in \u001b[0;36mconnect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 365\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 366\u001b[0;31m self.sock = ssl_wrap_socket(\n\u001b[0m\u001b[1;32m 367\u001b[0m \u001b[0msock\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mconn\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/util/ssl_.py\u001b[0m in \u001b[0;36mssl_wrap_socket\u001b[0;34m(sock, keyfile, certfile, cert_reqs, ca_certs, server_hostname, ssl_version, ciphers, ssl_context, ca_cert_dir, key_password)\u001b[0m\n\u001b[1;32m 369\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mHAS_SNI\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mserver_hostname\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 370\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mwrap_socket\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msock\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mserver_hostname\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mserver_hostname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 371\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/contrib/pyopenssl.py\u001b[0m in \u001b[0;36mwrap_socket\u001b[0;34m(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname)\u001b[0m\n\u001b[1;32m 490\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOpenSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSL\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 491\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mssl\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSSLError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"bad handshake: %r\"\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 492\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m: (\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\",)",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mMaxRetryError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/adapters.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 438\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mchunked\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 439\u001b[0;31m resp = conn.urlopen(\n\u001b[0m\u001b[1;32m 440\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/connectionpool.py\u001b[0m in \u001b[0;36murlopen\u001b[0;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[1;32m 718\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 719\u001b[0;31m retries = retries.increment(\n\u001b[0m\u001b[1;32m 720\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0merror\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_pool\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_stacktrace\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexc_info\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/urllib3/util/retry.py\u001b[0m in \u001b[0;36mincrement\u001b[0;34m(self, method, url, response, error, _pool, _stacktrace)\u001b[0m\n\u001b[1;32m 435\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mnew_retry\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_exhausted\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 436\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mMaxRetryError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_pool\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0merror\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mResponseError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcause\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 437\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mMaxRetryError\u001b[0m: HTTPSConnectionPool(host='gibs.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts/epsg4326/best/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))",
|
||||
"\nDuring handling of the above exception, another exception occurred:\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m Traceback (most recent call last)",
|
||||
"\u001b[0;32m<ipython-input-3-263240bbee7e>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mmain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
|
||||
"\u001b[0;32m<ipython-input-2-fbc2f47733a3>\u001b[0m in \u001b[0;36mmain\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m# URL of NASA GIBS\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mURL\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'http://gibs.earthdata.nasa.gov/wmts/epsg4326/best/wmts.cgi'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mwmts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mWebMapTileService\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mURL\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;31m# Layers for MODIS true color and snow RGB\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/wmts.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, url, version, xml, username, password, parse_remote_metadata, vendor_kwargs, headers, auth, timeout)\u001b[0m\n\u001b[1;32m 175\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_capabilities\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreadString\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mxml\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 176\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# read from server\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 177\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_capabilities\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mreader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvendor_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 178\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 179\u001b[0m \u001b[0;31m# Avoid building capabilities metadata if the response is a\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/wmts.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, service_url, vendor_kwargs)\u001b[0m\n\u001b[1;32m 827\u001b[0m \u001b[0;31m# now split it up again to use the generic openURL function...\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 828\u001b[0m \u001b[0mspliturl\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgetcaprequest\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msplit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'?'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 829\u001b[0;31m \u001b[0mu\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mopenURL\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mspliturl\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mspliturl\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Get'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mauth\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mauth\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 830\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0metree\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfromstring\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mu\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 831\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/owslib/util.py\u001b[0m in \u001b[0;36mopenURL\u001b[0;34m(url_base, data, method, cookies, username, password, timeout, headers, verify, cert, auth)\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[0mrkwargs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'cookies'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcookies\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 204\u001b[0;31m \u001b[0mreq\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrequests\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl_base\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mheaders\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mheaders\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mrkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 205\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 206\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mreq\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstatus_code\u001b[0m \u001b[0;32min\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;36m400\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m401\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/api.py\u001b[0m in \u001b[0;36mrequest\u001b[0;34m(method, url, **kwargs)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0;31m# cases, and look like a memory leak in others.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0msessions\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mSession\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 60\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msession\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmethod\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0murl\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0murl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36mrequest\u001b[0;34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[0m\n\u001b[1;32m 531\u001b[0m }\n\u001b[1;32m 532\u001b[0m \u001b[0msend_kwargs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msettings\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 533\u001b[0;31m \u001b[0mresp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprep\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0msend_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 534\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 535\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mresp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, **kwargs)\u001b[0m\n\u001b[1;32m 666\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 667\u001b[0m \u001b[0;31m# Resolve redirects if allowed.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 668\u001b[0;31m \u001b[0mhistory\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mresp\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mresp\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgen\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mallow_redirects\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 669\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 670\u001b[0m \u001b[0;31m# Shuffle things around if there's history.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 666\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 667\u001b[0m \u001b[0;31m# Resolve redirects if allowed.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 668\u001b[0;31m \u001b[0mhistory\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mresp\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mresp\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgen\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mallow_redirects\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 669\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 670\u001b[0m \u001b[0;31m# Shuffle things around if there's history.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36mresolve_redirects\u001b[0;34m(self, resp, req, stream, timeout, verify, cert, proxies, yield_requests, **adapter_kwargs)\u001b[0m\n\u001b[1;32m 237\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 238\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 239\u001b[0;31m resp = self.send(\n\u001b[0m\u001b[1;32m 240\u001b[0m \u001b[0mreq\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 241\u001b[0m \u001b[0mstream\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mstream\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/sessions.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, **kwargs)\u001b[0m\n\u001b[1;32m 644\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 645\u001b[0m \u001b[0;31m# Send the request\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 646\u001b[0;31m \u001b[0mr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0madapter\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 647\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 648\u001b[0m \u001b[0;31m# Total elapsed time of the request (approximately)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;32m/usr/lib/python3/dist-packages/requests/adapters.py\u001b[0m in \u001b[0;36msend\u001b[0;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[1;32m 512\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreason\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_SSLError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 513\u001b[0m \u001b[0;31m# This branch is for urllib3 v1.22 and later.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 514\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mSSLError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrequest\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 515\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 516\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mConnectionError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrequest\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrequest\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
|
||||
"\u001b[0;31mSSLError\u001b[0m: HTTPSConnectionPool(host='gibs.earthdata.nasa.gov', port=443): Max retries exceeded with url: /wmts/epsg4326/best/wmts.cgi?service=WMTS&request=GetCapabilities&version=1.0.0 (Caused by SSLError(SSLError(\"bad handshake: Error([('SSL routines', 'tls12_check_peer_sigalg', 'wrong signature type')])\")))"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"main()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,147 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"import fiona\n",
|
||||
"\n",
|
||||
"from matplotlib.patches import Polygon\n",
|
||||
"from shapely.geometry import shape, box\n",
|
||||
"from shapely.ops import cascaded_union\n",
|
||||
"\n",
|
||||
"## Fiona, IPython Notebook interaction\n",
|
||||
"## Live 8.5 * darkblue-b\n",
|
||||
"##"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Terminal Commands\n",
|
||||
"----------------------\n",
|
||||
"``Shell scripts can be executed with their results stored into Python variables``\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"shps = !ls /home/user/data/north_carolina/shape/*shp"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Task: quickly examine the bounding areas of a directory of shapefiles\n",
|
||||
"------------------------------------------------------------------\n",
|
||||
"* use ``fiona.open()`` to read data files on disk\n",
|
||||
"* save the filename and bounding box into a python ``dict``\n",
|
||||
"\n",
|
||||
".\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"td = {}\n",
|
||||
"\n",
|
||||
"for shp in shps:\n",
|
||||
" with fiona.open( shp, 'r') as inp:\n",
|
||||
" td[ inp.name ] = inp.bounds\n",
|
||||
"\n",
|
||||
"## Fiona inp.bounds => ( lower_lng, lower_lat, upper_lng, upper_lat)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Create shapely geometry from the coords\n",
|
||||
"## shapely/geometry/geo.py\n",
|
||||
"## box(minx, miny, maxx, maxy, ccw=True)\n",
|
||||
"\n",
|
||||
"nboxes = []\n",
|
||||
"for k,v in td.items():\n",
|
||||
" nboxes.append( box( v[0], v[1], v[2], v[3]) )\n",
|
||||
"\n",
|
||||
"print( 'Found BBOXs: ',len(nboxes))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## create a single cascaded UNION too\n",
|
||||
"dst_poly = cascaded_union(nboxes)\n",
|
||||
"dst_poly.bounds"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Draw every BBOX for all files, transparently\n",
|
||||
"## use matplotlib.Polygon to draw; let autoscale calculate the area\n",
|
||||
"\n",
|
||||
"fig, ax = plt.subplots(figsize=(12,12))\n",
|
||||
"for polygon in nboxes:\n",
|
||||
" mpl_poly = Polygon(np.array(polygon.exterior), facecolor=\"y\", alpha=0.02)\n",
|
||||
" ax.add_patch(mpl_poly)\n",
|
||||
"\n",
|
||||
"## Indicate the exterior of the study area with a heavy line\n",
|
||||
"ax.add_patch( Polygon(np.array(dst_poly.exterior), fill=False, lw=4, ec=\"b\", alpha=0.9) )\n",
|
||||
"\n",
|
||||
"ax.relim()\n",
|
||||
"ax.autoscale()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@ -0,0 +1,189 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from IPython.core.display import Image\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"\n",
|
||||
"import fiona\n",
|
||||
"import mapnik\n",
|
||||
"import shapely.geometry\n",
|
||||
"\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"## Fiona, mapnik demo\n",
|
||||
"## Live 8.5 * darkblue-b\n",
|
||||
"##\n",
|
||||
"## based on UoLPythonGroup/data-hack-0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"BASE_FOLDER = '/home/user/data/north_carolina/shape'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c_shapefile = os.path.join(BASE_FOLDER, 'nc_state.shp')\n",
|
||||
"f = fiona.open(c_shapefile)\n",
|
||||
"shps = list(f)\n",
|
||||
"## print() is a function on the Python 3 kernel this notebook declares\n",
|
||||
"print('f: ', type(f))\n",
|
||||
"print(f.schema)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"type(f[2])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Use the Shapely geometry classes\n",
|
||||
"## instantiate a Polygon from the Fiona collection "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import shapely.geometry\n",
|
||||
"\n",
|
||||
"geo = shapely.geometry.shape(f[0]['geometry'])\n",
|
||||
"geo"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## plt.plot(*geo.xy) ## hmm not implemented"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Mapnik\n",
|
||||
"## load the Mapnik python interfaces\n",
|
||||
"## read the shapefile directly with Mapnik libs\n",
|
||||
"## use the IPython Image interface as the drawing target"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def show_mapnik(m):\n",
|
||||
" \"\"\"Returns an IPython Image of the rendered map.\"\"\"\n",
|
||||
" im = mapnik.Image(m.width, m.height)\n",
|
||||
" mapnik.render(m, im)\n",
|
||||
" return Image(data=im.tostring('png32'))\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import mapnik\n",
|
||||
"\n",
|
||||
"m = mapnik.Map(600, 600)\n",
|
||||
"\n",
|
||||
"layer = mapnik.Layer('contour')\n",
|
||||
"layer.datasource = mapnik.Shapefile(file=c_shapefile)\n",
|
||||
"\n",
|
||||
"style = mapnik.Style()\n",
|
||||
"rule = mapnik.Rule()\n",
|
||||
"\n",
|
||||
"#mapnik.Color('green'),0.4\n",
|
||||
"line_symbolizer = mapnik.LineSymbolizer()\n",
|
||||
"rule.symbols.append(line_symbolizer)\n",
|
||||
"\n",
|
||||
"m.layers.append(layer)\n",
|
||||
"style.rules.append(rule)\n",
|
||||
"m.append_style('My Style', style)\n",
|
||||
"layer.styles.append('My Style')\n",
|
||||
"\n",
|
||||
"m.layers.append(layer)\n",
|
||||
"m.zoom_all()\n",
|
||||
"show_mapnik(m)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mapnik.LineSymbolizer?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@ -0,0 +1,416 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"from matplotlib.patches import Polygon\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import fiona\n",
|
||||
"from shapely.geometry import shape\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"import os\n",
|
||||
"from collections import defaultdict, OrderedDict \n",
|
||||
"\n",
|
||||
"## Fiona, ogr2ogr, pyplot demo\n",
|
||||
"## Live 8.5 * darkblue-b\n",
|
||||
"##"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"`Demo Area Analysis in IPython Notebook`\n",
|
||||
"\n",
|
||||
" * Use North Carolina sample data supplied on the OSGeo Live \n",
|
||||
" * Extract an area of interest\n",
|
||||
" * Extract a subset of a state soils record based on a bounding box (BBOX)\n",
|
||||
" * Plot geometry graphically\n",
|
||||
" * Buffer the geometry by a fixed amount; calculate the areas of intersection for soil types\n",
|
||||
" * Produce a chart of the totals with pyplot"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"infile = '/usr/local/share/data/north_carolina/shape/urbanarea.shp'\n",
|
||||
"infile_soils = '/usr/local/share/data/north_carolina/shape/soils_general.shp'\n",
|
||||
"\n",
|
||||
"out_farmville = '/tmp/farmville_shp'\n",
|
||||
"out_farmville_soils = '/tmp/farmville_soil_shp'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## -- Call the gdal/ogr tool suite to extract and subset the original statewide dataset\n",
|
||||
"## use a feature of IPython Notebook to pass python variables to a command line\n",
|
||||
"\n",
|
||||
"!rm -rf $out_farmville\n",
|
||||
"!ogr2ogr -f 'ESRI Shapefile' $out_farmville $infile -where \"NAME='Farmville'\" -dim 2 -a_srs EPSG:3358\n",
|
||||
"\n",
|
||||
"##-- advanced example -- raleigh is multiple polygons\n",
|
||||
"#!rm -rf /tmp/raleigh1_shp\n",
|
||||
"#!ogr2ogr -f 'ESRI Shapefile' /tmp/raleigh1_shp $infile -where \"NAME='Raleigh'\" -dim 2 -a_srs EPSG:3358\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## -- use the IPython notebook to get help on py module Fiona\n",
|
||||
"# fiona?\n",
|
||||
"# fiona.open?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## open and inspect a Shapefile using Fiona\n",
|
||||
"## a POLYGON record could be very long, so don't print the record w/ geometry\n",
|
||||
"with fiona.open( out_farmville ) as f:\n",
|
||||
"    crs = f.crs\n",
|
||||
"    print( 'CRS:',crs)\n",
|
||||
"    ## Python 3: collections have no .next() method; use the iterator protocol\n",
|
||||
"    rec = next(iter(f))\n",
|
||||
"    #print rec\n",
|
||||
"    print( 'SHAPELY REC:',rec.keys() )\n",
|
||||
"    print( 'SHAPEFILE FLDS: ',rec['properties'].keys() )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"with fiona.open( out_farmville,'r') as inp:\n",
|
||||
" #print dir(inp) ## whats inside this object?\n",
|
||||
" #print inp.bounds\n",
|
||||
" \n",
|
||||
" ## take the bounding box of the area of interest\n",
|
||||
" ## add 300 meters on each side for aesthetics\n",
|
||||
" ## (left, bottom, right, top)\n",
|
||||
" left_bnds = inp.bounds[0] - 300\n",
|
||||
" bottom_bnds = inp.bounds[1] - 300\n",
|
||||
" right_bnds = inp.bounds[2] + 300\n",
|
||||
" top_bnds = inp.bounds[3] + 300\n",
|
||||
"\n",
|
||||
"## echo one variable to sanity check\n",
|
||||
"#left_bnds"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## ogr2ogr ... -clipsrc [xmin ymin xmax ymax] ...\n",
|
||||
"\n",
|
||||
"!rm -rf $out_farmville_soils\n",
|
||||
"!ogr2ogr -f 'ESRI Shapefile' $out_farmville_soils $infile_soils -clipsrc $left_bnds $bottom_bnds $right_bnds $top_bnds -dim 2 -a_srs EPSG:3358\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## ----------------------------------------\n",
|
||||
"## plot_multipolygon function\n",
|
||||
"## Author: Kelsey Jordahl, Enthought\n",
|
||||
"## Scipy 2013 geospatial tutorial\n",
|
||||
"\n",
|
||||
"def plot_polygon(ax, poly, color='black'):\n",
|
||||
" a = np.asarray(poly.exterior)\n",
|
||||
" ax.add_patch(Polygon(a, facecolor=color, alpha=0.5))\n",
|
||||
" ax.plot(a[:, 0], a[:, 1], color=color)\n",
|
||||
"\n",
|
||||
"def plot_multipolygon(ax, geom, color='red'):\n",
|
||||
" \"\"\" Can safely call with either Polygon or Multipolygon geometry\n",
|
||||
" \"\"\"\n",
|
||||
" if geom.type == 'Polygon':\n",
|
||||
" plot_polygon(ax, geom, color)\n",
|
||||
" elif geom.type == 'MultiPolygon':\n",
|
||||
" for poly in geom.geoms:\n",
|
||||
" plot_polygon(ax, poly, color)\n",
|
||||
"## ----------------------------------------"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"soil_colors = ['green', 'brown', '#ceFFde']\n",
|
||||
"\n",
|
||||
"dst_geoms = OrderedDict()\n",
|
||||
"src_geoms = OrderedDict()\n",
|
||||
"\n",
|
||||
"cnt = 0\n",
|
||||
"with fiona.open( out_farmville ) as f:\n",
|
||||
" for rec in f:\n",
|
||||
" src_geoms[cnt] = shape(rec['geometry'])\n",
|
||||
" cnt += 1\n",
|
||||
"\n",
|
||||
"cnt = 0\n",
|
||||
"with fiona.open( out_farmville_soils ) as f:\n",
|
||||
" for rec in f:\n",
|
||||
" ## check the geometry type if desired\n",
|
||||
" ## intersections may result in POINT or LINESTRING\n",
|
||||
" if rec['geometry']['type'] != 'Polygon':\n",
|
||||
" continue\n",
|
||||
" gsl = rec['properties']['GSL_NAME']\n",
|
||||
" dst_geoms[gsl] = shape(rec['geometry'])\n",
|
||||
" cnt += 1\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"fig, ax = plt.subplots(figsize=(11,11))\n",
|
||||
"plt.title(\"Soil Class and Urban Area via N. Carolina Sample Dataset (CC-by-SA) OSGeo\")\n",
|
||||
"\n",
|
||||
"cnt = 0\n",
|
||||
"for key in dst_geoms :\n",
|
||||
" ## cnt mod (number of colors) is always a safe index into colors[]\n",
|
||||
" color = soil_colors[ cnt%len(soil_colors) ]\n",
|
||||
" plot_multipolygon(ax, dst_geoms[key], color=color)\n",
|
||||
" cnt += 1\n",
|
||||
" \n",
|
||||
"cnt = 0\n",
|
||||
"color = 'gray'\n",
|
||||
"with fiona.open( out_farmville ) as f:\n",
|
||||
" for rec in f:\n",
|
||||
" plot_multipolygon(ax, src_geoms[cnt], color=color)\n",
|
||||
" cnt += 1\n",
|
||||
"\n",
|
||||
"#ax.add_patch(Polygon( src_geoms[0].centroid, facecolor='black', alpha=0.5))\n",
|
||||
"\n",
|
||||
"labels = ax.get_xticklabels() \n",
|
||||
"for label in labels: \n",
|
||||
" label.set_rotation(90) \n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## show all the variables defined so far\n",
|
||||
"%whos"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## what geometries have been created?\n",
|
||||
"print( 'src_geoms len: ',len(src_geoms) )\n",
|
||||
"print( 'dst_geoms len: ',len(dst_geoms ) )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## buffer the urbanarea by N meters\n",
|
||||
"## save the result in the source-geometry list\n",
|
||||
"\n",
|
||||
"src_geom_buffered = src_geoms[0].buffer(166)\n",
|
||||
"src_geoms[1] = src_geom_buffered\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fig, ax = plt.subplots(figsize=(11,11))\n",
|
||||
"\n",
|
||||
"cnt = 0\n",
|
||||
"for key in dst_geoms :\n",
|
||||
" color = soil_colors[ cnt%len(soil_colors) ]\n",
|
||||
" plot_multipolygon(ax, dst_geoms[key], color=color)\n",
|
||||
" cnt += 1\n",
|
||||
"\n",
|
||||
"plot_multipolygon(ax, src_geoms[1], color='gray')\n",
|
||||
"plot_multipolygon(ax, src_geoms[0], color='gray')\n",
|
||||
" \n",
|
||||
"labels = ax.get_xticklabels() \n",
|
||||
"for label in labels: \n",
|
||||
" label.set_rotation(90) \n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## take the intersection of each soil area against the buffered poly of interest\n",
|
||||
"## store into a third convenient list\n",
|
||||
"res_geoms = OrderedDict()\n",
|
||||
"for key in dst_geoms:\n",
|
||||
" res_geoms[key] = src_geom_buffered.intersection( dst_geoms[key] )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fig, ax = plt.subplots(figsize=(11,11))\n",
|
||||
"\n",
|
||||
"cnt = 0\n",
|
||||
"for key in res_geoms :\n",
|
||||
" color = soil_colors[ cnt%len(soil_colors) ]\n",
|
||||
" plot_multipolygon(ax, res_geoms[key], color=color)\n",
|
||||
" cnt += 1\n",
|
||||
" \n",
|
||||
"labels = ax.get_xticklabels() \n",
|
||||
"for label in labels: \n",
|
||||
" label.set_rotation(90) \n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## define a lookup table of mock attributes of each soil type\n",
|
||||
"## (substitute a real data source here)\n",
|
||||
"\n",
|
||||
"soils_lkup = {\n",
|
||||
" 'NC038' : { 'TEXTURE_CLAY':0.53, 'TEXTURE_SILT':0.33, 'TEXTURE_SAND':0.44 },\n",
|
||||
" 'NC035' : { 'TEXTURE_CLAY':0.70, 'TEXTURE_SILT':0.33, 'TEXTURE_SAND':0.44 },\n",
|
||||
" 'NC034' : { 'TEXTURE_CLAY':0.23, 'TEXTURE_SILT':0.74, 'TEXTURE_SAND':0.44 }\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"## get a rough total area for display purposes\n",
|
||||
"sum_areas = 0.0\n",
|
||||
"for key in res_geoms:\n",
|
||||
" sum_areas += int(res_geoms[key].area)\n",
|
||||
"\n",
|
||||
"## record the area and percentage area in a convenient dictionary \n",
|
||||
"tdd = {}\n",
|
||||
"for key in res_geoms:\n",
|
||||
" tdd[key] = soils_lkup[key]\n",
|
||||
" tdd[key]['area'] = int(res_geoms[key].area)\n",
|
||||
" tdd[key]['perc'] = int((res_geoms[key].area/sum_areas)*100)/100.0\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Now all attributes for visualization are available in a single dict\n",
|
||||
"## in a larger system this could be delivered for serving graphical reports\n",
|
||||
"tdd"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## What matplotlib pylot graphs are available ?\n",
|
||||
"## http://matplotlib.org/api/pyplot_api.html\n",
|
||||
"##\n",
|
||||
"## Use IPython built-in help to discover pie chart attributes\n",
|
||||
"\n",
|
||||
"plt.pie?\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# The slices will be ordered and plotted counter-clockwise.\n",
|
||||
"labels = [ 'NC034', 'NC035', 'NC038' ]\n",
|
||||
"sizes = [ tdd['NC034']['perc']*100, tdd['NC035']['perc']*100, tdd['NC038']['perc']*100 ]\n",
|
||||
"pie_colors = ['lightcoral', '#eeefee', 'yellowgreen']\n",
|
||||
"# only \"explode\" the 2nd slice (i.e. 'NC035')\n",
|
||||
"explode = (0, 0.1, 0)\n",
|
||||
"\n",
|
||||
"plt.pie(sizes, explode=explode, labels=labels, colors=pie_colors,\n",
|
||||
" autopct='%1.1f%%', shadow=True, startangle=90)\n",
|
||||
"# Set aspect ratio to be equal so that pie is drawn as a circle.\n",
|
||||
"plt.axis('equal')\n",
|
||||
"\n",
|
||||
"plt.show()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@ -0,0 +1,205 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"np.__version__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"pd.__version__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import geopandas as gpd\n",
|
||||
"gpd.__version__"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from __future__ import absolute_import\n",
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"from geopandas import read_file, GeoDataFrame\n",
|
||||
"from geopandas.datasets import get_path"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dsets = ['naturalearth_lowres','naturalearth_cities','nybb']\n",
|
||||
"\n",
|
||||
"res0 = read_file(get_path( dsets[0] ))\n",
|
||||
"res1 = read_file(get_path( dsets[1] ))\n",
|
||||
"res2 = read_file(get_path( dsets[2] ))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res0.plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res1[ :8]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res2[3:5].plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fname = '/usr/share/qgis/resources/data/world_map.gpkg'\n",
|
||||
"w_gpd = gpd.read_file(fname)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"w_gpd.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%whos"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"gpd.read_postgis?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import psycopg2\n",
|
||||
"\n",
|
||||
"conn = psycopg2.connect('dbname=osm_local')\n",
|
||||
"osm_gpd = gpd.read_postgis(\n",
|
||||
" 'select * from planet_osm_polygon', \n",
|
||||
" conn, geom_col='way' )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"osm_gpd[0:3].plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"osm_bldgs_gpd = gpd.read_postgis(\n",
|
||||
" 'select * from planet_osm_polygon where building is not null', \n",
|
||||
" conn, geom_col='way' )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"osm_bldgs_gpd[:12].plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from fiona.crs import from_epsg\n",
|
||||
"from_epsg?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,153 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from __future__ import absolute_import\n",
|
||||
"\n",
|
||||
"import json\n",
|
||||
"import os\n",
|
||||
"import tempfile\n",
|
||||
"import shutil\n",
|
||||
"\n",
|
||||
"import numpy as np\n",
|
||||
"import pandas as pd\n",
|
||||
"from shapely.geometry import Point, Polygon\n",
|
||||
"import fiona\n",
|
||||
"\n",
|
||||
"import geopandas\n",
|
||||
"from geopandas import GeoDataFrame, read_file, GeoSeries\n",
|
||||
"#from geopandas.geodataframe import points_from_xy\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# NEW GeoDataFrame via read_file()\n",
|
||||
"\n",
|
||||
"tempdir = tempfile.mkdtemp()\n",
|
||||
"\n",
|
||||
"nybb_filename = geopandas.datasets.get_path('nybb')\n",
|
||||
"df_nybb = read_file(nybb_filename)\n",
|
||||
"df_nybb\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# NEW GeoDataFrame via list\n",
|
||||
"\n",
|
||||
"nybb_filename = geopandas.datasets.get_path('nybb')\n",
|
||||
"with fiona.open(nybb_filename) as f:\n",
|
||||
" features = list(f)\n",
|
||||
" crs = f.crs\n",
|
||||
"\n",
|
||||
"df = GeoDataFrame.from_features(features, crs=crs)\n",
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# NEW GeoDataFrame via dict\n",
|
||||
"\n",
|
||||
"crs = {'init': 'epsg:4326'}\n",
|
||||
"N = 10\n",
|
||||
"\n",
|
||||
"df2_synthetic = GeoDataFrame([\n",
|
||||
" {'geometry': Point(x, y), 'value1': x + y, 'value2': x * y}\n",
|
||||
" for x, y in zip(range(N), range(N))], crs=crs)\n",
|
||||
"df2_synthetic\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df2_synthetic.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df2_synthetic.to_json()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## Filter to subset\n",
|
||||
"## [ EXPR ] returns a Dataframe of Boolean\n",
|
||||
"## df[ EXPR ] returns the subset as a new Dataframe\n",
|
||||
"\n",
|
||||
"df_nybb_bs= df_nybb[ df_nybb['BoroName'].str.contains('B') ]\n",
|
||||
"df_nybb_bs"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"## show all defined variables\n",
|
||||
"%whos"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,113 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import psycopg2\n",
|
||||
"import geopandas as gpd\n",
|
||||
"\n",
|
||||
"%matplotlib inline"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"conn = psycopg2.connect( \"dbname=osm_local\")\n",
|
||||
"curs = conn.cursor()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"tSQL = '''select osm_id, \n",
|
||||
" ST_IsSimple(way) as simple,\n",
|
||||
" ST_NPoints(way) as pts_cnt,\n",
|
||||
" (building is not Null) as bldg, \n",
|
||||
" way as geom from planet_osm_polygon\n",
|
||||
" WHERE ST_NPoints(way) > 7\n",
|
||||
"'''\n",
|
||||
"\n",
|
||||
"res_df = gpd.read_postgis( tSQL, conn)\n",
|
||||
"#print( res_df.shape[0],\" rows of \",res_df.shape[1],\" columns\")\n",
|
||||
"#res_df.dtypes\n",
|
||||
"res_df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# make a subset dataframe via a filter expression\n",
|
||||
"# boolean column 'bldg' only keep True entries\n",
|
||||
"res_df_A = res_df[ res_df['bldg'] ]\n",
|
||||
"\n",
|
||||
"# example test -- exactly 24 poly points\n",
|
||||
"res_df_A33 = res_df_A[ res_df_A['pts_cnt'] == 24]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res_df_A33.plot( figsize=(12,20) )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res_df_A.plot( column='pts_cnt', cmap=None, figsize=(12,20) )"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
|
@ -0,0 +1,442 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# MapScript Quick Start\n",
|
||||
"\n",
|
||||
"Welcome to the Python MapScript quick start guide. \n",
|
||||
"MapScript provides a programming interface to MapServer, and this notebook\n",
|
||||
"provides an overview of its key functionality. \n",
|
||||
"\n",
|
||||
"## Mapfiles\n",
|
||||
"The simplest way to use MapScript is to work with an existing [Mapfile](https://mapserver.org/mapfile/). \n",
|
||||
"A new ```mapObj``` can be created by passing the path to a Mapfile. We will \n",
|
||||
"be working with the Itasca demo map that is also used in the \n",
|
||||
"[MapServer Demo](http://localhost/mapserver_demos/itasca/) on OSGeoLive. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import sys\n",
|
||||
"#sys.path.append(\"/rofs/usr/lib/python2.7/dist-packages\") # temporary hack for OSGeoLive\n",
|
||||
"\n",
|
||||
"import os\n",
|
||||
"import mapscript\n",
|
||||
"from IPython.display import Image\n",
|
||||
"\n",
|
||||
"#demo_fld = os.getenv(\"MAPSERVER_DEMO\")\n",
|
||||
"#mapfile = os.path.join(demo_fld, \"itasca.map\")\n",
|
||||
"\n",
|
||||
"#osgeolive 13 \n",
|
||||
"fname = '/usr/local/share/mapserver/demos/itasca/itasca.map'\n",
|
||||
"map = mapscript.mapObj(fname)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Anything found in the Mapfile can be accessed and manipulated using MapScript. \n",
|
||||
"For example we can get the count of all the layers in the Mapfile, and loop\n",
|
||||
"through them printing out each layers name. \n",
|
||||
"\n",
|
||||
"MapScript objects are typically accessed using an index. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for idx in range(0, map.numlayers):\n",
|
||||
" lyr = map.getLayer(idx)\n",
|
||||
" print(lyr.name)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Drawing Maps\n",
|
||||
"MapScript can be used to create an image file. The draw method\n",
|
||||
"returns an imageObj which can be saved to a filename on disk. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import tempfile\n",
|
||||
"# before creating images let's set the working directory to the temp folder\n",
|
||||
"os.chdir(tempfile.gettempdir()) \n",
|
||||
"\n",
|
||||
"output_file = \"map.png\"\n",
|
||||
"image = map.draw()\n",
|
||||
"image.save(output_file)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The map image above doesn't contain all the layers in the Mapfile. \n",
|
||||
"This can be because they are set to hidden by default using ```LAYER STATUS OFF```.\n",
|
||||
"\n",
|
||||
"To turn on these layers and create a more interesting map, we \n",
|
||||
"can loop through the layers again and set their ```STATUS``` to ```ON```. \n",
|
||||
"We can then use the ```isVisible``` method to check if the layer will\n",
|
||||
"be drawn onto the map. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for idx in range(0, map.numlayers):\n",
|
||||
" lyr = map.getLayer(idx)\n",
|
||||
" lyr.status = mapscript.MS_ON\n",
|
||||
" print(lyr.name, lyr.isVisible())"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"You may notice that the *ctybdpy2* layer is still not visible even though\n",
|
||||
"we set its ```STATUS``` to ```ON```. This is due to the ```REQUIRES``` keyword in its layer \n",
|
||||
"definition that hides the layer if the *drgs* layer is displayed. \n",
|
||||
"The *ctyrdln3* and *ctyrdln3_anno* layers are both hidden because of the ```MAXSCALE 300000```\n",
|
||||
"layer setting. \n",
|
||||
"\n",
|
||||
"Now we can draw the map again with the newly visible layers. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"output_file = \"map_full.png\"\n",
|
||||
"image = map.draw()\n",
|
||||
"image.save(output_file)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Other types of images can also be created from the ```mapObj```. These\n",
|
||||
"use the same process of creating an ```imageObj``` and saving it to disk. \n",
|
||||
"\n",
|
||||
"For example to create a legend image:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"output_file = \"map_legend.png\"\n",
|
||||
"legend_img = map.drawLegend()\n",
|
||||
"legend_img.save(output_file)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Querying Maps\n",
|
||||
"As well as drawing maps using MapScript we can also query the data\n",
|
||||
"referenced by the layers. In this example we will be finding the\n",
|
||||
"layer to query using its name, and then querying the ```NAME``` field to find\n",
|
||||
"the name of an airport. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"qry_layer = map.getLayerByName('airports')\n",
|
||||
"qry_layer.queryByAttributes(qry_layer.map, \"NAME\", \"Bowstring Municipal Airport\", \n",
|
||||
" mapscript.MS_SINGLE)\n",
|
||||
"\n",
|
||||
"results = qry_layer.getResults()\n",
|
||||
"assert results.numresults == 1 # as we did a single query (using MS_SINGLE) there should be only one result\n",
|
||||
"result = results.getResult(0)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Query results are stored as ```resultCacheObj```. These contain a reference to the\n",
|
||||
"result feature - a ```shapeObj```. The ```shapeObj``` can access both the geometry and \n",
|
||||
"attributes of a feature. \n",
|
||||
"\n",
|
||||
"Let's get the ```shapeObj``` from the ```resultCacheObj``` and \n",
|
||||
"loop through the shapes attributes to store them in a list. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"result_shp = qry_layer.getShape(result)\n",
|
||||
"\n",
|
||||
"values = []\n",
|
||||
"for idx in range(0, result_shp.numvalues):\n",
|
||||
" values.append(result_shp.getValue(idx))\n",
|
||||
"\n",
|
||||
"print(values)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"It would be nice to also have the property names alongside the values. Field names\n",
|
||||
"are stored in the layer in which the ```shapeObj``` belongs, and not in the ```shapeObj```\n",
|
||||
"itself. We can get a list of fields from the layer, and then use the Python ```zip``` function\n",
|
||||
"to join them with the shape values: "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"fields = []\n",
|
||||
"for idx in range(0, qry_layer.numitems):\n",
|
||||
" fields.append(qry_layer.getItem(idx))\n",
|
||||
"\n",
|
||||
"print(fields)\n",
|
||||
"props = zip(fields, values) # join fields to values\n",
|
||||
"print(props)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We can also create a map showing the query results: \n",
|
||||
"*Note the imageObj is broken for Python MapScript 7.0, but is fixed in 7.2*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# create a new 400 by 400 empty image\n",
|
||||
"query_image = mapscript.imageObj(400, 400)\n",
|
||||
"# draw the query into the image and save it to file\n",
|
||||
"qry_layer.drawQuery(qry_layer.map, query_image)\n",
|
||||
"output_file = r\"layer_query.png\"\n",
|
||||
"query_image.save(output_file)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"If we want to zoom in on the results we can set the map extent to a buffered area\n",
|
||||
"around the results: "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"bbox = result_shp.bounds\n",
|
||||
"print(bbox.minx, bbox.miny, bbox.maxx, bbox.maxy)\n",
|
||||
"buffer = 2000\n",
|
||||
"\n",
|
||||
"map.getLayerByName('drgs').status = mapscript.MS_OFF # hide the raster layer for this map\n",
|
||||
"map.setExtent(bbox.minx - buffer, bbox.miny - buffer, bbox.maxx + buffer, bbox.maxy + buffer)\n",
|
||||
"\n",
|
||||
"output_file = r\"map_query.png\"\n",
|
||||
"image = map.draw()\n",
|
||||
"image.save(output_file)\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## OGC Web Services\n",
|
||||
"\n",
|
||||
"MapScript can also be used to send requests to MapServer OWS capabilities, to \n",
|
||||
"query WMS and WFS services. First we will get the WMS GetCapabilities XML for the map: "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ows_req = mapscript.OWSRequest()\n",
|
||||
"ows_req.type = mapscript.MS_GET_REQUEST\n",
|
||||
"ows_req.setParameter(\"SERVICE\", \"WMS\");\n",
|
||||
"ows_req.setParameter(\"VERSION\", \"1.3.0\");\n",
|
||||
"ows_req.setParameter(\"REQUEST\", \"GetCapabilities\");"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We use the msIO methods to capture the response to the request\n",
|
||||
"that is sent to ```stdout```. \n",
|
||||
"The response is typically an HTTP response with HTTP content headers. \n",
|
||||
"We can strip these out using MapScript"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"mapscript.msIO_installStdoutToBuffer()\n",
|
||||
"map.OWSDispatch(ows_req)\n",
|
||||
"content_type = mapscript.msIO_stripStdoutBufferContentType()\n",
|
||||
"# remove the content type header from the XML\n",
|
||||
"mapscript.msIO_stripStdoutBufferContentHeaders() # Strip all Content-* headers\n",
|
||||
"result = mapscript.msIO_getStdoutBufferBytes()\n",
|
||||
"print(result)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We can also retrieve images from a WMS service. \n",
|
||||
"Rather than setting lots of individual parameters we can simply load them from\n",
|
||||
"a string in the same format as would be sent via a web client. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# First let's get the extent of the map to use in the request\n",
|
||||
"extent = map.extent\n",
|
||||
"print(extent)\n",
|
||||
"\n",
|
||||
"bbox = \"BBOX={},{},{},{}\".format(extent.minx, extent.miny, extent.maxx, extent.maxy)\n",
|
||||
"querystring = \"SERVICE=WMS&REQUEST=GetMap&VERSION=1.3.0&LAYERS=lakespy2&CRS=EPSG:26915&FORMAT=image/png&WIDTH=400&HEIGHT=400&{}\".format(bbox)\n",
|
||||
"\n",
|
||||
"ows_req = mapscript.OWSRequest()\n",
|
||||
"ows_req.loadParamsFromURL(querystring)\n",
|
||||
"success = map.OWSDispatch(ows_req)\n",
|
||||
"assert success == mapscript.MS_SUCCESS\n",
|
||||
"\n",
|
||||
"# clear the HTTP headers or we will have an invalid image\n",
|
||||
"headers = mapscript.msIO_getAndStripStdoutBufferMimeHeaders()\n",
|
||||
"result = mapscript.msIO_getStdoutBufferBytes()\n",
|
||||
"\n",
|
||||
"output_file = \"wms.png\"\n",
|
||||
"with open(output_file, \"wb\") as f:\n",
|
||||
" f.write(result)\n",
|
||||
"\n",
|
||||
"Image(filename=output_file)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Finally let's get the SLD for one of the layers in the map: "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"lines_to_next_cell": 2
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"lakes_layer = map.getLayerByName('lakespy2')\n",
|
||||
"result = lakes_layer.generateSLD()\n",
|
||||
"print(result)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Thanks for working through this notebook! For more information on MapScript\n",
|
||||
"please see the [MapScript documentation](https://mapserver.org/mapscript/introduction.html). \n",
|
||||
"Additional Python examples can be found in the [MapServer GitHub repository](https://github.com/mapserver/mapserver/tree/master/mapscript/python/examples)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"jupytext": {
|
||||
"formats": "ipynb,pct.py:percent,lgt.py:light,spx.py:sphinx,md,Rmd",
|
||||
"text_representation": {
|
||||
"extension": ".py",
|
||||
"format_name": "percent",
|
||||
"format_version": "1.1",
|
||||
"jupytext_version": "0.8.0"
|
||||
}
|
||||
},
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.8.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,921 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Tabular data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv(\"data/titanic.csv\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Starting from reading this dataset, to answering questions about this data in a few lines of code:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"**What is the age distribution of the passengers?**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['Age'].hist()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"**How does the survival rate of the passengers differ between sexes?**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('Sex')[['Survived']].aggregate(lambda x: x.sum() / len(x))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"**Or how does it differ between the different classes?**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('Pclass')['Survived'].aggregate(lambda x: x.sum() / len(x)).plot(kind='bar')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"**Are young people more likely to survive?**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df['Survived'].sum() / df['Survived'].count()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df25 = df[df['Age'] <= 25]\n",
|
||||
"df25['Survived'].sum() / len(df25['Survived'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"All the needed functionality for the above examples will be explained throughout this tutorial."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Data structures\n",
|
||||
"\n",
|
||||
"Pandas provides two fundamental data objects, for 1D (``Series``) and 2D data (``DataFrame``)."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Series\n",
|
||||
"\n",
|
||||
"A Series is a basic holder for **one-dimensional labeled data**. It can be created much as a NumPy array is created:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s = pd.Series([0.1, 0.2, 0.3, 0.4])\n",
|
||||
"s"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Attributes of a Series: `index` and `values`\n",
|
||||
"\n",
|
||||
"The series has a built-in concept of an **index**, which by default is the numbers *0* through *N - 1*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.index"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"You can access the underlying numpy array representation with the `.values` attribute:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We can access series values via the index, just like for NumPy arrays:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s[0]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Unlike the NumPy array, though, this index can be something other than integers:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s2 = pd.Series(np.arange(4), index=['a', 'b', 'c', 'd'])\n",
|
||||
"s2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s2['c']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"In this way, a ``Series`` object can be thought of as similar to an ordered dictionary mapping one typed value to another typed value.\n",
|
||||
"\n",
|
||||
"In fact, it's possible to construct a series directly from a Python dictionary:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pop_dict = {'Germany': 81.3, \n",
|
||||
" 'Belgium': 11.3, \n",
|
||||
" 'France': 64.3, \n",
|
||||
" 'United Kingdom': 64.9, \n",
|
||||
" 'Netherlands': 16.9}\n",
|
||||
"population = pd.Series(pop_dict)\n",
|
||||
"population"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We can index the populations like a dict as expected:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"population['France']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"but with the power of numpy arrays:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"population * 1000"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## DataFrames: Multi-dimensional Data\n",
|
||||
"\n",
|
||||
"A DataFrame is a **tabular data structure** (multi-dimensional object to hold labeled data) comprised of rows and columns, akin to a spreadsheet, database table, or R's data.frame object. You can think of it as multiple Series objects which share the same index.\n",
|
||||
"\n",
|
||||
"<img src=\"img/dataframe.png\" width=110%>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"One of the most common ways of creating a dataframe is from a dictionary of arrays or lists.\n",
|
||||
"\n",
|
||||
"Note that in the IPython notebook, the dataframe will display in a rich HTML view:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"data = {'country': ['Belgium', 'France', 'Germany', 'Netherlands', 'United Kingdom'],\n",
|
||||
" 'population': [11.3, 64.3, 81.3, 16.9, 64.9],\n",
|
||||
" 'area': [30510, 671308, 357050, 41526, 244820],\n",
|
||||
" 'capital': ['Brussels', 'Paris', 'Berlin', 'Amsterdam', 'London']}\n",
|
||||
"countries = pd.DataFrame(data)\n",
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Attributes of the DataFrame\n",
|
||||
"\n",
|
||||
"Besides an `index` attribute, a DataFrame also has a `columns` attribute:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.index"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.columns"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"To check the data types of the different columns:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.dtypes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"An overview of that information can be given with the `info()` method:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Also a DataFrame has a `values` attribute, but attention: when you have heterogeneous data, all values will be upcasted:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.values"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"If we don't like what the index looks like, we can reset it and set one of our columns:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries = countries.set_index('country')\n",
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"To access a Series representing a column in the data, use typical indexing syntax:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['area']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Basic operations on Series/Dataframes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"As you play around with DataFrames, you'll notice that many operations which work on NumPy arrays will also work on dataframes.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# redefining the example objects\n",
|
||||
"\n",
|
||||
"population = pd.Series({'Germany': 81.3, 'Belgium': 11.3, 'France': 64.3, \n",
|
||||
" 'United Kingdom': 64.9, 'Netherlands': 16.9})\n",
|
||||
"\n",
|
||||
"countries = pd.DataFrame({'country': ['Belgium', 'France', 'Germany', 'Netherlands', 'United Kingdom'],\n",
|
||||
" 'population': [11.3, 64.3, 81.3, 16.9, 64.9],\n",
|
||||
" 'area': [30510, 671308, 357050, 41526, 244820],\n",
|
||||
" 'capital': ['Brussels', 'Paris', 'Berlin', 'Amsterdam', 'London']})"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Elementwise-operations (like numpy)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Just like with numpy arrays, many operations are element-wise:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"population / 100"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['population'] / countries['area']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Alignment! (unlike numpy)\n",
|
||||
"\n",
|
||||
"However, pay attention to **alignment**: operations between series will align on the index:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s1 = population[['Belgium', 'France']]\n",
|
||||
"s2 = population[['France', 'Germany']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s1 + s2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Reductions (like numpy)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The average population number:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"population.mean()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The minimum area:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['area'].min()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For dataframes, often only the numeric columns are included in the result:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.median()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Calculate the population numbers relative to Belgium\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false,
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Calculate the population density for each country and add this as a new column to the dataframe.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false,
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false,
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Some other useful methods"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Sorting the rows of the DataFrame according to the values in a column:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.sort_values('density', ascending=False)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"One useful method to use is the ``describe`` method, which computes summary statistics for each column:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.describe()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The `plot` method can be used to quickly visualize the data in different ways:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"However, for this dataset, it does not say that much:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['population'].plot(kind='bar')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"You can play with the `kind` keyword: 'line', 'bar', 'hist', 'density', 'area', 'pie', 'scatter', 'hexbin'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Importing and exporting data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"A wide range of input/output formats are natively supported by pandas:\n",
|
||||
"\n",
|
||||
"* CSV, text\n",
|
||||
"* SQL database\n",
|
||||
"* Excel\n",
|
||||
"* HDF5\n",
|
||||
"* json\n",
|
||||
"* html\n",
|
||||
"* pickle\n",
|
||||
"* ..."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pd.read"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"states.to"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Other features\n",
|
||||
"\n",
|
||||
"* Working with missing data (`.dropna()`, `pd.isnull()`)\n",
|
||||
"* Merging and joining (`concat`, `join`)\n",
|
||||
"* Grouping: `groupby` functionality\n",
|
||||
"* Reshaping (`stack`, `pivot`)\n",
|
||||
"* Time series manipulation (resampling, timezones, ..)\n",
|
||||
"* Easy plotting"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"There are many, many more interesting operations that can be done on Series and DataFrame objects, but rather than continue using this toy data, we'll instead move to a real-world example, and illustrate some of the advanced concepts along the way.\n",
|
||||
"\n",
|
||||
"See the next notebooks!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## Acknowledgement\n",
|
||||
"\n",
|
||||
"> *© 2015, Stijn Van Hoey and Joris Van den Bossche (<mailto:stijnvanhoey@gmail.com>, <mailto:jorisvandenbossche@gmail.com>). Licensed under [CC BY 4.0 Creative Commons](http://creativecommons.org/licenses/by/4.0/)*\n",
|
||||
"\n",
|
||||
"> This notebook is partly based on material of Jake Vanderplas (https://github.com/jakevdp/OsloWorkshop2014).\n",
|
||||
"\n",
|
||||
"---"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.5.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@ -0,0 +1,913 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Indexing and selecting data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# redefining the example objects\n",
|
||||
"\n",
|
||||
"# series\n",
|
||||
"population = pd.Series({'Germany': 81.3, 'Belgium': 11.3, 'France': 64.3, \n",
|
||||
" 'United Kingdom': 64.9, 'Netherlands': 16.9})\n",
|
||||
"\n",
|
||||
"# dataframe\n",
|
||||
"data = {'country': ['Belgium', 'France', 'Germany', 'Netherlands', 'United Kingdom'],\n",
|
||||
" 'population': [11.3, 64.3, 81.3, 16.9, 64.9],\n",
|
||||
" 'area': [30510, 671308, 357050, 41526, 244820],\n",
|
||||
" 'capital': ['Brussels', 'Paris', 'Berlin', 'Amsterdam', 'London']}\n",
|
||||
"countries = pd.DataFrame(data)\n",
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Setting the index to the country names:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries = countries.set_index('country')\n",
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Some notes on selecting data\n",
|
||||
"\n",
|
||||
"One of pandas' basic features is the labeling of rows and columns, but this also makes indexing a bit more complex compared to numpy. We now have to distinguish between:\n",
|
||||
"\n",
|
||||
"- selection by label\n",
|
||||
"- selection by position."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### `data[]` provides some convenience shortcuts "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For a DataFrame, basic indexing selects the columns.\n",
|
||||
"\n",
|
||||
"Selecting a single column:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['area']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"or multiple columns:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries[['area', 'population']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"But, slicing accesses the rows:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['France':'Netherlands']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-danger\">\n",
|
||||
" <b>NOTE</b>: Unlike slicing in numpy, the end label is **included**.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"So as a summary, `[]` provides the following convenience shortcuts:\n",
|
||||
"\n",
|
||||
"- Series: selecting a label: `s[label]`\n",
|
||||
"- DataFrame: selecting a single or multiple columns: `df['col']` or `df[['col1', 'col2']]`\n",
|
||||
"- DataFrame: slicing the rows: `df['row_label1':'row_label2']` or `df[mask]`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"### Systematic indexing with `loc` and `iloc`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"When using `[]` like above, you can only select from one axis at once (rows or columns, not both). For more advanced indexing, you have some extra attributes:\n",
|
||||
" \n",
|
||||
"* `loc`: selection by label\n",
|
||||
"* `iloc`: selection by position\n",
|
||||
"\n",
|
||||
"These methods index the different dimensions of the frame:\n",
|
||||
"\n",
|
||||
"* `df.loc[row_indexer, column_indexer]`\n",
|
||||
"* `df.iloc[row_indexer, column_indexer]`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Selecting a single element:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.loc['Germany', 'area']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"But the row or column indexer can also be a list, slice, boolean array, .."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.loc['France':'Germany', ['area', 'population']]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"---\n",
|
||||
"Selecting by position with `iloc` works similarly to indexing numpy arrays:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.iloc[0:2,1:3]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The different indexing methods can also be used to assign data:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries2 = countries.copy()\n",
|
||||
"countries2.loc['Belgium':'Germany', 'population'] = 10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries2"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Boolean indexing (filtering)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Often, you want to select rows based on a certain condition. This can be done with 'boolean indexing' (like a where clause in SQL). \n",
|
||||
"\n",
|
||||
"The indexer (or boolean mask) should be 1-dimensional and the same length as the thing being indexed."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['area'] > 100000"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries[countries['area'] > 100000]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"---\n",
|
||||
"\n",
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Add a column `density` with the population density (note: population column is expressed in millions)\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Select the capital and the population column of those countries where the density is larger than 300\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Add a column 'density_ratio' with the ratio of the density to the mean density\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Change the capital of the UK to Cambridge\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Select all countries whose population density is between 100 and 300 people/km²\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## Some other useful methods: `isin` and string methods"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The `isin` method of Series is very useful to select rows that may contain certain values:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s = countries['capital']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.isin?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.isin(['Berlin', 'London'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"This can then be used to filter the dataframe with boolean indexing:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries[countries['capital'].isin(['Berlin', 'London'])]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Let's say we want to select all data for which the capital starts with a 'B'. In Python, when having a string, we could use the `startswith` method:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"'Berlin'.startswith('B')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"In pandas, these are available on a Series through the `str` namespace:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['capital'].str.startswith('B')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For an overview of all string methods, see: http://pandas.pydata.org/pandas-docs/stable/api.html#string-handling"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Select all countries that have capital names with more than 7 characters\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Select all countries that have capital names that contain the character sequence 'am'\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Pitfall: chained indexing (and the 'SettingWithCopyWarning')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries.loc['Belgium', 'capital'] = 'Ghent' "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries['capital']['Belgium'] = 'Antwerp' "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries[countries['capital'] == 'Antwerp']['capital'] = 'Brussels' "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"countries"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"How to avoid this?\n",
|
||||
"\n",
|
||||
"* Use `loc` instead of chained indexing if possible!\n",
|
||||
"* Or `copy` explicitly if you don't want to change the original data."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"## More exercises!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For the quick ones among you, here are some more exercises with some larger dataframe with film data. These exercises are based on the [PyCon tutorial of Brandon Rhodes](https://github.com/brandon-rhodes/pycon-pandas-tutorial/) (so all credit to him!) and the datasets he prepared for that. You can download these data from here: [`titles.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKajNMa1pfSzN6Q3M) and [`cast.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKal9UYTJSR2ZhSW8) and put them in the `/data` folder."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"cast = pd.read_csv('data/cast.csv')\n",
|
||||
"cast.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"titles = pd.read_csv('data/titles.csv')\n",
|
||||
"titles.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many movies are listed in the titles dataframe?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: What are the earliest two films listed in the titles dataframe?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many movies have the title \"Hamlet\"?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List all of the \"Treasure Island\" movies from earliest to most recent.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many movies were made from 1950 through 1959?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many roles in the movie \"Inception\" are NOT ranked by an \"n\" value?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: But how many roles in the movie \"Inception\" did receive an \"n\" value?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Display the cast of \"North by Northwest\" in their correct \"n\"-value order, ignoring roles that did not earn a numeric \"n\" value.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many roles were credited in the silent 1921 version of Hamlet?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List the supporting roles (having n=2) played by Cary Grant in the 1940s, in order by year.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"celltoolbar": "Nbtutor - export exercises",
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.5.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@ -0,0 +1,245 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Advanced indexing"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass\n",
|
||||
"\n",
|
||||
"pd.options.display.max_rows = 10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"This dataset is borrowed from the [PyCon tutorial of Brandon Rhodes](https://github.com/brandon-rhodes/pycon-pandas-tutorial/) (so all credit to him!). You can download these data from here: [`titles.csv`](https://drive.google.com/file/d/0B3G70MlBnCgKa0U4WFdWdGdVOFU/view?usp=sharing) and [`cast.csv`](https://drive.google.com/file/d/0B3G70MlBnCgKRzRmTWdQTUdjNnM/view?usp=sharing) and put them in the `/data` folder."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"cast = pd.read_csv('data/cast.csv')\n",
|
||||
"cast.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"titles = pd.read_csv('data/titles.csv')\n",
|
||||
"titles.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Setting columns as the index"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Why is it useful to have an index?\n",
|
||||
"\n",
|
||||
"- Giving meaningful labels to your data -> easier to remember which data are where\n",
|
||||
"- Unleash some powerful methods, eg with a DatetimeIndex for time series\n",
|
||||
"- Easier and faster selection of data\n",
|
||||
"\n",
|
||||
"It is this last one we are going to explore here!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Setting the `title` column as the index:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c = cast.set_index('title')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Instead of doing:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%time\n",
|
||||
"cast[cast['title'] == 'Hamlet']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"we can now do:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%time\n",
|
||||
"c.loc['Hamlet']"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"But you can also have multiple columns as the index, leading to a **multi-index or hierarchical index**:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c = cast.set_index(['title', 'year'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%time\n",
|
||||
"c.loc[('Hamlet', 2000),:]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"c2 = c.sort_index()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%%time\n",
|
||||
"c2.loc[('Hamlet', 2000),:]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"celltoolbar": "Nbtutor - export exercises",
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.4.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@ -0,0 +1,288 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Groupby operations"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "slide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"Some imports:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "-"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass\n",
|
||||
"\n",
|
||||
"pd.options.display.max_rows = 10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"## Some 'theory': the groupby operation (split-apply-combine)\n",
|
||||
"\n",
|
||||
"The \"group by\" concept: we want to **apply the same function on subsets of your dataframe, based on some key to split the dataframe in subsets**\n",
|
||||
"\n",
|
||||
"This operation is also referred to as the \"split-apply-combine\" operation, involving the following steps:\n",
|
||||
"\n",
|
||||
"* **Splitting** the data into groups based on some criteria\n",
|
||||
"* **Applying** a function to each group independently\n",
|
||||
"* **Combining** the results into a data structure\n",
|
||||
"\n",
|
||||
"<img src=\"img/splitApplyCombine.png\">\n",
|
||||
"\n",
|
||||
"Similar to SQL `GROUP BY`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"The example of the image in pandas syntax:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.DataFrame({'key':['A','B','C','A','B','C','A','B','C'],\n",
|
||||
" 'data': [0, 5, 10, 5, 10, 15, 10, 15, 20]})\n",
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Using the filtering and reductions operations we have seen in the previous notebooks, we could do something like:\n",
|
||||
"\n",
|
||||
"\n",
|
||||
" df[df['key'] == \"A\"].sum()\n",
|
||||
" df[df['key'] == \"B\"].sum()\n",
|
||||
" ...\n",
|
||||
"\n",
|
||||
"But pandas provides the `groupby` method to do this:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').aggregate(np.sum) # 'sum'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').sum()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"And many more methods are available. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"## And now applying this on some real data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We go back to the titanic survival data:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.read_csv(\"data/titanic.csv\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Using groupby(), calculate the average age for each sex.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Calculate the average survival ratio for all passengers.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"\n",
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Calculate this survival ratio for all passengers younger than 25 (remember: filtering/boolean indexing).\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Is there a difference in this survival ratio between the sexes? (tip: write the above calculation of the survival ratio as a function)\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Make a bar plot of the survival ratio for the different classes ('Pclass' column).\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"If you are ready, more groupby exercises can be found in the \"Advanced groupby operations\" notebook."
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"celltoolbar": "Nbtutor - export exercises",
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.8"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@ -0,0 +1,835 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Groupby operations"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "slide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"Some imports:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "-"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass\n",
|
||||
"\n",
|
||||
"pd.options.display.max_rows = 10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"## Recap: the groupby operation (split-apply-combine)\n",
|
||||
"\n",
|
||||
"The \"group by\" concept: we want to **apply the same function on subsets of your dataframe, based on some key to split the dataframe in subsets**\n",
|
||||
"\n",
|
||||
"This operation is also referred to as the \"split-apply-combine\" operation, involving the following steps:\n",
|
||||
"\n",
|
||||
"* **Splitting** the data into groups based on some criteria\n",
|
||||
"* **Applying** a function to each group independently\n",
|
||||
"* **Combining** the results into a data structure\n",
|
||||
"\n",
|
||||
"<img src=\"img/splitApplyCombine.png\">\n",
|
||||
"\n",
|
||||
"Similar to SQL `GROUP BY`"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"The example of the image in pandas syntax:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.DataFrame({'key':['A','B','C','A','B','C','A','B','C'],\n",
|
||||
" 'data': [0, 5, 10, 5, 10, 15, 10, 15, 20]})\n",
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Using the filtering and reductions operations we have seen in the previous notebooks, we could do something like:\n",
|
||||
"\n",
|
||||
"\n",
|
||||
" df[df['key'] == \"A\"].sum()\n",
|
||||
" df[df['key'] == \"B\"].sum()\n",
|
||||
" ...\n",
|
||||
"\n",
|
||||
"But pandas provides the `groupby` method to do this:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').aggregate('sum') # np.sum"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').sum()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Pandas does not only let you group by a column name. In `df.groupby(grouper)`, the `grouper` can be many things:\n",
|
||||
"\n",
|
||||
"- Series (or string indicating a column in df)\n",
|
||||
"- function (to be applied on the index)\n",
|
||||
"- dict : groups by values\n",
|
||||
"- levels=[], names of levels in a MultiIndex\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby(lambda x: x % 2).mean()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"slideshow": {
|
||||
"slide_type": "subslide"
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"## And now applying this on some real data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"These exercises are based on the [PyCon tutorial of Brandon Rhodes](https://github.com/brandon-rhodes/pycon-pandas-tutorial/) (so all credit to him!) and the datasets he prepared for that. You can download these data from here: [`titles.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKajNMa1pfSzN6Q3M) and [`cast.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKal9UYTJSR2ZhSW8) and put them in the `/data` folder."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"`cast` dataset: different roles played by actors/actresses in films\n",
|
||||
"\n",
|
||||
"- title: title of the film\n",
|
||||
"- name: name of the actor/actress\n",
|
||||
"- type: actor/actress\n",
|
||||
"- n: the order of the role (n=1: leading role)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"cast = pd.read_csv('data/cast.csv')\n",
|
||||
"cast.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"titles = pd.read_csv('data/titles.csv')\n",
|
||||
"titles.head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Using groupby(), plot the number of films that have been released each decade in the history of cinema.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"titles.groupby('year').sum()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations8.py\n",
|
||||
"titles.groupby(titles.year // 10 * 10).size().plot(kind='bar')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Use groupby() to plot the number of \"Hamlet\" films made each decade.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations9.py\n",
|
||||
"hamlet = titles[titles['title'] == 'Hamlet']\n",
|
||||
"hamlet.groupby(hamlet.year // 10 * 10).size().plot(kind='bar')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: How many leading (n=1) roles were available to actors, and how many to actresses, in each year of the 1950s?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations10.py\n",
|
||||
"cast1950 = cast[cast.year // 10 == 195]\n",
|
||||
"cast1950 = cast1950[cast1950.n == 1]\n",
|
||||
"cast1950.groupby(['year', 'type']).size()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List the 10 actors/actresses that have the most leading roles (n=1) since the 1990's.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations11.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Use groupby() to determine how many roles are listed for each of The Pink Panther movies.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations12.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List, in order by year, each of the films in which Frank Oz has played more than 1 role.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations13.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List each of the characters that Frank Oz has portrayed at least twice.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations15.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Transforms"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Sometimes you don't want to aggregate the groups, but transform the values in each group. This can be achieved with `transform`:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').transform('mean')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def normalize(group):\n",
|
||||
" return (group - group.mean()) / group.std()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').transform(normalize)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.groupby('key').transform('sum')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Add a column to the `cast` dataframe that indicates the number of roles for the film.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations21.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Calculate the ratio of leading actor and actress roles to the total number of leading roles per decade. \n",
|
||||
"</div>\n",
|
||||
"\n",
|
||||
    "Tip: you can do a groupby twice in two steps, once calculating the numbers, and then the ratios."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations22.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations23.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true,
|
||||
"scrolled": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations24.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Intermezzo: string manipulations"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Python strings have a lot of useful methods available to manipulate or check the content of the string:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s = 'Bradwurst'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.startswith('B')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"In pandas, those methods (together with some additional methods) are also available for string Series through the `.str` accessor:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s = pd.Series(['Bradwurst', 'Kartoffelsalat', 'Sauerkraut'])"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"s.str.startswith('B')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"For an overview of all string methods, see: http://pandas.pydata.org/pandas-docs/stable/api.html#string-handling"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: We already plotted the number of 'Hamlet' films released each decade, but not all titles are exactly called 'Hamlet'. Give an overview of the titles that contain 'Hamlet', and that start with 'Hamlet':\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations29.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations30.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: List the 10 movie titles with the longest name.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations31.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations32.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Value counts"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
    "A useful shortcut to calculate the number of occurrences of certain values is `value_counts` (this is somewhat equivalent to `df.groupby(key).size()`)\n",
|
||||
"\n",
|
||||
    "For example, what are the most occurring movie titles?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"titles.title.value_counts().head()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Which years saw the most films released?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations34.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Plot the number of released films over time\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations35.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Plot the number of \"Hamlet\" films made each decade.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations36.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: What are the 11 most common character names in movie history?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations37.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Which actors or actresses appeared in the most movies in the year 2010?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations38.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
" <b>EXERCISE</b>: Plot how many roles Brad Pitt has played in each year of his career.\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations39.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
    " <b>EXERCISE</b>: What are the 10 most common film titles that start with the word \"The Life\"?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations40.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<div class=\"alert alert-success\">\n",
|
||||
    " <b>EXERCISE</b>: How many leading (n=1) roles were available to actors, and how many to actresses, in the 1950s? And in the 2000s?\n",
|
||||
"</div>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations41.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"clear_cell": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %load snippets/04b - Advanced groupby operations42.py"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"celltoolbar": "Nbtutor - export exercises",
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.8"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,12 @@
|
|||
# Pandas Tutorial - contributing guide
|
||||
|
||||
This tutorial can be freely used, and I also welcome contributions.
|
||||
|
||||
Some notes (for the future me as well :-)):
|
||||
|
||||
- edit the 'solved' notebooks, the other ones (the ones used in the tutorial) are generated automatically using the `convert_notebooks.sh` script.
|
||||
- the exercises are cleared using the `nbtutor` notebook extension: https://github.com/jorisvandenbossche/nbtutor
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,125 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Finally, some more notes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"try:\n",
|
||||
" import seaborn\n",
|
||||
"except ImportError:\n",
|
||||
" pass"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## About the dtypes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"- Missing values (NaN) cast integer or boolean arrays to floats\n",
|
||||
"- The object dtype is the fallback\n",
|
||||
"- Some custom dtypes (Categorical, tz datetime (upcoming))\n",
|
||||
"- Some custom objects, eg Timestamp"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"dft = pd.DataFrame(dict(A = np.random.rand(3),\n",
|
||||
" B = 1,\n",
|
||||
" C = 'foo',\n",
|
||||
" D = pd.Timestamp('20010102'),\n",
|
||||
" E = pd.Series([1.0]*3).astype('float32'),\n",
|
||||
" F = False,\n",
|
||||
" G = pd.Series([1]*3,dtype='int8'),\n",
|
||||
" H = pd.Series(['a', 'b', 'a'], dtype='category')))\n",
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"A float64\n",
|
||||
"B int64\n",
|
||||
"C object\n",
|
||||
"D datetime64[ns]\n",
|
||||
"E float32\n",
|
||||
"F bool\n",
|
||||
"G int8\n",
|
||||
"H category\n",
|
||||
"dtype: object"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"dft.dtypes"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.4.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@ -0,0 +1,124 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"<CENTER>\n",
|
||||
" <header>\n",
|
||||
" <h1>Pandas Tutorial</h1>\n",
|
||||
" <h3>EuroScipy, Erlangen DE, August 24th, 2016</h3>\n",
|
||||
" <h2>Joris Van den Bossche</h2>\n",
|
||||
" <p></p>\n",
|
||||
"Source: <a href=\"https://github.com/jorisvandenbossche/pandas-tutorial\">https://github.com/jorisvandenbossche/pandas-tutorial</a>\n",
|
||||
" </header>\n",
|
||||
"</CENTER>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Two data files are not included in the repo, you can download them from: [`titles.csv`](https://drive.google.com/file/d/0B3G70MlBnCgKa0U4WFdWdGdVOFU/view?usp=sharing) and [`cast.csv`](https://drive.google.com/file/d/0B3G70MlBnCgKRzRmTWdQTUdjNnM/view?usp=sharing) and put them in the `/data` folder."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Requirements to run this tutorial"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"To follow this tutorial you need to have the following packages installed:\n",
|
||||
"\n",
|
||||
"- Python version 2.6-2.7 or 3.3-3.5\n",
|
||||
"- `pandas` version 0.18.0 or later: http://pandas.pydata.org/\n",
|
||||
"- `numpy` version 1.7 or later: http://www.numpy.org/\n",
|
||||
"- `matplotlib` version 1.3 or later: http://matplotlib.org/\n",
|
||||
"- `ipython` version 3.x with notebook support, or `ipython 4.x` combined with `jupyter`: http://ipython.org\n",
|
||||
"- `seaborn` (this is used for some plotting, but not necessary to follow the tutorial): http://stanford.edu/~mwaskom/software/seaborn/\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Downloading the tutorial materials"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"source": [
|
||||
"If you have git installed, you can get the material in this tutorial by cloning this repo:\n",
|
||||
"\n",
|
||||
" git clone https://github.com/jorisvandenbossche/pandas-tutorial.git\n",
|
||||
"\n",
|
||||
"As an alternative, you can download it as a zip file:\n",
|
||||
"https://github.com/jorisvandenbossche/pandas-tutorial/archive/master.zip.\n",
|
||||
    "I will probably make some changes before the start of the tutorial, so it is best to download\n",
|
||||
"the latest version then (or do a `git pull` if you are using git).\n",
|
||||
"\n",
|
||||
"Two data files are not included in the repo, you can download them from: [`titles.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKajNMa1pfSzN6Q3M) and [`cast.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKal9UYTJSR2ZhSW8) and put them in the `/data` folder."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Contents\n",
|
||||
"\n",
|
||||
"Beginners track:\n",
|
||||
"\n",
|
||||
"- [01 - Introduction - beginners.ipynb](01 - Introduction - beginners.ipynb)\n",
|
||||
"- [02 - Data structures](02 - Data structures.ipynb)\n",
|
||||
"- [03 - Indexing and selecting data](03 - Indexing and selecting data.ipynb)\n",
|
||||
"- [04 - Groupby operations](04 - Groupby operations.ipynb)\n",
|
||||
"\n",
|
||||
"Advanced track:\n",
|
||||
"\n",
|
||||
"- [03b - Some more advanced indexing](03b - Some more advanced indexing.ipynb)\n",
|
||||
"- [04b - Advanced groupby operations](04b - Advanced groupby operations.ipynb)\n",
|
||||
"- [05 - Time series data](05 - Time series data.ipynb)\n",
|
||||
"- [06 - Reshaping data](06 - Reshaping data.ipynb)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"celltoolbar": "Slideshow",
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.5.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
Copyright (c) 2015, Joris Van den Bossche
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
# EuroScipy 2016 Pandas Tutorial
|
||||
|
||||
This repository contains the material (notebooks, data) for the pandas tutorial at EuroScipy 2016. For previous versions of the tutorial (EuroScipy 2015), see the [releases page](https://github.com/jorisvandenbossche/pandas-tutorial/releases).
|
||||
|
||||
## Requirements to run this tutorial
|
||||
|
||||
To follow this tutorial you need to have the following packages installed:
|
||||
|
||||
- Python version 2.6-2.7 or 3.3-3.5
|
||||
- `pandas` version 0.18.0 or later: http://pandas.pydata.org/ (previous versions will work for most examples as well)
|
||||
- `numpy` version 1.7 or later: http://www.numpy.org/
|
||||
- `matplotlib` version 1.3 or later: http://matplotlib.org/
|
||||
- `ipython` version 3.x with notebook support, or `ipython 4.x` combined with `jupyter`: http://ipython.org
|
||||
- `seaborn` (this is used for some plotting, but not necessary to follow the tutorial): http://stanford.edu/~mwaskom/software/seaborn/
|
||||
|
||||
I recommend to use the [conda](http://conda.pydata.org/docs/intro.html) environment manager to install all the requirements
|
||||
(you can install [miniconda](http://conda.pydata.org/miniconda.html) or install the (very large) Anaconda software
|
||||
distribution, found at http://continuum.io/downloads).
|
||||
|
||||
Once this is installed, the following command will install all required packages in your Python environment:
|
||||
```
|
||||
conda install pandas jupyter seaborn
|
||||
```
|
||||
|
||||
But of course, using another distribution (e.g. Enthought Canopy) or pip is good as well, as long
|
||||
as you have the above packages installed.
|
||||
|
||||
|
||||
## Downloading the tutorial materials
|
||||
|
||||
If you have git installed, you can get the material in this tutorial by cloning this repo:
|
||||
|
||||
git clone https://github.com/jorisvandenbossche/pandas-tutorial.git
|
||||
|
||||
As an alternative, you can download it as a zip file:
|
||||
https://github.com/jorisvandenbossche/pandas-tutorial/archive/master.zip.
|
||||
I will probably make some changes before the start of the tutorial, so it is best to download
|
||||
the latest version then (or do a `git pull` if you are using git).
|
||||
|
||||
Two data files are not included in the repo, you can download them from: [`titles.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKajNMa1pfSzN6Q3M) and [`cast.csv`](https://drive.google.com/open?id=0B3G70MlBnCgKal9UYTJSR2ZhSW8) and put them in the `/data` folder.
|
||||
|
||||
## Content
|
||||
|
||||
To view the content on nbviewer:
|
||||
|
||||
- [Index](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/Index.ipynb)
|
||||
- [01 - Introduction](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/01%20-%20Introduction.ipynb)
|
||||
- [02 - Data structures](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/02%20-%20Data%20structures.ipynb)
|
||||
- [03 - Indexing and selecting data](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/03%20-%20Indexing%20and%20selecting%20data.ipynb)
|
||||
- [03b - Some more advanced indexing](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/03b%20-%20Some%20more%20advanced%20indexing.ipynb)
|
||||
- [04 - Groupby operations](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/04%20-%20Groupby%20operations.ipynb)
|
||||
- [05 - Time series data](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/05%20-%20Time%20series%20data.ipynb)
|
||||
- [06 - Reshaping data](http://nbviewer.ipython.org/github/jorisvandenbossche/pandas-tutorial/blob/master/06%20-%20Reshaping%20data.ipynb)
|
||||
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
#jupyter nbconvert --to=notebook --config nbconvert_config.py --output "02 - Data structures.ipynb" "solved - 02 - Data structures.ipynb"
|
||||
# jupyter nbconvert --to=notebook --output "03b - Some more advanced indexing.ipynb" "solved - 03b - Some more advanced indexing.ipynb"
|
||||
#jupyter nbconvert --to=notebook --config nbconvert_config.py --output "03 - Indexing and selecting data.ipynb" "solved - 03 - Indexing and selecting data.ipynb"
|
||||
#jupyter nbconvert --to=notebook --config nbconvert_config.py --output "04 - Groupby operations.ipynb" "solved - 04 - Groupby operations.ipynb"
|
||||
#jupyter nbconvert --to=notebook --config nbconvert_config.py --output "04b - Advanced groupby operations.ipynb" "solved - 04b - Advanced groupby operations.ipynb"
|
||||
jupyter nbconvert --to=notebook --config nbconvert_config.py --output "05 - Time series data.ipynb" "solved - 05 - Time series data.ipynb"
|
||||
jupyter nbconvert --to=notebook --config nbconvert_config.py --output "06 - Reshaping data.ipynb" "solved - 06 - Reshaping data.ipynb"
|
||||
jupyter nbconvert --to=notebook --config nbconvert_config.py --output "07 - Case study - air quality data.ipynb" "solved - 07 - Case study - air quality data.ipynb"
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,892 @@
|
|||
PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
|
||||
1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S
|
||||
2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C
|
||||
3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S
|
||||
4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S
|
||||
5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S
|
||||
6,0,3,"Moran, Mr. James",male,,0,0,330877,8.4583,,Q
|
||||
7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S
|
||||
8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S
|
||||
9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S
|
||||
10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C
|
||||
11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S
|
||||
12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S
|
||||
13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S
|
||||
14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S
|
||||
15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S
|
||||
16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16,,S
|
||||
17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q
|
||||
18,1,2,"Williams, Mr. Charles Eugene",male,,0,0,244373,13,,S
|
||||
19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18,,S
|
||||
20,1,3,"Masselmani, Mrs. Fatima",female,,0,0,2649,7.225,,C
|
||||
21,0,2,"Fynney, Mr. Joseph J",male,35,0,0,239865,26,,S
|
||||
22,1,2,"Beesley, Mr. Lawrence",male,34,0,0,248698,13,D56,S
|
||||
23,1,3,"McGowan, Miss. Anna ""Annie""",female,15,0,0,330923,8.0292,,Q
|
||||
24,1,1,"Sloper, Mr. William Thompson",male,28,0,0,113788,35.5,A6,S
|
||||
25,0,3,"Palsson, Miss. Torborg Danira",female,8,3,1,349909,21.075,,S
|
||||
26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38,1,5,347077,31.3875,,S
|
||||
27,0,3,"Emir, Mr. Farred Chehab",male,,0,0,2631,7.225,,C
|
||||
28,0,1,"Fortune, Mr. Charles Alexander",male,19,3,2,19950,263,C23 C25 C27,S
|
||||
29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
|
||||
30,0,3,"Todoroff, Mr. Lalio",male,,0,0,349216,7.8958,,S
|
||||
31,0,1,"Uruchurtu, Don. Manuel E",male,40,0,0,PC 17601,27.7208,,C
|
||||
32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,,1,0,PC 17569,146.5208,B78,C
|
||||
33,1,3,"Glynn, Miss. Mary Agatha",female,,0,0,335677,7.75,,Q
|
||||
34,0,2,"Wheadon, Mr. Edward H",male,66,0,0,C.A. 24579,10.5,,S
|
||||
35,0,1,"Meyer, Mr. Edgar Joseph",male,28,1,0,PC 17604,82.1708,,C
|
||||
36,0,1,"Holverson, Mr. Alexander Oskar",male,42,1,0,113789,52,,S
|
||||
37,1,3,"Mamee, Mr. Hanna",male,,0,0,2677,7.2292,,C
|
||||
38,0,3,"Cann, Mr. Ernest Charles",male,21,0,0,A./5. 2152,8.05,,S
|
||||
39,0,3,"Vander Planke, Miss. Augusta Maria",female,18,2,0,345764,18,,S
|
||||
40,1,3,"Nicola-Yarred, Miss. Jamila",female,14,1,0,2651,11.2417,,C
|
||||
41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40,1,0,7546,9.475,,S
|
||||
42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27,1,0,11668,21,,S
|
||||
43,0,3,"Kraeff, Mr. Theodor",male,,0,0,349253,7.8958,,C
|
||||
44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3,1,2,SC/Paris 2123,41.5792,,C
|
||||
45,1,3,"Devaney, Miss. Margaret Delia",female,19,0,0,330958,7.8792,,Q
|
||||
46,0,3,"Rogers, Mr. William John",male,,0,0,S.C./A.4. 23567,8.05,,S
|
||||
47,0,3,"Lennon, Mr. Denis",male,,1,0,370371,15.5,,Q
|
||||
48,1,3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
|
||||
49,0,3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
|
||||
50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18,1,0,349237,17.8,,S
|
||||
51,0,3,"Panula, Master. Juha Niilo",male,7,4,1,3101295,39.6875,,S
|
||||
52,0,3,"Nosworthy, Mr. Richard Cater",male,21,0,0,A/4. 39886,7.8,,S
|
||||
53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49,1,0,PC 17572,76.7292,D33,C
|
||||
54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29,1,0,2926,26,,S
|
||||
55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65,0,1,113509,61.9792,B30,C
|
||||
56,1,1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
|
||||
57,1,2,"Rugg, Miss. Emily",female,21,0,0,C.A. 31026,10.5,,S
|
||||
58,0,3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
|
||||
59,1,2,"West, Miss. Constance Mirium",female,5,1,2,C.A. 34651,27.75,,S
|
||||
60,0,3,"Goodwin, Master. William Frederick",male,11,5,2,CA 2144,46.9,,S
|
||||
61,0,3,"Sirayanian, Mr. Orsen",male,22,0,0,2669,7.2292,,C
|
||||
62,1,1,"Icard, Miss. Amelie",female,38,0,0,113572,80,B28,
|
||||
63,0,1,"Harris, Mr. Henry Birkhardt",male,45,1,0,36973,83.475,C83,S
|
||||
64,0,3,"Skoog, Master. Harald",male,4,3,2,347088,27.9,,S
|
||||
65,0,1,"Stewart, Mr. Albert A",male,,0,0,PC 17605,27.7208,,C
|
||||
66,1,3,"Moubarek, Master. Gerios",male,,1,1,2661,15.2458,,C
|
||||
67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29,0,0,C.A. 29395,10.5,F33,S
|
||||
68,0,3,"Crease, Mr. Ernest James",male,19,0,0,S.P. 3464,8.1583,,S
|
||||
69,1,3,"Andersson, Miss. Erna Alexandra",female,17,4,2,3101281,7.925,,S
|
||||
70,0,3,"Kink, Mr. Vincenz",male,26,2,0,315151,8.6625,,S
|
||||
71,0,2,"Jenkin, Mr. Stephen Curnow",male,32,0,0,C.A. 33111,10.5,,S
|
||||
72,0,3,"Goodwin, Miss. Lillian Amy",female,16,5,2,CA 2144,46.9,,S
|
||||
73,0,2,"Hood, Mr. Ambrose Jr",male,21,0,0,S.O.C. 14879,73.5,,S
|
||||
74,0,3,"Chronopoulos, Mr. Apostolos",male,26,1,0,2680,14.4542,,C
|
||||
75,1,3,"Bing, Mr. Lee",male,32,0,0,1601,56.4958,,S
|
||||
76,0,3,"Moen, Mr. Sigurd Hansen",male,25,0,0,348123,7.65,F G73,S
|
||||
77,0,3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
|
||||
78,0,3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
|
||||
79,1,2,"Caldwell, Master. Alden Gates",male,0.83,0,2,248738,29,,S
|
||||
80,1,3,"Dowdell, Miss. Elizabeth",female,30,0,0,364516,12.475,,S
|
||||
81,0,3,"Waelens, Mr. Achille",male,22,0,0,345767,9,,S
|
||||
82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29,0,0,345779,9.5,,S
|
||||
83,1,3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
|
||||
84,0,1,"Carrau, Mr. Francisco M",male,28,0,0,113059,47.1,,S
|
||||
85,1,2,"Ilett, Miss. Bertha",female,17,0,0,SO/C 14885,10.5,,S
|
||||
86,1,3,"Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson)",female,33,3,0,3101278,15.85,,S
|
||||
87,0,3,"Ford, Mr. William Neal",male,16,1,3,W./C. 6608,34.375,,S
|
||||
88,0,3,"Slocovski, Mr. Selman Francis",male,,0,0,SOTON/OQ 392086,8.05,,S
|
||||
89,1,1,"Fortune, Miss. Mabel Helen",female,23,3,2,19950,263,C23 C25 C27,S
|
||||
90,0,3,"Celotti, Mr. Francesco",male,24,0,0,343275,8.05,,S
|
||||
91,0,3,"Christmann, Mr. Emil",male,29,0,0,343276,8.05,,S
|
||||
92,0,3,"Andreasson, Mr. Paul Edvin",male,20,0,0,347466,7.8542,,S
|
||||
93,0,1,"Chaffee, Mr. Herbert Fuller",male,46,1,0,W.E.P. 5734,61.175,E31,S
|
||||
94,0,3,"Dean, Mr. Bertram Frank",male,26,1,2,C.A. 2315,20.575,,S
|
||||
95,0,3,"Coxon, Mr. Daniel",male,59,0,0,364500,7.25,,S
|
||||
96,0,3,"Shorney, Mr. Charles Joseph",male,,0,0,374910,8.05,,S
|
||||
97,0,1,"Goldschmidt, Mr. George B",male,71,0,0,PC 17754,34.6542,A5,C
|
||||
98,1,1,"Greenfield, Mr. William Bertram",male,23,0,1,PC 17759,63.3583,D10 D12,C
|
||||
99,1,2,"Doling, Mrs. John T (Ada Julia Bone)",female,34,0,1,231919,23,,S
|
||||
100,0,2,"Kantor, Mr. Sinai",male,34,1,0,244367,26,,S
|
||||
101,0,3,"Petranec, Miss. Matilda",female,28,0,0,349245,7.8958,,S
|
||||
102,0,3,"Petroff, Mr. Pastcho (""Pentcho"")",male,,0,0,349215,7.8958,,S
|
||||
103,0,1,"White, Mr. Richard Frasar",male,21,0,1,35281,77.2875,D26,S
|
||||
104,0,3,"Johansson, Mr. Gustaf Joel",male,33,0,0,7540,8.6542,,S
|
||||
105,0,3,"Gustafsson, Mr. Anders Vilhelm",male,37,2,0,3101276,7.925,,S
|
||||
106,0,3,"Mionoff, Mr. Stoytcho",male,28,0,0,349207,7.8958,,S
|
||||
107,1,3,"Salkjelsvik, Miss. Anna Kristine",female,21,0,0,343120,7.65,,S
|
||||
108,1,3,"Moss, Mr. Albert Johan",male,,0,0,312991,7.775,,S
|
||||
109,0,3,"Rekic, Mr. Tido",male,38,0,0,349249,7.8958,,S
|
||||
110,1,3,"Moran, Miss. Bertha",female,,1,0,371110,24.15,,Q
|
||||
111,0,1,"Porter, Mr. Walter Chamberlain",male,47,0,0,110465,52,C110,S
|
||||
112,0,3,"Zabour, Miss. Hileni",female,14.5,1,0,2665,14.4542,,C
|
||||
113,0,3,"Barton, Mr. David John",male,22,0,0,324669,8.05,,S
|
||||
114,0,3,"Jussila, Miss. Katriina",female,20,1,0,4136,9.825,,S
|
||||
115,0,3,"Attalah, Miss. Malake",female,17,0,0,2627,14.4583,,C
|
||||
116,0,3,"Pekoniemi, Mr. Edvard",male,21,0,0,STON/O 2. 3101294,7.925,,S
|
||||
117,0,3,"Connors, Mr. Patrick",male,70.5,0,0,370369,7.75,,Q
|
||||
118,0,2,"Turpin, Mr. William John Robert",male,29,1,0,11668,21,,S
|
||||
119,0,1,"Baxter, Mr. Quigg Edmond",male,24,0,1,PC 17558,247.5208,B58 B60,C
|
||||
120,0,3,"Andersson, Miss. Ellis Anna Maria",female,2,4,2,347082,31.275,,S
|
||||
121,0,2,"Hickman, Mr. Stanley George",male,21,2,0,S.O.C. 14879,73.5,,S
|
||||
122,0,3,"Moore, Mr. Leonard Charles",male,,0,0,A4. 54510,8.05,,S
|
||||
123,0,2,"Nasser, Mr. Nicholas",male,32.5,1,0,237736,30.0708,,C
|
||||
124,1,2,"Webber, Miss. Susan",female,32.5,0,0,27267,13,E101,S
|
||||
125,0,1,"White, Mr. Percival Wayland",male,54,0,1,35281,77.2875,D26,S
|
||||
126,1,3,"Nicola-Yarred, Master. Elias",male,12,1,0,2651,11.2417,,C
|
||||
127,0,3,"McMahon, Mr. Martin",male,,0,0,370372,7.75,,Q
|
||||
128,1,3,"Madsen, Mr. Fridtjof Arne",male,24,0,0,C 17369,7.1417,,S
|
||||
129,1,3,"Peter, Miss. Anna",female,,1,1,2668,22.3583,F E69,C
|
||||
130,0,3,"Ekstrom, Mr. Johan",male,45,0,0,347061,6.975,,S
|
||||
131,0,3,"Drazenoic, Mr. Jozef",male,33,0,0,349241,7.8958,,C
|
||||
132,0,3,"Coelho, Mr. Domingos Fernandeo",male,20,0,0,SOTON/O.Q. 3101307,7.05,,S
|
||||
133,0,3,"Robins, Mrs. Alexander A (Grace Charity Laury)",female,47,1,0,A/5. 3337,14.5,,S
|
||||
134,1,2,"Weisz, Mrs. Leopold (Mathilde Francoise Pede)",female,29,1,0,228414,26,,S
|
||||
135,0,2,"Sobey, Mr. Samuel James Hayden",male,25,0,0,C.A. 29178,13,,S
|
||||
136,0,2,"Richard, Mr. Emile",male,23,0,0,SC/PARIS 2133,15.0458,,C
|
||||
137,1,1,"Newsom, Miss. Helen Monypeny",female,19,0,2,11752,26.2833,D47,S
|
||||
138,0,1,"Futrelle, Mr. Jacques Heath",male,37,1,0,113803,53.1,C123,S
|
||||
139,0,3,"Osen, Mr. Olaf Elon",male,16,0,0,7534,9.2167,,S
|
||||
140,0,1,"Giglio, Mr. Victor",male,24,0,0,PC 17593,79.2,B86,C
|
||||
141,0,3,"Boulos, Mrs. Joseph (Sultana)",female,,0,2,2678,15.2458,,C
|
||||
142,1,3,"Nysten, Miss. Anna Sofia",female,22,0,0,347081,7.75,,S
|
||||
143,1,3,"Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck)",female,24,1,0,STON/O2. 3101279,15.85,,S
|
||||
144,0,3,"Burke, Mr. Jeremiah",male,19,0,0,365222,6.75,,Q
|
||||
145,0,2,"Andrew, Mr. Edgardo Samuel",male,18,0,0,231945,11.5,,S
|
||||
146,0,2,"Nicholls, Mr. Joseph Charles",male,19,1,1,C.A. 33112,36.75,,S
|
||||
147,1,3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27,0,0,350043,7.7958,,S
|
||||
148,0,3,"Ford, Miss. Robina Maggie ""Ruby""",female,9,2,2,W./C. 6608,34.375,,S
|
||||
149,0,2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36.5,0,2,230080,26,F2,S
|
||||
150,0,2,"Byles, Rev. Thomas Roussel Davids",male,42,0,0,244310,13,,S
|
||||
151,0,2,"Bateman, Rev. Robert James",male,51,0,0,S.O.P. 1166,12.525,,S
|
||||
152,1,1,"Pears, Mrs. Thomas (Edith Wearne)",female,22,1,0,113776,66.6,C2,S
|
||||
153,0,3,"Meo, Mr. Alfonzo",male,55.5,0,0,A.5. 11206,8.05,,S
|
||||
154,0,3,"van Billiard, Mr. Austin Blyler",male,40.5,0,2,A/5. 851,14.5,,S
|
||||
155,0,3,"Olsen, Mr. Ole Martin",male,,0,0,Fa 265302,7.3125,,S
|
||||
156,0,1,"Williams, Mr. Charles Duane",male,51,0,1,PC 17597,61.3792,,C
|
||||
157,1,3,"Gilnagh, Miss. Katherine ""Katie""",female,16,0,0,35851,7.7333,,Q
|
||||
158,0,3,"Corn, Mr. Harry",male,30,0,0,SOTON/OQ 392090,8.05,,S
|
||||
159,0,3,"Smiljanic, Mr. Mile",male,,0,0,315037,8.6625,,S
|
||||
160,0,3,"Sage, Master. Thomas Henry",male,,8,2,CA. 2343,69.55,,S
|
||||
161,0,3,"Cribb, Mr. John Hatfield",male,44,0,1,371362,16.1,,S
|
||||
162,1,2,"Watt, Mrs. James (Elizabeth ""Bessie"" Inglis Milne)",female,40,0,0,C.A. 33595,15.75,,S
|
||||
163,0,3,"Bengtsson, Mr. John Viktor",male,26,0,0,347068,7.775,,S
|
||||
164,0,3,"Calic, Mr. Jovo",male,17,0,0,315093,8.6625,,S
|
||||
165,0,3,"Panula, Master. Eino Viljami",male,1,4,1,3101295,39.6875,,S
|
||||
166,1,3,"Goldsmith, Master. Frank John William ""Frankie""",male,9,0,2,363291,20.525,,S
|
||||
167,1,1,"Chibnall, Mrs. (Edith Martha Bowerman)",female,,0,1,113505,55,E33,S
|
||||
168,0,3,"Skoog, Mrs. William (Anna Bernhardina Karlsson)",female,45,1,4,347088,27.9,,S
|
||||
169,0,1,"Baumann, Mr. John D",male,,0,0,PC 17318,25.925,,S
|
||||
170,0,3,"Ling, Mr. Lee",male,28,0,0,1601,56.4958,,S
|
||||
171,0,1,"Van der hoef, Mr. Wyckoff",male,61,0,0,111240,33.5,B19,S
|
||||
172,0,3,"Rice, Master. Arthur",male,4,4,1,382652,29.125,,Q
|
||||
173,1,3,"Johnson, Miss. Eleanor Ileen",female,1,1,1,347742,11.1333,,S
|
||||
174,0,3,"Sivola, Mr. Antti Wilhelm",male,21,0,0,STON/O 2. 3101280,7.925,,S
|
||||
175,0,1,"Smith, Mr. James Clinch",male,56,0,0,17764,30.6958,A7,C
|
||||
176,0,3,"Klasen, Mr. Klas Albin",male,18,1,1,350404,7.8542,,S
|
||||
177,0,3,"Lefebre, Master. Henry Forbes",male,,3,1,4133,25.4667,,S
|
||||
178,0,1,"Isham, Miss. Ann Elizabeth",female,50,0,0,PC 17595,28.7125,C49,C
|
||||
179,0,2,"Hale, Mr. Reginald",male,30,0,0,250653,13,,S
|
||||
180,0,3,"Leonard, Mr. Lionel",male,36,0,0,LINE,0,,S
|
||||
181,0,3,"Sage, Miss. Constance Gladys",female,,8,2,CA. 2343,69.55,,S
|
||||
182,0,2,"Pernot, Mr. Rene",male,,0,0,SC/PARIS 2131,15.05,,C
|
||||
183,0,3,"Asplund, Master. Clarence Gustaf Hugo",male,9,4,2,347077,31.3875,,S
|
||||
184,1,2,"Becker, Master. Richard F",male,1,2,1,230136,39,F4,S
|
||||
185,1,3,"Kink-Heilmann, Miss. Luise Gretchen",female,4,0,2,315153,22.025,,S
|
||||
186,0,1,"Rood, Mr. Hugh Roscoe",male,,0,0,113767,50,A32,S
|
||||
187,1,3,"O'Brien, Mrs. Thomas (Johanna ""Hannah"" Godfrey)",female,,1,0,370365,15.5,,Q
|
||||
188,1,1,"Romaine, Mr. Charles Hallace (""Mr C Rolmane"")",male,45,0,0,111428,26.55,,S
|
||||
189,0,3,"Bourke, Mr. John",male,40,1,1,364849,15.5,,Q
|
||||
190,0,3,"Turcin, Mr. Stjepan",male,36,0,0,349247,7.8958,,S
|
||||
191,1,2,"Pinsky, Mrs. (Rosa)",female,32,0,0,234604,13,,S
|
||||
192,0,2,"Carbines, Mr. William",male,19,0,0,28424,13,,S
|
||||
193,1,3,"Andersen-Jensen, Miss. Carla Christine Nielsine",female,19,1,0,350046,7.8542,,S
|
||||
194,1,2,"Navratil, Master. Michel M",male,3,1,1,230080,26,F2,S
|
||||
195,1,1,"Brown, Mrs. James Joseph (Margaret Tobin)",female,44,0,0,PC 17610,27.7208,B4,C
|
||||
196,1,1,"Lurette, Miss. Elise",female,58,0,0,PC 17569,146.5208,B80,C
|
||||
197,0,3,"Mernagh, Mr. Robert",male,,0,0,368703,7.75,,Q
|
||||
198,0,3,"Olsen, Mr. Karl Siegwart Andreas",male,42,0,1,4579,8.4042,,S
|
||||
199,1,3,"Madigan, Miss. Margaret ""Maggie""",female,,0,0,370370,7.75,,Q
|
||||
200,0,2,"Yrois, Miss. Henriette (""Mrs Harbeck"")",female,24,0,0,248747,13,,S
|
||||
201,0,3,"Vande Walle, Mr. Nestor Cyriel",male,28,0,0,345770,9.5,,S
|
||||
202,0,3,"Sage, Mr. Frederick",male,,8,2,CA. 2343,69.55,,S
|
||||
203,0,3,"Johanson, Mr. Jakob Alfred",male,34,0,0,3101264,6.4958,,S
|
||||
204,0,3,"Youseff, Mr. Gerious",male,45.5,0,0,2628,7.225,,C
|
||||
205,1,3,"Cohen, Mr. Gurshon ""Gus""",male,18,0,0,A/5 3540,8.05,,S
|
||||
206,0,3,"Strom, Miss. Telma Matilda",female,2,0,1,347054,10.4625,G6,S
|
||||
207,0,3,"Backstrom, Mr. Karl Alfred",male,32,1,0,3101278,15.85,,S
|
||||
208,1,3,"Albimona, Mr. Nassef Cassem",male,26,0,0,2699,18.7875,,C
|
||||
209,1,3,"Carr, Miss. Helen ""Ellen""",female,16,0,0,367231,7.75,,Q
|
||||
210,1,1,"Blank, Mr. Henry",male,40,0,0,112277,31,A31,C
|
||||
211,0,3,"Ali, Mr. Ahmed",male,24,0,0,SOTON/O.Q. 3101311,7.05,,S
|
||||
212,1,2,"Cameron, Miss. Clear Annie",female,35,0,0,F.C.C. 13528,21,,S
|
||||
213,0,3,"Perkin, Mr. John Henry",male,22,0,0,A/5 21174,7.25,,S
|
||||
214,0,2,"Givard, Mr. Hans Kristensen",male,30,0,0,250646,13,,S
|
||||
215,0,3,"Kiernan, Mr. Philip",male,,1,0,367229,7.75,,Q
|
||||
216,1,1,"Newell, Miss. Madeleine",female,31,1,0,35273,113.275,D36,C
|
||||
217,1,3,"Honkanen, Miss. Eliina",female,27,0,0,STON/O2. 3101283,7.925,,S
|
||||
218,0,2,"Jacobsohn, Mr. Sidney Samuel",male,42,1,0,243847,27,,S
|
||||
219,1,1,"Bazzani, Miss. Albina",female,32,0,0,11813,76.2917,D15,C
|
||||
220,0,2,"Harris, Mr. Walter",male,30,0,0,W/C 14208,10.5,,S
|
||||
221,1,3,"Sunderland, Mr. Victor Francis",male,16,0,0,SOTON/OQ 392089,8.05,,S
|
||||
222,0,2,"Bracken, Mr. James H",male,27,0,0,220367,13,,S
|
||||
223,0,3,"Green, Mr. George Henry",male,51,0,0,21440,8.05,,S
|
||||
224,0,3,"Nenkoff, Mr. Christo",male,,0,0,349234,7.8958,,S
|
||||
225,1,1,"Hoyt, Mr. Frederick Maxfield",male,38,1,0,19943,90,C93,S
|
||||
226,0,3,"Berglund, Mr. Karl Ivar Sven",male,22,0,0,PP 4348,9.35,,S
|
||||
227,1,2,"Mellors, Mr. William John",male,19,0,0,SW/PP 751,10.5,,S
|
||||
228,0,3,"Lovell, Mr. John Hall (""Henry"")",male,20.5,0,0,A/5 21173,7.25,,S
|
||||
229,0,2,"Fahlstrom, Mr. Arne Jonas",male,18,0,0,236171,13,,S
|
||||
230,0,3,"Lefebre, Miss. Mathilde",female,,3,1,4133,25.4667,,S
|
||||
231,1,1,"Harris, Mrs. Henry Birkhardt (Irene Wallach)",female,35,1,0,36973,83.475,C83,S
|
||||
232,0,3,"Larsson, Mr. Bengt Edvin",male,29,0,0,347067,7.775,,S
|
||||
233,0,2,"Sjostedt, Mr. Ernst Adolf",male,59,0,0,237442,13.5,,S
|
||||
234,1,3,"Asplund, Miss. Lillian Gertrud",female,5,4,2,347077,31.3875,,S
|
||||
235,0,2,"Leyson, Mr. Robert William Norman",male,24,0,0,C.A. 29566,10.5,,S
|
||||
236,0,3,"Harknett, Miss. Alice Phoebe",female,,0,0,W./C. 6609,7.55,,S
|
||||
237,0,2,"Hold, Mr. Stephen",male,44,1,0,26707,26,,S
|
||||
238,1,2,"Collyer, Miss. Marjorie ""Lottie""",female,8,0,2,C.A. 31921,26.25,,S
|
||||
239,0,2,"Pengelly, Mr. Frederick William",male,19,0,0,28665,10.5,,S
|
||||
240,0,2,"Hunt, Mr. George Henry",male,33,0,0,SCO/W 1585,12.275,,S
|
||||
241,0,3,"Zabour, Miss. Thamine",female,,1,0,2665,14.4542,,C
|
||||
242,1,3,"Murphy, Miss. Katherine ""Kate""",female,,1,0,367230,15.5,,Q
|
||||
243,0,2,"Coleridge, Mr. Reginald Charles",male,29,0,0,W./C. 14263,10.5,,S
|
||||
244,0,3,"Maenpaa, Mr. Matti Alexanteri",male,22,0,0,STON/O 2. 3101275,7.125,,S
|
||||
245,0,3,"Attalah, Mr. Sleiman",male,30,0,0,2694,7.225,,C
|
||||
246,0,1,"Minahan, Dr. William Edward",male,44,2,0,19928,90,C78,Q
|
||||
247,0,3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25,0,0,347071,7.775,,S
|
||||
248,1,2,"Hamalainen, Mrs. William (Anna)",female,24,0,2,250649,14.5,,S
|
||||
249,1,1,"Beckwith, Mr. Richard Leonard",male,37,1,1,11751,52.5542,D35,S
|
||||
250,0,2,"Carter, Rev. Ernest Courtenay",male,54,1,0,244252,26,,S
|
||||
251,0,3,"Reed, Mr. James George",male,,0,0,362316,7.25,,S
|
||||
252,0,3,"Strom, Mrs. Wilhelm (Elna Matilda Persson)",female,29,1,1,347054,10.4625,G6,S
|
||||
253,0,1,"Stead, Mr. William Thomas",male,62,0,0,113514,26.55,C87,S
|
||||
254,0,3,"Lobb, Mr. William Arthur",male,30,1,0,A/5. 3336,16.1,,S
|
||||
255,0,3,"Rosblom, Mrs. Viktor (Helena Wilhelmina)",female,41,0,2,370129,20.2125,,S
|
||||
256,1,3,"Touma, Mrs. Darwis (Hanne Youssef Razi)",female,29,0,2,2650,15.2458,,C
|
||||
257,1,1,"Thorne, Mrs. Gertrude Maybelle",female,,0,0,PC 17585,79.2,,C
|
||||
258,1,1,"Cherry, Miss. Gladys",female,30,0,0,110152,86.5,B77,S
|
||||
259,1,1,"Ward, Miss. Anna",female,35,0,0,PC 17755,512.3292,,C
|
||||
260,1,2,"Parrish, Mrs. (Lutie Davis)",female,50,0,1,230433,26,,S
|
||||
261,0,3,"Smith, Mr. Thomas",male,,0,0,384461,7.75,,Q
|
||||
262,1,3,"Asplund, Master. Edvin Rojj Felix",male,3,4,2,347077,31.3875,,S
|
||||
263,0,1,"Taussig, Mr. Emil",male,52,1,1,110413,79.65,E67,S
|
||||
264,0,1,"Harrison, Mr. William",male,40,0,0,112059,0,B94,S
|
||||
265,0,3,"Henry, Miss. Delia",female,,0,0,382649,7.75,,Q
|
||||
266,0,2,"Reeves, Mr. David",male,36,0,0,C.A. 17248,10.5,,S
|
||||
267,0,3,"Panula, Mr. Ernesti Arvid",male,16,4,1,3101295,39.6875,,S
|
||||
268,1,3,"Persson, Mr. Ernst Ulrik",male,25,1,0,347083,7.775,,S
|
||||
269,1,1,"Graham, Mrs. William Thompson (Edith Junkins)",female,58,0,1,PC 17582,153.4625,C125,S
|
||||
270,1,1,"Bissette, Miss. Amelia",female,35,0,0,PC 17760,135.6333,C99,S
|
||||
271,0,1,"Cairns, Mr. Alexander",male,,0,0,113798,31,,S
|
||||
272,1,3,"Tornquist, Mr. William Henry",male,25,0,0,LINE,0,,S
|
||||
273,1,2,"Mellinger, Mrs. (Elizabeth Anne Maidment)",female,41,0,1,250644,19.5,,S
|
||||
274,0,1,"Natsch, Mr. Charles H",male,37,0,1,PC 17596,29.7,C118,C
|
||||
275,1,3,"Healy, Miss. Hanora ""Nora""",female,,0,0,370375,7.75,,Q
|
||||
276,1,1,"Andrews, Miss. Kornelia Theodosia",female,63,1,0,13502,77.9583,D7,S
|
||||
277,0,3,"Lindblom, Miss. Augusta Charlotta",female,45,0,0,347073,7.75,,S
|
||||
278,0,2,"Parkes, Mr. Francis ""Frank""",male,,0,0,239853,0,,S
|
||||
279,0,3,"Rice, Master. Eric",male,7,4,1,382652,29.125,,Q
|
||||
280,1,3,"Abbott, Mrs. Stanton (Rosa Hunt)",female,35,1,1,C.A. 2673,20.25,,S
|
||||
281,0,3,"Duane, Mr. Frank",male,65,0,0,336439,7.75,,Q
|
||||
282,0,3,"Olsson, Mr. Nils Johan Goransson",male,28,0,0,347464,7.8542,,S
|
||||
283,0,3,"de Pelsmaeker, Mr. Alfons",male,16,0,0,345778,9.5,,S
|
||||
284,1,3,"Dorking, Mr. Edward Arthur",male,19,0,0,A/5. 10482,8.05,,S
|
||||
285,0,1,"Smith, Mr. Richard William",male,,0,0,113056,26,A19,S
|
||||
286,0,3,"Stankovic, Mr. Ivan",male,33,0,0,349239,8.6625,,C
|
||||
287,1,3,"de Mulder, Mr. Theodore",male,30,0,0,345774,9.5,,S
|
||||
288,0,3,"Naidenoff, Mr. Penko",male,22,0,0,349206,7.8958,,S
|
||||
289,1,2,"Hosono, Mr. Masabumi",male,42,0,0,237798,13,,S
|
||||
290,1,3,"Connolly, Miss. Kate",female,22,0,0,370373,7.75,,Q
|
||||
291,1,1,"Barber, Miss. Ellen ""Nellie""",female,26,0,0,19877,78.85,,S
|
||||
292,1,1,"Bishop, Mrs. Dickinson H (Helen Walton)",female,19,1,0,11967,91.0792,B49,C
|
||||
293,0,2,"Levy, Mr. Rene Jacques",male,36,0,0,SC/Paris 2163,12.875,D,C
|
||||
294,0,3,"Haas, Miss. Aloisia",female,24,0,0,349236,8.85,,S
|
||||
295,0,3,"Mineff, Mr. Ivan",male,24,0,0,349233,7.8958,,S
|
||||
296,0,1,"Lewy, Mr. Ervin G",male,,0,0,PC 17612,27.7208,,C
|
||||
297,0,3,"Hanna, Mr. Mansour",male,23.5,0,0,2693,7.2292,,C
|
||||
298,0,1,"Allison, Miss. Helen Loraine",female,2,1,2,113781,151.55,C22 C26,S
|
||||
299,1,1,"Saalfeld, Mr. Adolphe",male,,0,0,19988,30.5,C106,S
|
||||
300,1,1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50,0,1,PC 17558,247.5208,B58 B60,C
|
||||
301,1,3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,,0,0,9234,7.75,,Q
|
||||
302,1,3,"McCoy, Mr. Bernard",male,,2,0,367226,23.25,,Q
|
||||
303,0,3,"Johnson, Mr. William Cahoone Jr",male,19,0,0,LINE,0,,S
|
||||
304,1,2,"Keane, Miss. Nora A",female,,0,0,226593,12.35,E101,Q
|
||||
305,0,3,"Williams, Mr. Howard Hugh ""Harry""",male,,0,0,A/5 2466,8.05,,S
|
||||
306,1,1,"Allison, Master. Hudson Trevor",male,0.92,1,2,113781,151.55,C22 C26,S
|
||||
307,1,1,"Fleming, Miss. Margaret",female,,0,0,17421,110.8833,,C
|
||||
308,1,1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17,1,0,PC 17758,108.9,C65,C
|
||||
309,0,2,"Abelson, Mr. Samuel",male,30,1,0,P/PP 3381,24,,C
|
||||
310,1,1,"Francatelli, Miss. Laura Mabel",female,30,0,0,PC 17485,56.9292,E36,C
|
||||
311,1,1,"Hays, Miss. Margaret Bechstein",female,24,0,0,11767,83.1583,C54,C
|
||||
312,1,1,"Ryerson, Miss. Emily Borie",female,18,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
313,0,2,"Lahtinen, Mrs. William (Anna Sylfven)",female,26,1,1,250651,26,,S
|
||||
314,0,3,"Hendekovic, Mr. Ignjac",male,28,0,0,349243,7.8958,,S
|
||||
315,0,2,"Hart, Mr. Benjamin",male,43,1,1,F.C.C. 13529,26.25,,S
|
||||
316,1,3,"Nilsson, Miss. Helmina Josefina",female,26,0,0,347470,7.8542,,S
|
||||
317,1,2,"Kantor, Mrs. Sinai (Miriam Sternin)",female,24,1,0,244367,26,,S
|
||||
318,0,2,"Moraweck, Dr. Ernest",male,54,0,0,29011,14,,S
|
||||
319,1,1,"Wick, Miss. Mary Natalie",female,31,0,2,36928,164.8667,C7,S
|
||||
320,1,1,"Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone)",female,40,1,1,16966,134.5,E34,C
|
||||
321,0,3,"Dennis, Mr. Samuel",male,22,0,0,A/5 21172,7.25,,S
|
||||
322,0,3,"Danoff, Mr. Yoto",male,27,0,0,349219,7.8958,,S
|
||||
323,1,2,"Slayter, Miss. Hilda Mary",female,30,0,0,234818,12.35,,Q
|
||||
324,1,2,"Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh)",female,22,1,1,248738,29,,S
|
||||
325,0,3,"Sage, Mr. George John Jr",male,,8,2,CA. 2343,69.55,,S
|
||||
326,1,1,"Young, Miss. Marie Grice",female,36,0,0,PC 17760,135.6333,C32,C
|
||||
327,0,3,"Nysveen, Mr. Johan Hansen",male,61,0,0,345364,6.2375,,S
|
||||
328,1,2,"Ball, Mrs. (Ada E Hall)",female,36,0,0,28551,13,D,S
|
||||
329,1,3,"Goldsmith, Mrs. Frank John (Emily Alice Brown)",female,31,1,1,363291,20.525,,S
|
||||
330,1,1,"Hippach, Miss. Jean Gertrude",female,16,0,1,111361,57.9792,B18,C
|
||||
331,1,3,"McCoy, Miss. Agnes",female,,2,0,367226,23.25,,Q
|
||||
332,0,1,"Partner, Mr. Austen",male,45.5,0,0,113043,28.5,C124,S
|
||||
333,0,1,"Graham, Mr. George Edward",male,38,0,1,PC 17582,153.4625,C91,S
|
||||
334,0,3,"Vander Planke, Mr. Leo Edmondus",male,16,2,0,345764,18,,S
|
||||
335,1,1,"Frauenthal, Mrs. Henry William (Clara Heinsheimer)",female,,1,0,PC 17611,133.65,,S
|
||||
336,0,3,"Denkoff, Mr. Mitto",male,,0,0,349225,7.8958,,S
|
||||
337,0,1,"Pears, Mr. Thomas Clinton",male,29,1,0,113776,66.6,C2,S
|
||||
338,1,1,"Burns, Miss. Elizabeth Margaret",female,41,0,0,16966,134.5,E40,C
|
||||
339,1,3,"Dahl, Mr. Karl Edwart",male,45,0,0,7598,8.05,,S
|
||||
340,0,1,"Blackwell, Mr. Stephen Weart",male,45,0,0,113784,35.5,T,S
|
||||
341,1,2,"Navratil, Master. Edmond Roger",male,2,1,1,230080,26,F2,S
|
||||
342,1,1,"Fortune, Miss. Alice Elizabeth",female,24,3,2,19950,263,C23 C25 C27,S
|
||||
343,0,2,"Collander, Mr. Erik Gustaf",male,28,0,0,248740,13,,S
|
||||
344,0,2,"Sedgwick, Mr. Charles Frederick Waddington",male,25,0,0,244361,13,,S
|
||||
345,0,2,"Fox, Mr. Stanley Hubert",male,36,0,0,229236,13,,S
|
||||
346,1,2,"Brown, Miss. Amelia ""Mildred""",female,24,0,0,248733,13,F33,S
|
||||
347,1,2,"Smith, Miss. Marion Elsie",female,40,0,0,31418,13,,S
|
||||
348,1,3,"Davison, Mrs. Thomas Henry (Mary E Finck)",female,,1,0,386525,16.1,,S
|
||||
349,1,3,"Coutts, Master. William Loch ""William""",male,3,1,1,C.A. 37671,15.9,,S
|
||||
350,0,3,"Dimic, Mr. Jovan",male,42,0,0,315088,8.6625,,S
|
||||
351,0,3,"Odahl, Mr. Nils Martin",male,23,0,0,7267,9.225,,S
|
||||
352,0,1,"Williams-Lambert, Mr. Fletcher Fellows",male,,0,0,113510,35,C128,S
|
||||
353,0,3,"Elias, Mr. Tannous",male,15,1,1,2695,7.2292,,C
|
||||
354,0,3,"Arnold-Franchi, Mr. Josef",male,25,1,0,349237,17.8,,S
|
||||
355,0,3,"Yousif, Mr. Wazli",male,,0,0,2647,7.225,,C
|
||||
356,0,3,"Vanden Steen, Mr. Leo Peter",male,28,0,0,345783,9.5,,S
|
||||
357,1,1,"Bowerman, Miss. Elsie Edith",female,22,0,1,113505,55,E33,S
|
||||
358,0,2,"Funk, Miss. Annie Clemmer",female,38,0,0,237671,13,,S
|
||||
359,1,3,"McGovern, Miss. Mary",female,,0,0,330931,7.8792,,Q
|
||||
360,1,3,"Mockler, Miss. Helen Mary ""Ellie""",female,,0,0,330980,7.8792,,Q
|
||||
361,0,3,"Skoog, Mr. Wilhelm",male,40,1,4,347088,27.9,,S
|
||||
362,0,2,"del Carlo, Mr. Sebastiano",male,29,1,0,SC/PARIS 2167,27.7208,,C
|
||||
363,0,3,"Barbara, Mrs. (Catherine David)",female,45,0,1,2691,14.4542,,C
|
||||
364,0,3,"Asim, Mr. Adola",male,35,0,0,SOTON/O.Q. 3101310,7.05,,S
|
||||
365,0,3,"O'Brien, Mr. Thomas",male,,1,0,370365,15.5,,Q
|
||||
366,0,3,"Adahl, Mr. Mauritz Nils Martin",male,30,0,0,C 7076,7.25,,S
|
||||
367,1,1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60,1,0,110813,75.25,D37,C
|
||||
368,1,3,"Moussa, Mrs. (Mantoura Boulos)",female,,0,0,2626,7.2292,,C
|
||||
369,1,3,"Jermyn, Miss. Annie",female,,0,0,14313,7.75,,Q
|
||||
370,1,1,"Aubart, Mme. Leontine Pauline",female,24,0,0,PC 17477,69.3,B35,C
|
||||
371,1,1,"Harder, Mr. George Achilles",male,25,1,0,11765,55.4417,E50,C
|
||||
372,0,3,"Wiklund, Mr. Jakob Alfred",male,18,1,0,3101267,6.4958,,S
|
||||
373,0,3,"Beavan, Mr. William Thomas",male,19,0,0,323951,8.05,,S
|
||||
374,0,1,"Ringhini, Mr. Sante",male,22,0,0,PC 17760,135.6333,,C
|
||||
375,0,3,"Palsson, Miss. Stina Viola",female,3,3,1,349909,21.075,,S
|
||||
376,1,1,"Meyer, Mrs. Edgar Joseph (Leila Saks)",female,,1,0,PC 17604,82.1708,,C
|
||||
377,1,3,"Landergren, Miss. Aurora Adelia",female,22,0,0,C 7077,7.25,,S
|
||||
378,0,1,"Widener, Mr. Harry Elkins",male,27,0,2,113503,211.5,C82,C
|
||||
379,0,3,"Betros, Mr. Tannous",male,20,0,0,2648,4.0125,,C
|
||||
380,0,3,"Gustafsson, Mr. Karl Gideon",male,19,0,0,347069,7.775,,S
|
||||
381,1,1,"Bidois, Miss. Rosalie",female,42,0,0,PC 17757,227.525,,C
|
||||
382,1,3,"Nakid, Miss. Maria (""Mary"")",female,1,0,2,2653,15.7417,,C
|
||||
383,0,3,"Tikkanen, Mr. Juho",male,32,0,0,STON/O 2. 3101293,7.925,,S
|
||||
384,1,1,"Holverson, Mrs. Alexander Oskar (Mary Aline Towner)",female,35,1,0,113789,52,,S
|
||||
385,0,3,"Plotcharsky, Mr. Vasil",male,,0,0,349227,7.8958,,S
|
||||
386,0,2,"Davies, Mr. Charles Henry",male,18,0,0,S.O.C. 14879,73.5,,S
|
||||
387,0,3,"Goodwin, Master. Sidney Leonard",male,1,5,2,CA 2144,46.9,,S
|
||||
388,1,2,"Buss, Miss. Kate",female,36,0,0,27849,13,,S
|
||||
389,0,3,"Sadlier, Mr. Matthew",male,,0,0,367655,7.7292,,Q
|
||||
390,1,2,"Lehmann, Miss. Bertha",female,17,0,0,SC 1748,12,,C
|
||||
391,1,1,"Carter, Mr. William Ernest",male,36,1,2,113760,120,B96 B98,S
|
||||
392,1,3,"Jansson, Mr. Carl Olof",male,21,0,0,350034,7.7958,,S
|
||||
393,0,3,"Gustafsson, Mr. Johan Birger",male,28,2,0,3101277,7.925,,S
|
||||
394,1,1,"Newell, Miss. Marjorie",female,23,1,0,35273,113.275,D36,C
|
||||
395,1,3,"Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson)",female,24,0,2,PP 9549,16.7,G6,S
|
||||
396,0,3,"Johansson, Mr. Erik",male,22,0,0,350052,7.7958,,S
|
||||
397,0,3,"Olsson, Miss. Elina",female,31,0,0,350407,7.8542,,S
|
||||
398,0,2,"McKane, Mr. Peter David",male,46,0,0,28403,26,,S
|
||||
399,0,2,"Pain, Dr. Alfred",male,23,0,0,244278,10.5,,S
|
||||
400,1,2,"Trout, Mrs. William H (Jessie L)",female,28,0,0,240929,12.65,,S
|
||||
401,1,3,"Niskanen, Mr. Juha",male,39,0,0,STON/O 2. 3101289,7.925,,S
|
||||
402,0,3,"Adams, Mr. John",male,26,0,0,341826,8.05,,S
|
||||
403,0,3,"Jussila, Miss. Mari Aina",female,21,1,0,4137,9.825,,S
|
||||
404,0,3,"Hakkarainen, Mr. Pekka Pietari",male,28,1,0,STON/O2. 3101279,15.85,,S
|
||||
405,0,3,"Oreskovic, Miss. Marija",female,20,0,0,315096,8.6625,,S
|
||||
406,0,2,"Gale, Mr. Shadrach",male,34,1,0,28664,21,,S
|
||||
407,0,3,"Widegren, Mr. Carl/Charles Peter",male,51,0,0,347064,7.75,,S
|
||||
408,1,2,"Richards, Master. William Rowe",male,3,1,1,29106,18.75,,S
|
||||
409,0,3,"Birkeland, Mr. Hans Martin Monsen",male,21,0,0,312992,7.775,,S
|
||||
410,0,3,"Lefebre, Miss. Ida",female,,3,1,4133,25.4667,,S
|
||||
411,0,3,"Sdycoff, Mr. Todor",male,,0,0,349222,7.8958,,S
|
||||
412,0,3,"Hart, Mr. Henry",male,,0,0,394140,6.8583,,Q
|
||||
413,1,1,"Minahan, Miss. Daisy E",female,33,1,0,19928,90,C78,Q
|
||||
414,0,2,"Cunningham, Mr. Alfred Fleming",male,,0,0,239853,0,,S
|
||||
415,1,3,"Sundman, Mr. Johan Julian",male,44,0,0,STON/O 2. 3101269,7.925,,S
|
||||
416,0,3,"Meek, Mrs. Thomas (Annie Louise Rowley)",female,,0,0,343095,8.05,,S
|
||||
417,1,2,"Drew, Mrs. James Vivian (Lulu Thorne Christian)",female,34,1,1,28220,32.5,,S
|
||||
418,1,2,"Silven, Miss. Lyyli Karoliina",female,18,0,2,250652,13,,S
|
||||
419,0,2,"Matthews, Mr. William John",male,30,0,0,28228,13,,S
|
||||
420,0,3,"Van Impe, Miss. Catharina",female,10,0,2,345773,24.15,,S
|
||||
421,0,3,"Gheorgheff, Mr. Stanio",male,,0,0,349254,7.8958,,C
|
||||
422,0,3,"Charters, Mr. David",male,21,0,0,A/5. 13032,7.7333,,Q
|
||||
423,0,3,"Zimmerman, Mr. Leo",male,29,0,0,315082,7.875,,S
|
||||
424,0,3,"Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren)",female,28,1,1,347080,14.4,,S
|
||||
425,0,3,"Rosblom, Mr. Viktor Richard",male,18,1,1,370129,20.2125,,S
|
||||
426,0,3,"Wiseman, Mr. Phillippe",male,,0,0,A/4. 34244,7.25,,S
|
||||
427,1,2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28,1,0,2003,26,,S
|
||||
428,1,2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19,0,0,250655,26,,S
|
||||
429,0,3,"Flynn, Mr. James",male,,0,0,364851,7.75,,Q
|
||||
430,1,3,"Pickard, Mr. Berk (Berk Trembisky)",male,32,0,0,SOTON/O.Q. 392078,8.05,E10,S
|
||||
431,1,1,"Bjornstrom-Steffansson, Mr. Mauritz Hakan",male,28,0,0,110564,26.55,C52,S
|
||||
432,1,3,"Thorneycroft, Mrs. Percival (Florence Kate White)",female,,1,0,376564,16.1,,S
|
||||
433,1,2,"Louch, Mrs. Charles Alexander (Alice Adelaide Slow)",female,42,1,0,SC/AH 3085,26,,S
|
||||
434,0,3,"Kallio, Mr. Nikolai Erland",male,17,0,0,STON/O 2. 3101274,7.125,,S
|
||||
435,0,1,"Silvey, Mr. William Baird",male,50,1,0,13507,55.9,E44,S
|
||||
436,1,1,"Carter, Miss. Lucile Polk",female,14,1,2,113760,120,B96 B98,S
|
||||
437,0,3,"Ford, Miss. Doolina Margaret ""Daisy""",female,21,2,2,W./C. 6608,34.375,,S
|
||||
438,1,2,"Richards, Mrs. Sidney (Emily Hocking)",female,24,2,3,29106,18.75,,S
|
||||
439,0,1,"Fortune, Mr. Mark",male,64,1,4,19950,263,C23 C25 C27,S
|
||||
440,0,2,"Kvillner, Mr. Johan Henrik Johannesson",male,31,0,0,C.A. 18723,10.5,,S
|
||||
441,1,2,"Hart, Mrs. Benjamin (Esther Ada Bloomfield)",female,45,1,1,F.C.C. 13529,26.25,,S
|
||||
442,0,3,"Hampe, Mr. Leon",male,20,0,0,345769,9.5,,S
|
||||
443,0,3,"Petterson, Mr. Johan Emil",male,25,1,0,347076,7.775,,S
|
||||
444,1,2,"Reynaldo, Ms. Encarnacion",female,28,0,0,230434,13,,S
|
||||
445,1,3,"Johannesen-Bratthammer, Mr. Bernt",male,,0,0,65306,8.1125,,S
|
||||
446,1,1,"Dodge, Master. Washington",male,4,0,2,33638,81.8583,A34,S
|
||||
447,1,2,"Mellinger, Miss. Madeleine Violet",female,13,0,1,250644,19.5,,S
|
||||
448,1,1,"Seward, Mr. Frederic Kimber",male,34,0,0,113794,26.55,,S
|
||||
449,1,3,"Baclini, Miss. Marie Catherine",female,5,2,1,2666,19.2583,,C
|
||||
450,1,1,"Peuchen, Major. Arthur Godfrey",male,52,0,0,113786,30.5,C104,S
|
||||
451,0,2,"West, Mr. Edwy Arthur",male,36,1,2,C.A. 34651,27.75,,S
|
||||
452,0,3,"Hagland, Mr. Ingvald Olai Olsen",male,,1,0,65303,19.9667,,S
|
||||
453,0,1,"Foreman, Mr. Benjamin Laventall",male,30,0,0,113051,27.75,C111,C
|
||||
454,1,1,"Goldenberg, Mr. Samuel L",male,49,1,0,17453,89.1042,C92,C
|
||||
455,0,3,"Peduzzi, Mr. Joseph",male,,0,0,A/5 2817,8.05,,S
|
||||
456,1,3,"Jalsevac, Mr. Ivan",male,29,0,0,349240,7.8958,,C
|
||||
457,0,1,"Millet, Mr. Francis Davis",male,65,0,0,13509,26.55,E38,S
|
||||
458,1,1,"Kenyon, Mrs. Frederick R (Marion)",female,,1,0,17464,51.8625,D21,S
|
||||
459,1,2,"Toomey, Miss. Ellen",female,50,0,0,F.C.C. 13531,10.5,,S
|
||||
460,0,3,"O'Connor, Mr. Maurice",male,,0,0,371060,7.75,,Q
|
||||
461,1,1,"Anderson, Mr. Harry",male,48,0,0,19952,26.55,E12,S
|
||||
462,0,3,"Morley, Mr. William",male,34,0,0,364506,8.05,,S
|
||||
463,0,1,"Gee, Mr. Arthur H",male,47,0,0,111320,38.5,E63,S
|
||||
464,0,2,"Milling, Mr. Jacob Christian",male,48,0,0,234360,13,,S
|
||||
465,0,3,"Maisner, Mr. Simon",male,,0,0,A/S 2816,8.05,,S
|
||||
466,0,3,"Goncalves, Mr. Manuel Estanslas",male,38,0,0,SOTON/O.Q. 3101306,7.05,,S
|
||||
467,0,2,"Campbell, Mr. William",male,,0,0,239853,0,,S
|
||||
468,0,1,"Smart, Mr. John Montgomery",male,56,0,0,113792,26.55,,S
|
||||
469,0,3,"Scanlan, Mr. James",male,,0,0,36209,7.725,,Q
|
||||
470,1,3,"Baclini, Miss. Helene Barbara",female,0.75,2,1,2666,19.2583,,C
|
||||
471,0,3,"Keefe, Mr. Arthur",male,,0,0,323592,7.25,,S
|
||||
472,0,3,"Cacic, Mr. Luka",male,38,0,0,315089,8.6625,,S
|
||||
473,1,2,"West, Mrs. Edwy Arthur (Ada Mary Worth)",female,33,1,2,C.A. 34651,27.75,,S
|
||||
474,1,2,"Jerwan, Mrs. Amin S (Marie Marthe Thuillard)",female,23,0,0,SC/AH Basle 541,13.7917,D,C
|
||||
475,0,3,"Strandberg, Miss. Ida Sofia",female,22,0,0,7553,9.8375,,S
|
||||
476,0,1,"Clifford, Mr. George Quincy",male,,0,0,110465,52,A14,S
|
||||
477,0,2,"Renouf, Mr. Peter Henry",male,34,1,0,31027,21,,S
|
||||
478,0,3,"Braund, Mr. Lewis Richard",male,29,1,0,3460,7.0458,,S
|
||||
479,0,3,"Karlsson, Mr. Nils August",male,22,0,0,350060,7.5208,,S
|
||||
480,1,3,"Hirvonen, Miss. Hildur E",female,2,0,1,3101298,12.2875,,S
|
||||
481,0,3,"Goodwin, Master. Harold Victor",male,9,5,2,CA 2144,46.9,,S
|
||||
482,0,2,"Frost, Mr. Anthony Wood ""Archie""",male,,0,0,239854,0,,S
|
||||
483,0,3,"Rouse, Mr. Richard Henry",male,50,0,0,A/5 3594,8.05,,S
|
||||
484,1,3,"Turkula, Mrs. (Hedwig)",female,63,0,0,4134,9.5875,,S
|
||||
485,1,1,"Bishop, Mr. Dickinson H",male,25,1,0,11967,91.0792,B49,C
|
||||
486,0,3,"Lefebre, Miss. Jeannie",female,,3,1,4133,25.4667,,S
|
||||
487,1,1,"Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby)",female,35,1,0,19943,90,C93,S
|
||||
488,0,1,"Kent, Mr. Edward Austin",male,58,0,0,11771,29.7,B37,C
|
||||
489,0,3,"Somerton, Mr. Francis William",male,30,0,0,A.5. 18509,8.05,,S
|
||||
490,1,3,"Coutts, Master. Eden Leslie ""Neville""",male,9,1,1,C.A. 37671,15.9,,S
|
||||
491,0,3,"Hagland, Mr. Konrad Mathias Reiersen",male,,1,0,65304,19.9667,,S
|
||||
492,0,3,"Windelov, Mr. Einar",male,21,0,0,SOTON/OQ 3101317,7.25,,S
|
||||
493,0,1,"Molson, Mr. Harry Markland",male,55,0,0,113787,30.5,C30,S
|
||||
494,0,1,"Artagaveytia, Mr. Ramon",male,71,0,0,PC 17609,49.5042,,C
|
||||
495,0,3,"Stanley, Mr. Edward Roland",male,21,0,0,A/4 45380,8.05,,S
|
||||
496,0,3,"Yousseff, Mr. Gerious",male,,0,0,2627,14.4583,,C
|
||||
497,1,1,"Eustis, Miss. Elizabeth Mussey",female,54,1,0,36947,78.2667,D20,C
|
||||
498,0,3,"Shellard, Mr. Frederick William",male,,0,0,C.A. 6212,15.1,,S
|
||||
499,0,1,"Allison, Mrs. Hudson J C (Bessie Waldo Daniels)",female,25,1,2,113781,151.55,C22 C26,S
|
||||
500,0,3,"Svensson, Mr. Olof",male,24,0,0,350035,7.7958,,S
|
||||
501,0,3,"Calic, Mr. Petar",male,17,0,0,315086,8.6625,,S
|
||||
502,0,3,"Canavan, Miss. Mary",female,21,0,0,364846,7.75,,Q
|
||||
503,0,3,"O'Sullivan, Miss. Bridget Mary",female,,0,0,330909,7.6292,,Q
|
||||
504,0,3,"Laitinen, Miss. Kristina Sofia",female,37,0,0,4135,9.5875,,S
|
||||
505,1,1,"Maioni, Miss. Roberta",female,16,0,0,110152,86.5,B79,S
|
||||
506,0,1,"Penasco y Castellana, Mr. Victor de Satode",male,18,1,0,PC 17758,108.9,C65,C
|
||||
507,1,2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33,0,2,26360,26,,S
|
||||
508,1,1,"Bradley, Mr. George (""George Arthur Brayton"")",male,,0,0,111427,26.55,,S
|
||||
509,0,3,"Olsen, Mr. Henry Margido",male,28,0,0,C 4001,22.525,,S
|
||||
510,1,3,"Lang, Mr. Fang",male,26,0,0,1601,56.4958,,S
|
||||
511,1,3,"Daly, Mr. Eugene Patrick",male,29,0,0,382651,7.75,,Q
|
||||
512,0,3,"Webber, Mr. James",male,,0,0,SOTON/OQ 3101316,8.05,,S
|
||||
513,1,1,"McGough, Mr. James Robert",male,36,0,0,PC 17473,26.2875,E25,S
|
||||
514,1,1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54,1,0,PC 17603,59.4,,C
|
||||
515,0,3,"Coleff, Mr. Satio",male,24,0,0,349209,7.4958,,S
|
||||
516,0,1,"Walker, Mr. William Anderson",male,47,0,0,36967,34.0208,D46,S
|
||||
517,1,2,"Lemore, Mrs. (Amelia Milley)",female,34,0,0,C.A. 34260,10.5,F33,S
|
||||
518,0,3,"Ryan, Mr. Patrick",male,,0,0,371110,24.15,,Q
|
||||
519,1,2,"Angle, Mrs. William A (Florence ""Mary"" Agnes Hughes)",female,36,1,0,226875,26,,S
|
||||
520,0,3,"Pavlovic, Mr. Stefo",male,32,0,0,349242,7.8958,,S
|
||||
521,1,1,"Perreault, Miss. Anne",female,30,0,0,12749,93.5,B73,S
|
||||
522,0,3,"Vovk, Mr. Janko",male,22,0,0,349252,7.8958,,S
|
||||
523,0,3,"Lahoud, Mr. Sarkis",male,,0,0,2624,7.225,,C
|
||||
524,1,1,"Hippach, Mrs. Louis Albert (Ida Sophia Fischer)",female,44,0,1,111361,57.9792,B18,C
|
||||
525,0,3,"Kassem, Mr. Fared",male,,0,0,2700,7.2292,,C
|
||||
526,0,3,"Farrell, Mr. James",male,40.5,0,0,367232,7.75,,Q
|
||||
527,1,2,"Ridsdale, Miss. Lucy",female,50,0,0,W./C. 14258,10.5,,S
|
||||
528,0,1,"Farthing, Mr. John",male,,0,0,PC 17483,221.7792,C95,S
|
||||
529,0,3,"Salonen, Mr. Johan Werner",male,39,0,0,3101296,7.925,,S
|
||||
530,0,2,"Hocking, Mr. Richard George",male,23,2,1,29104,11.5,,S
|
||||
531,1,2,"Quick, Miss. Phyllis May",female,2,1,1,26360,26,,S
|
||||
532,0,3,"Toufik, Mr. Nakli",male,,0,0,2641,7.2292,,C
|
||||
533,0,3,"Elias, Mr. Joseph Jr",male,17,1,1,2690,7.2292,,C
|
||||
534,1,3,"Peter, Mrs. Catherine (Catherine Rizk)",female,,0,2,2668,22.3583,,C
|
||||
535,0,3,"Cacic, Miss. Marija",female,30,0,0,315084,8.6625,,S
|
||||
536,1,2,"Hart, Miss. Eva Miriam",female,7,0,2,F.C.C. 13529,26.25,,S
|
||||
537,0,1,"Butt, Major. Archibald Willingham",male,45,0,0,113050,26.55,B38,S
|
||||
538,1,1,"LeRoy, Miss. Bertha",female,30,0,0,PC 17761,106.425,,C
|
||||
539,0,3,"Risien, Mr. Samuel Beard",male,,0,0,364498,14.5,,S
|
||||
540,1,1,"Frolicher, Miss. Hedwig Margaritha",female,22,0,2,13568,49.5,B39,C
|
||||
541,1,1,"Crosby, Miss. Harriet R",female,36,0,2,WE/P 5735,71,B22,S
|
||||
542,0,3,"Andersson, Miss. Ingeborg Constanzia",female,9,4,2,347082,31.275,,S
|
||||
543,0,3,"Andersson, Miss. Sigrid Elisabeth",female,11,4,2,347082,31.275,,S
|
||||
544,1,2,"Beane, Mr. Edward",male,32,1,0,2908,26,,S
|
||||
545,0,1,"Douglas, Mr. Walter Donald",male,50,1,0,PC 17761,106.425,C86,C
|
||||
546,0,1,"Nicholson, Mr. Arthur Ernest",male,64,0,0,693,26,,S
|
||||
547,1,2,"Beane, Mrs. Edward (Ethel Clarke)",female,19,1,0,2908,26,,S
|
||||
548,1,2,"Padro y Manent, Mr. Julian",male,,0,0,SC/PARIS 2146,13.8625,,C
|
||||
549,0,3,"Goldsmith, Mr. Frank John",male,33,1,1,363291,20.525,,S
|
||||
550,1,2,"Davies, Master. John Morgan Jr",male,8,1,1,C.A. 33112,36.75,,S
|
||||
551,1,1,"Thayer, Mr. John Borland Jr",male,17,0,2,17421,110.8833,C70,C
|
||||
552,0,2,"Sharp, Mr. Percival James R",male,27,0,0,244358,26,,S
|
||||
553,0,3,"O'Brien, Mr. Timothy",male,,0,0,330979,7.8292,,Q
|
||||
554,1,3,"Leeni, Mr. Fahim (""Philip Zenni"")",male,22,0,0,2620,7.225,,C
|
||||
555,1,3,"Ohman, Miss. Velin",female,22,0,0,347085,7.775,,S
|
||||
556,0,1,"Wright, Mr. George",male,62,0,0,113807,26.55,,S
|
||||
557,1,1,"Duff Gordon, Lady. (Lucille Christiana Sutherland) (""Mrs Morgan"")",female,48,1,0,11755,39.6,A16,C
|
||||
558,0,1,"Robbins, Mr. Victor",male,,0,0,PC 17757,227.525,,C
|
||||
559,1,1,"Taussig, Mrs. Emil (Tillie Mandelbaum)",female,39,1,1,110413,79.65,E67,S
|
||||
560,1,3,"de Messemaeker, Mrs. Guillaume Joseph (Emma)",female,36,1,0,345572,17.4,,S
|
||||
561,0,3,"Morrow, Mr. Thomas Rowan",male,,0,0,372622,7.75,,Q
|
||||
562,0,3,"Sivic, Mr. Husein",male,40,0,0,349251,7.8958,,S
|
||||
563,0,2,"Norman, Mr. Robert Douglas",male,28,0,0,218629,13.5,,S
|
||||
564,0,3,"Simmons, Mr. John",male,,0,0,SOTON/OQ 392082,8.05,,S
|
||||
565,0,3,"Meanwell, Miss. (Marion Ogden)",female,,0,0,SOTON/O.Q. 392087,8.05,,S
|
||||
566,0,3,"Davies, Mr. Alfred J",male,24,2,0,A/4 48871,24.15,,S
|
||||
567,0,3,"Stoytcheff, Mr. Ilia",male,19,0,0,349205,7.8958,,S
|
||||
568,0,3,"Palsson, Mrs. Nils (Alma Cornelia Berglund)",female,29,0,4,349909,21.075,,S
|
||||
569,0,3,"Doharr, Mr. Tannous",male,,0,0,2686,7.2292,,C
|
||||
570,1,3,"Jonsson, Mr. Carl",male,32,0,0,350417,7.8542,,S
|
||||
571,1,2,"Harris, Mr. George",male,62,0,0,S.W./PP 752,10.5,,S
|
||||
572,1,1,"Appleton, Mrs. Edward Dale (Charlotte Lamson)",female,53,2,0,11769,51.4792,C101,S
|
||||
573,1,1,"Flynn, Mr. John Irwin (""Irving"")",male,36,0,0,PC 17474,26.3875,E25,S
|
||||
574,1,3,"Kelly, Miss. Mary",female,,0,0,14312,7.75,,Q
|
||||
575,0,3,"Rush, Mr. Alfred George John",male,16,0,0,A/4. 20589,8.05,,S
|
||||
576,0,3,"Patchett, Mr. George",male,19,0,0,358585,14.5,,S
|
||||
577,1,2,"Garside, Miss. Ethel",female,34,0,0,243880,13,,S
|
||||
578,1,1,"Silvey, Mrs. William Baird (Alice Munger)",female,39,1,0,13507,55.9,E44,S
|
||||
579,0,3,"Caram, Mrs. Joseph (Maria Elias)",female,,1,0,2689,14.4583,,C
|
||||
580,1,3,"Jussila, Mr. Eiriik",male,32,0,0,STON/O 2. 3101286,7.925,,S
|
||||
581,1,2,"Christy, Miss. Julie Rachel",female,25,1,1,237789,30,,S
|
||||
582,1,1,"Thayer, Mrs. John Borland (Marian Longstreth Morris)",female,39,1,1,17421,110.8833,C68,C
|
||||
583,0,2,"Downton, Mr. William James",male,54,0,0,28403,26,,S
|
||||
584,0,1,"Ross, Mr. John Hugo",male,36,0,0,13049,40.125,A10,C
|
||||
585,0,3,"Paulner, Mr. Uscher",male,,0,0,3411,8.7125,,C
|
||||
586,1,1,"Taussig, Miss. Ruth",female,18,0,2,110413,79.65,E68,S
|
||||
587,0,2,"Jarvis, Mr. John Denzil",male,47,0,0,237565,15,,S
|
||||
588,1,1,"Frolicher-Stehli, Mr. Maxmillian",male,60,1,1,13567,79.2,B41,C
|
||||
589,0,3,"Gilinski, Mr. Eliezer",male,22,0,0,14973,8.05,,S
|
||||
590,0,3,"Murdlin, Mr. Joseph",male,,0,0,A./5. 3235,8.05,,S
|
||||
591,0,3,"Rintamaki, Mr. Matti",male,35,0,0,STON/O 2. 3101273,7.125,,S
|
||||
592,1,1,"Stephenson, Mrs. Walter Bertram (Martha Eustis)",female,52,1,0,36947,78.2667,D20,C
|
||||
593,0,3,"Elsbury, Mr. William James",male,47,0,0,A/5 3902,7.25,,S
|
||||
594,0,3,"Bourke, Miss. Mary",female,,0,2,364848,7.75,,Q
|
||||
595,0,2,"Chapman, Mr. John Henry",male,37,1,0,SC/AH 29037,26,,S
|
||||
596,0,3,"Van Impe, Mr. Jean Baptiste",male,36,1,1,345773,24.15,,S
|
||||
597,1,2,"Leitch, Miss. Jessie Wills",female,,0,0,248727,33,,S
|
||||
598,0,3,"Johnson, Mr. Alfred",male,49,0,0,LINE,0,,S
|
||||
599,0,3,"Boulos, Mr. Hanna",male,,0,0,2664,7.225,,C
|
||||
600,1,1,"Duff Gordon, Sir. Cosmo Edmund (""Mr Morgan"")",male,49,1,0,PC 17485,56.9292,A20,C
|
||||
601,1,2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24,2,1,243847,27,,S
|
||||
602,0,3,"Slabenoff, Mr. Petco",male,,0,0,349214,7.8958,,S
|
||||
603,0,1,"Harrington, Mr. Charles H",male,,0,0,113796,42.4,,S
|
||||
604,0,3,"Torber, Mr. Ernst William",male,44,0,0,364511,8.05,,S
|
||||
605,1,1,"Homer, Mr. Harry (""Mr E Haven"")",male,35,0,0,111426,26.55,,C
|
||||
606,0,3,"Lindell, Mr. Edvard Bengtsson",male,36,1,0,349910,15.55,,S
|
||||
607,0,3,"Karaic, Mr. Milan",male,30,0,0,349246,7.8958,,S
|
||||
608,1,1,"Daniel, Mr. Robert Williams",male,27,0,0,113804,30.5,,S
|
||||
609,1,2,"Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue)",female,22,1,2,SC/Paris 2123,41.5792,,C
|
||||
610,1,1,"Shutes, Miss. Elizabeth W",female,40,0,0,PC 17582,153.4625,C125,S
|
||||
611,0,3,"Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren)",female,39,1,5,347082,31.275,,S
|
||||
612,0,3,"Jardin, Mr. Jose Neto",male,,0,0,SOTON/O.Q. 3101305,7.05,,S
|
||||
613,1,3,"Murphy, Miss. Margaret Jane",female,,1,0,367230,15.5,,Q
|
||||
614,0,3,"Horgan, Mr. John",male,,0,0,370377,7.75,,Q
|
||||
615,0,3,"Brocklebank, Mr. William Alfred",male,35,0,0,364512,8.05,,S
|
||||
616,1,2,"Herman, Miss. Alice",female,24,1,2,220845,65,,S
|
||||
617,0,3,"Danbom, Mr. Ernst Gilbert",male,34,1,1,347080,14.4,,S
|
||||
618,0,3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26,1,0,A/5. 3336,16.1,,S
|
||||
619,1,2,"Becker, Miss. Marion Louise",female,4,2,1,230136,39,F4,S
|
||||
620,0,2,"Gavey, Mr. Lawrence",male,26,0,0,31028,10.5,,S
|
||||
621,0,3,"Yasbeck, Mr. Antoni",male,27,1,0,2659,14.4542,,C
|
||||
622,1,1,"Kimball, Mr. Edwin Nelson Jr",male,42,1,0,11753,52.5542,D19,S
|
||||
623,1,3,"Nakid, Mr. Sahid",male,20,1,1,2653,15.7417,,C
|
||||
624,0,3,"Hansen, Mr. Henry Damsgaard",male,21,0,0,350029,7.8542,,S
|
||||
625,0,3,"Bowen, Mr. David John ""Dai""",male,21,0,0,54636,16.1,,S
|
||||
626,0,1,"Sutton, Mr. Frederick",male,61,0,0,36963,32.3208,D50,S
|
||||
627,0,2,"Kirkland, Rev. Charles Leonard",male,57,0,0,219533,12.35,,Q
|
||||
628,1,1,"Longley, Miss. Gretchen Fiske",female,21,0,0,13502,77.9583,D9,S
|
||||
629,0,3,"Bostandyeff, Mr. Guentcho",male,26,0,0,349224,7.8958,,S
|
||||
630,0,3,"O'Connell, Mr. Patrick D",male,,0,0,334912,7.7333,,Q
|
||||
631,1,1,"Barkworth, Mr. Algernon Henry Wilson",male,80,0,0,27042,30,A23,S
|
||||
632,0,3,"Lundahl, Mr. Johan Svensson",male,51,0,0,347743,7.0542,,S
|
||||
633,1,1,"Stahelin-Maeglin, Dr. Max",male,32,0,0,13214,30.5,B50,C
|
||||
634,0,1,"Parr, Mr. William Henry Marsh",male,,0,0,112052,0,,S
|
||||
635,0,3,"Skoog, Miss. Mabel",female,9,3,2,347088,27.9,,S
|
||||
636,1,2,"Davis, Miss. Mary",female,28,0,0,237668,13,,S
|
||||
637,0,3,"Leinonen, Mr. Antti Gustaf",male,32,0,0,STON/O 2. 3101292,7.925,,S
|
||||
638,0,2,"Collyer, Mr. Harvey",male,31,1,1,C.A. 31921,26.25,,S
|
||||
639,0,3,"Panula, Mrs. Juha (Maria Emilia Ojala)",female,41,0,5,3101295,39.6875,,S
|
||||
640,0,3,"Thorneycroft, Mr. Percival",male,,1,0,376564,16.1,,S
|
||||
641,0,3,"Jensen, Mr. Hans Peder",male,20,0,0,350050,7.8542,,S
|
||||
642,1,1,"Sagesser, Mlle. Emma",female,24,0,0,PC 17477,69.3,B35,C
|
||||
643,0,3,"Skoog, Miss. Margit Elizabeth",female,2,3,2,347088,27.9,,S
|
||||
644,1,3,"Foo, Mr. Choong",male,,0,0,1601,56.4958,,S
|
||||
645,1,3,"Baclini, Miss. Eugenie",female,0.75,2,1,2666,19.2583,,C
|
||||
646,1,1,"Harper, Mr. Henry Sleeper",male,48,1,0,PC 17572,76.7292,D33,C
|
||||
647,0,3,"Cor, Mr. Liudevit",male,19,0,0,349231,7.8958,,S
|
||||
648,1,1,"Simonius-Blumer, Col. Oberst Alfons",male,56,0,0,13213,35.5,A26,C
|
||||
649,0,3,"Willey, Mr. Edward",male,,0,0,S.O./P.P. 751,7.55,,S
|
||||
650,1,3,"Stanley, Miss. Amy Zillah Elsie",female,23,0,0,CA. 2314,7.55,,S
|
||||
651,0,3,"Mitkoff, Mr. Mito",male,,0,0,349221,7.8958,,S
|
||||
652,1,2,"Doling, Miss. Elsie",female,18,0,1,231919,23,,S
|
||||
653,0,3,"Kalvik, Mr. Johannes Halvorsen",male,21,0,0,8475,8.4333,,S
|
||||
654,1,3,"O'Leary, Miss. Hanora ""Norah""",female,,0,0,330919,7.8292,,Q
|
||||
655,0,3,"Hegarty, Miss. Hanora ""Nora""",female,18,0,0,365226,6.75,,Q
|
||||
656,0,2,"Hickman, Mr. Leonard Mark",male,24,2,0,S.O.C. 14879,73.5,,S
|
||||
657,0,3,"Radeff, Mr. Alexander",male,,0,0,349223,7.8958,,S
|
||||
658,0,3,"Bourke, Mrs. John (Catherine)",female,32,1,1,364849,15.5,,Q
|
||||
659,0,2,"Eitemiller, Mr. George Floyd",male,23,0,0,29751,13,,S
|
||||
660,0,1,"Newell, Mr. Arthur Webster",male,58,0,2,35273,113.275,D48,C
|
||||
661,1,1,"Frauenthal, Dr. Henry William",male,50,2,0,PC 17611,133.65,,S
|
||||
662,0,3,"Badt, Mr. Mohamed",male,40,0,0,2623,7.225,,C
|
||||
663,0,1,"Colley, Mr. Edward Pomeroy",male,47,0,0,5727,25.5875,E58,S
|
||||
664,0,3,"Coleff, Mr. Peju",male,36,0,0,349210,7.4958,,S
|
||||
665,1,3,"Lindqvist, Mr. Eino William",male,20,1,0,STON/O 2. 3101285,7.925,,S
|
||||
666,0,2,"Hickman, Mr. Lewis",male,32,2,0,S.O.C. 14879,73.5,,S
|
||||
667,0,2,"Butler, Mr. Reginald Fenton",male,25,0,0,234686,13,,S
|
||||
668,0,3,"Rommetvedt, Mr. Knud Paust",male,,0,0,312993,7.775,,S
|
||||
669,0,3,"Cook, Mr. Jacob",male,43,0,0,A/5 3536,8.05,,S
|
||||
670,1,1,"Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright)",female,,1,0,19996,52,C126,S
|
||||
671,1,2,"Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford)",female,40,1,1,29750,39,,S
|
||||
672,0,1,"Davidson, Mr. Thornton",male,31,1,0,F.C. 12750,52,B71,S
|
||||
673,0,2,"Mitchell, Mr. Henry Michael",male,70,0,0,C.A. 24580,10.5,,S
|
||||
674,1,2,"Wilhelms, Mr. Charles",male,31,0,0,244270,13,,S
|
||||
675,0,2,"Watson, Mr. Ennis Hastings",male,,0,0,239856,0,,S
|
||||
676,0,3,"Edvardsson, Mr. Gustaf Hjalmar",male,18,0,0,349912,7.775,,S
|
||||
677,0,3,"Sawyer, Mr. Frederick Charles",male,24.5,0,0,342826,8.05,,S
|
||||
678,1,3,"Turja, Miss. Anna Sofia",female,18,0,0,4138,9.8417,,S
|
||||
679,0,3,"Goodwin, Mrs. Frederick (Augusta Tyler)",female,43,1,6,CA 2144,46.9,,S
|
||||
680,1,1,"Cardeza, Mr. Thomas Drake Martinez",male,36,0,1,PC 17755,512.3292,B51 B53 B55,C
|
||||
681,0,3,"Peters, Miss. Katie",female,,0,0,330935,8.1375,,Q
|
||||
682,1,1,"Hassab, Mr. Hammad",male,27,0,0,PC 17572,76.7292,D49,C
|
||||
683,0,3,"Olsvigen, Mr. Thor Anderson",male,20,0,0,6563,9.225,,S
|
||||
684,0,3,"Goodwin, Mr. Charles Edward",male,14,5,2,CA 2144,46.9,,S
|
||||
685,0,2,"Brown, Mr. Thomas William Solomon",male,60,1,1,29750,39,,S
|
||||
686,0,2,"Laroche, Mr. Joseph Philippe Lemercier",male,25,1,2,SC/Paris 2123,41.5792,,C
|
||||
687,0,3,"Panula, Mr. Jaako Arnold",male,14,4,1,3101295,39.6875,,S
|
||||
688,0,3,"Dakic, Mr. Branko",male,19,0,0,349228,10.1708,,S
|
||||
689,0,3,"Fischer, Mr. Eberhard Thelander",male,18,0,0,350036,7.7958,,S
|
||||
690,1,1,"Madill, Miss. Georgette Alexandra",female,15,0,1,24160,211.3375,B5,S
|
||||
691,1,1,"Dick, Mr. Albert Adrian",male,31,1,0,17474,57,B20,S
|
||||
692,1,3,"Karun, Miss. Manca",female,4,0,1,349256,13.4167,,C
|
||||
693,1,3,"Lam, Mr. Ali",male,,0,0,1601,56.4958,,S
|
||||
694,0,3,"Saad, Mr. Khalil",male,25,0,0,2672,7.225,,C
|
||||
695,0,1,"Weir, Col. John",male,60,0,0,113800,26.55,,S
|
||||
696,0,2,"Chapman, Mr. Charles Henry",male,52,0,0,248731,13.5,,S
|
||||
697,0,3,"Kelly, Mr. James",male,44,0,0,363592,8.05,,S
|
||||
698,1,3,"Mullens, Miss. Katherine ""Katie""",female,,0,0,35852,7.7333,,Q
|
||||
699,0,1,"Thayer, Mr. John Borland",male,49,1,1,17421,110.8833,C68,C
|
||||
700,0,3,"Humblen, Mr. Adolf Mathias Nicolai Olsen",male,42,0,0,348121,7.65,F G63,S
|
||||
701,1,1,"Astor, Mrs. John Jacob (Madeleine Talmadge Force)",female,18,1,0,PC 17757,227.525,C62 C64,C
|
||||
702,1,1,"Silverthorne, Mr. Spencer Victor",male,35,0,0,PC 17475,26.2875,E24,S
|
||||
703,0,3,"Barbara, Miss. Saiide",female,18,0,1,2691,14.4542,,C
|
||||
704,0,3,"Gallagher, Mr. Martin",male,25,0,0,36864,7.7417,,Q
|
||||
705,0,3,"Hansen, Mr. Henrik Juul",male,26,1,0,350025,7.8542,,S
|
||||
706,0,2,"Morley, Mr. Henry Samuel (""Mr Henry Marshall"")",male,39,0,0,250655,26,,S
|
||||
707,1,2,"Kelly, Mrs. Florence ""Fannie""",female,45,0,0,223596,13.5,,S
|
||||
708,1,1,"Calderhead, Mr. Edward Pennington",male,42,0,0,PC 17476,26.2875,E24,S
|
||||
709,1,1,"Cleaver, Miss. Alice",female,22,0,0,113781,151.55,,S
|
||||
710,1,3,"Moubarek, Master. Halim Gonios (""William George"")",male,,1,1,2661,15.2458,,C
|
||||
711,1,1,"Mayne, Mlle. Berthe Antonine (""Mrs de Villiers"")",female,24,0,0,PC 17482,49.5042,C90,C
|
||||
712,0,1,"Klaber, Mr. Herman",male,,0,0,113028,26.55,C124,S
|
||||
713,1,1,"Taylor, Mr. Elmer Zebley",male,48,1,0,19996,52,C126,S
|
||||
714,0,3,"Larsson, Mr. August Viktor",male,29,0,0,7545,9.4833,,S
|
||||
715,0,2,"Greenberg, Mr. Samuel",male,52,0,0,250647,13,,S
|
||||
716,0,3,"Soholt, Mr. Peter Andreas Lauritz Andersen",male,19,0,0,348124,7.65,F G73,S
|
||||
717,1,1,"Endres, Miss. Caroline Louise",female,38,0,0,PC 17757,227.525,C45,C
|
||||
718,1,2,"Troutt, Miss. Edwina Celia ""Winnie""",female,27,0,0,34218,10.5,E101,S
|
||||
719,0,3,"McEvoy, Mr. Michael",male,,0,0,36568,15.5,,Q
|
||||
720,0,3,"Johnson, Mr. Malkolm Joackim",male,33,0,0,347062,7.775,,S
|
||||
721,1,2,"Harper, Miss. Annie Jessie ""Nina""",female,6,0,1,248727,33,,S
|
||||
722,0,3,"Jensen, Mr. Svend Lauritz",male,17,1,0,350048,7.0542,,S
|
||||
723,0,2,"Gillespie, Mr. William Henry",male,34,0,0,12233,13,,S
|
||||
724,0,2,"Hodges, Mr. Henry Price",male,50,0,0,250643,13,,S
|
||||
725,1,1,"Chambers, Mr. Norman Campbell",male,27,1,0,113806,53.1,E8,S
|
||||
726,0,3,"Oreskovic, Mr. Luka",male,20,0,0,315094,8.6625,,S
|
||||
727,1,2,"Renouf, Mrs. Peter Henry (Lillian Jefferys)",female,30,3,0,31027,21,,S
|
||||
728,1,3,"Mannion, Miss. Margareth",female,,0,0,36866,7.7375,,Q
|
||||
729,0,2,"Bryhl, Mr. Kurt Arnold Gottfrid",male,25,1,0,236853,26,,S
|
||||
730,0,3,"Ilmakangas, Miss. Pieta Sofia",female,25,1,0,STON/O2. 3101271,7.925,,S
|
||||
731,1,1,"Allen, Miss. Elisabeth Walton",female,29,0,0,24160,211.3375,B5,S
|
||||
732,0,3,"Hassan, Mr. Houssein G N",male,11,0,0,2699,18.7875,,C
|
||||
733,0,2,"Knight, Mr. Robert J",male,,0,0,239855,0,,S
|
||||
734,0,2,"Berriman, Mr. William John",male,23,0,0,28425,13,,S
|
||||
735,0,2,"Troupiansky, Mr. Moses Aaron",male,23,0,0,233639,13,,S
|
||||
736,0,3,"Williams, Mr. Leslie",male,28.5,0,0,54636,16.1,,S
|
||||
737,0,3,"Ford, Mrs. Edward (Margaret Ann Watson)",female,48,1,3,W./C. 6608,34.375,,S
|
||||
738,1,1,"Lesurer, Mr. Gustave J",male,35,0,0,PC 17755,512.3292,B101,C
|
||||
739,0,3,"Ivanoff, Mr. Kanio",male,,0,0,349201,7.8958,,S
|
||||
740,0,3,"Nankoff, Mr. Minko",male,,0,0,349218,7.8958,,S
|
||||
741,1,1,"Hawksford, Mr. Walter James",male,,0,0,16988,30,D45,S
|
||||
742,0,1,"Cavendish, Mr. Tyrell William",male,36,1,0,19877,78.85,C46,S
|
||||
743,1,1,"Ryerson, Miss. Susan Parker ""Suzette""",female,21,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
744,0,3,"McNamee, Mr. Neal",male,24,1,0,376566,16.1,,S
|
||||
745,1,3,"Stranden, Mr. Juho",male,31,0,0,STON/O 2. 3101288,7.925,,S
|
||||
746,0,1,"Crosby, Capt. Edward Gifford",male,70,1,1,WE/P 5735,71,B22,S
|
||||
747,0,3,"Abbott, Mr. Rossmore Edward",male,16,1,1,C.A. 2673,20.25,,S
|
||||
748,1,2,"Sinkkonen, Miss. Anna",female,30,0,0,250648,13,,S
|
||||
749,0,1,"Marvin, Mr. Daniel Warner",male,19,1,0,113773,53.1,D30,S
|
||||
750,0,3,"Connaghton, Mr. Michael",male,31,0,0,335097,7.75,,Q
|
||||
751,1,2,"Wells, Miss. Joan",female,4,1,1,29103,23,,S
|
||||
752,1,3,"Moor, Master. Meier",male,6,0,1,392096,12.475,E121,S
|
||||
753,0,3,"Vande Velde, Mr. Johannes Joseph",male,33,0,0,345780,9.5,,S
|
||||
754,0,3,"Jonkoff, Mr. Lalio",male,23,0,0,349204,7.8958,,S
|
||||
755,1,2,"Herman, Mrs. Samuel (Jane Laver)",female,48,1,2,220845,65,,S
|
||||
756,1,2,"Hamalainen, Master. Viljo",male,0.67,1,1,250649,14.5,,S
|
||||
757,0,3,"Carlsson, Mr. August Sigfrid",male,28,0,0,350042,7.7958,,S
|
||||
758,0,2,"Bailey, Mr. Percy Andrew",male,18,0,0,29108,11.5,,S
|
||||
759,0,3,"Theobald, Mr. Thomas Leonard",male,34,0,0,363294,8.05,,S
|
||||
760,1,1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33,0,0,110152,86.5,B77,S
|
||||
761,0,3,"Garfirth, Mr. John",male,,0,0,358585,14.5,,S
|
||||
762,0,3,"Nirva, Mr. Iisakki Antino Aijo",male,41,0,0,SOTON/O2 3101272,7.125,,S
|
||||
763,1,3,"Barah, Mr. Hanna Assi",male,20,0,0,2663,7.2292,,C
|
||||
764,1,1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36,1,2,113760,120,B96 B98,S
|
||||
765,0,3,"Eklund, Mr. Hans Linus",male,16,0,0,347074,7.775,,S
|
||||
766,1,1,"Hogeboom, Mrs. John C (Anna Andrews)",female,51,1,0,13502,77.9583,D11,S
|
||||
767,0,1,"Brewe, Dr. Arthur Jackson",male,,0,0,112379,39.6,,C
|
||||
768,0,3,"Mangan, Miss. Mary",female,30.5,0,0,364850,7.75,,Q
|
||||
769,0,3,"Moran, Mr. Daniel J",male,,1,0,371110,24.15,,Q
|
||||
770,0,3,"Gronnestad, Mr. Daniel Danielsen",male,32,0,0,8471,8.3625,,S
|
||||
771,0,3,"Lievens, Mr. Rene Aime",male,24,0,0,345781,9.5,,S
|
||||
772,0,3,"Jensen, Mr. Niels Peder",male,48,0,0,350047,7.8542,,S
|
||||
773,0,2,"Mack, Mrs. (Mary)",female,57,0,0,S.O./P.P. 3,10.5,E77,S
|
||||
774,0,3,"Elias, Mr. Dibo",male,,0,0,2674,7.225,,C
|
||||
775,1,2,"Hocking, Mrs. Elizabeth (Eliza Needs)",female,54,1,3,29105,23,,S
|
||||
776,0,3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18,0,0,347078,7.75,,S
|
||||
777,0,3,"Tobin, Mr. Roger",male,,0,0,383121,7.75,F38,Q
|
||||
778,1,3,"Emanuel, Miss. Virginia Ethel",female,5,0,0,364516,12.475,,S
|
||||
779,0,3,"Kilgannon, Mr. Thomas J",male,,0,0,36865,7.7375,,Q
|
||||
780,1,1,"Robert, Mrs. Edward Scott (Elisabeth Walton McMillan)",female,43,0,1,24160,211.3375,B3,S
|
||||
781,1,3,"Ayoub, Miss. Banoura",female,13,0,0,2687,7.2292,,C
|
||||
782,1,1,"Dick, Mrs. Albert Adrian (Vera Gillespie)",female,17,1,0,17474,57,B20,S
|
||||
783,0,1,"Long, Mr. Milton Clyde",male,29,0,0,113501,30,D6,S
|
||||
784,0,3,"Johnston, Mr. Andrew G",male,,1,2,W./C. 6607,23.45,,S
|
||||
785,0,3,"Ali, Mr. William",male,25,0,0,SOTON/O.Q. 3101312,7.05,,S
|
||||
786,0,3,"Harmer, Mr. Abraham (David Lishin)",male,25,0,0,374887,7.25,,S
|
||||
787,1,3,"Sjoblom, Miss. Anna Sofia",female,18,0,0,3101265,7.4958,,S
|
||||
788,0,3,"Rice, Master. George Hugh",male,8,4,1,382652,29.125,,Q
|
||||
789,1,3,"Dean, Master. Bertram Vere",male,1,1,2,C.A. 2315,20.575,,S
|
||||
790,0,1,"Guggenheim, Mr. Benjamin",male,46,0,0,PC 17593,79.2,B82 B84,C
|
||||
791,0,3,"Keane, Mr. Andrew ""Andy""",male,,0,0,12460,7.75,,Q
|
||||
792,0,2,"Gaskell, Mr. Alfred",male,16,0,0,239865,26,,S
|
||||
793,0,3,"Sage, Miss. Stella Anna",female,,8,2,CA. 2343,69.55,,S
|
||||
794,0,1,"Hoyt, Mr. William Fisher",male,,0,0,PC 17600,30.6958,,C
|
||||
795,0,3,"Dantcheff, Mr. Ristiu",male,25,0,0,349203,7.8958,,S
|
||||
796,0,2,"Otter, Mr. Richard",male,39,0,0,28213,13,,S
|
||||
797,1,1,"Leader, Dr. Alice (Farnham)",female,49,0,0,17465,25.9292,D17,S
|
||||
798,1,3,"Osman, Mrs. Mara",female,31,0,0,349244,8.6833,,S
|
||||
799,0,3,"Ibrahim Shawah, Mr. Yousseff",male,30,0,0,2685,7.2292,,C
|
||||
800,0,3,"Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert)",female,30,1,1,345773,24.15,,S
|
||||
801,0,2,"Ponesell, Mr. Martin",male,34,0,0,250647,13,,S
|
||||
802,1,2,"Collyer, Mrs. Harvey (Charlotte Annie Tate)",female,31,1,1,C.A. 31921,26.25,,S
|
||||
803,1,1,"Carter, Master. William Thornton II",male,11,1,2,113760,120,B96 B98,S
|
||||
804,1,3,"Thomas, Master. Assad Alexander",male,0.42,0,1,2625,8.5167,,C
|
||||
805,1,3,"Hedman, Mr. Oskar Arvid",male,27,0,0,347089,6.975,,S
|
||||
806,0,3,"Johansson, Mr. Karl Johan",male,31,0,0,347063,7.775,,S
|
||||
807,0,1,"Andrews, Mr. Thomas Jr",male,39,0,0,112050,0,A36,S
|
||||
808,0,3,"Pettersson, Miss. Ellen Natalia",female,18,0,0,347087,7.775,,S
|
||||
809,0,2,"Meyer, Mr. August",male,39,0,0,248723,13,,S
|
||||
810,1,1,"Chambers, Mrs. Norman Campbell (Bertha Griggs)",female,33,1,0,113806,53.1,E8,S
|
||||
811,0,3,"Alexander, Mr. William",male,26,0,0,3474,7.8875,,S
|
||||
812,0,3,"Lester, Mr. James",male,39,0,0,A/4 48871,24.15,,S
|
||||
813,0,2,"Slemen, Mr. Richard James",male,35,0,0,28206,10.5,,S
|
||||
814,0,3,"Andersson, Miss. Ebba Iris Alfrida",female,6,4,2,347082,31.275,,S
|
||||
815,0,3,"Tomlin, Mr. Ernest Portage",male,30.5,0,0,364499,8.05,,S
|
||||
816,0,1,"Fry, Mr. Richard",male,,0,0,112058,0,B102,S
|
||||
817,0,3,"Heininen, Miss. Wendla Maria",female,23,0,0,STON/O2. 3101290,7.925,,S
|
||||
818,0,2,"Mallet, Mr. Albert",male,31,1,1,S.C./PARIS 2079,37.0042,,C
|
||||
819,0,3,"Holm, Mr. John Fredrik Alexander",male,43,0,0,C 7075,6.45,,S
|
||||
820,0,3,"Skoog, Master. Karl Thorsten",male,10,3,2,347088,27.9,,S
|
||||
821,1,1,"Hays, Mrs. Charles Melville (Clara Jennings Gregg)",female,52,1,1,12749,93.5,B69,S
|
||||
822,1,3,"Lulic, Mr. Nikola",male,27,0,0,315098,8.6625,,S
|
||||
823,0,1,"Reuchlin, Jonkheer. John George",male,38,0,0,19972,0,,S
|
||||
824,1,3,"Moor, Mrs. (Beila)",female,27,0,1,392096,12.475,E121,S
|
||||
825,0,3,"Panula, Master. Urho Abraham",male,2,4,1,3101295,39.6875,,S
|
||||
826,0,3,"Flynn, Mr. John",male,,0,0,368323,6.95,,Q
|
||||
827,0,3,"Lam, Mr. Len",male,,0,0,1601,56.4958,,S
|
||||
828,1,2,"Mallet, Master. Andre",male,1,0,2,S.C./PARIS 2079,37.0042,,C
|
||||
829,1,3,"McCormack, Mr. Thomas Joseph",male,,0,0,367228,7.75,,Q
|
||||
830,1,1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62,0,0,113572,80,B28,
|
||||
831,1,3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15,1,0,2659,14.4542,,C
|
||||
832,1,2,"Richards, Master. George Sibley",male,0.83,1,1,29106,18.75,,S
|
||||
833,0,3,"Saad, Mr. Amin",male,,0,0,2671,7.2292,,C
|
||||
834,0,3,"Augustsson, Mr. Albert",male,23,0,0,347468,7.8542,,S
|
||||
835,0,3,"Allum, Mr. Owen George",male,18,0,0,2223,8.3,,S
|
||||
836,1,1,"Compton, Miss. Sara Rebecca",female,39,1,1,PC 17756,83.1583,E49,C
|
||||
837,0,3,"Pasic, Mr. Jakob",male,21,0,0,315097,8.6625,,S
|
||||
838,0,3,"Sirota, Mr. Maurice",male,,0,0,392092,8.05,,S
|
||||
839,1,3,"Chip, Mr. Chang",male,32,0,0,1601,56.4958,,S
|
||||
840,1,1,"Marechal, Mr. Pierre",male,,0,0,11774,29.7,C47,C
|
||||
841,0,3,"Alhomaki, Mr. Ilmari Rudolf",male,20,0,0,SOTON/O2 3101287,7.925,,S
|
||||
842,0,2,"Mudd, Mr. Thomas Charles",male,16,0,0,S.O./P.P. 3,10.5,,S
|
||||
843,1,1,"Serepeca, Miss. Augusta",female,30,0,0,113798,31,,C
|
||||
844,0,3,"Lemberopolous, Mr. Peter L",male,34.5,0,0,2683,6.4375,,C
|
||||
845,0,3,"Culumovic, Mr. Jeso",male,17,0,0,315090,8.6625,,S
|
||||
846,0,3,"Abbing, Mr. Anthony",male,42,0,0,C.A. 5547,7.55,,S
|
||||
847,0,3,"Sage, Mr. Douglas Bullen",male,,8,2,CA. 2343,69.55,,S
|
||||
848,0,3,"Markoff, Mr. Marin",male,35,0,0,349213,7.8958,,C
|
||||
849,0,2,"Harper, Rev. John",male,28,0,1,248727,33,,S
|
||||
850,1,1,"Goldenberg, Mrs. Samuel L (Edwiga Grabowska)",female,,1,0,17453,89.1042,C92,C
|
||||
851,0,3,"Andersson, Master. Sigvard Harald Elias",male,4,4,2,347082,31.275,,S
|
||||
852,0,3,"Svensson, Mr. Johan",male,74,0,0,347060,7.775,,S
|
||||
853,0,3,"Boulos, Miss. Nourelain",female,9,1,1,2678,15.2458,,C
|
||||
854,1,1,"Lines, Miss. Mary Conover",female,16,0,1,PC 17592,39.4,D28,S
|
||||
855,0,2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44,1,0,244252,26,,S
|
||||
856,1,3,"Aks, Mrs. Sam (Leah Rosen)",female,18,0,1,392091,9.35,,S
|
||||
857,1,1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45,1,1,36928,164.8667,,S
|
||||
858,1,1,"Daly, Mr. Peter Denis ",male,51,0,0,113055,26.55,E17,S
|
||||
859,1,3,"Baclini, Mrs. Solomon (Latifa Qurban)",female,24,0,3,2666,19.2583,,C
|
||||
860,0,3,"Razi, Mr. Raihed",male,,0,0,2629,7.2292,,C
|
||||
861,0,3,"Hansen, Mr. Claus Peter",male,41,2,0,350026,14.1083,,S
|
||||
862,0,2,"Giles, Mr. Frederick Edward",male,21,1,0,28134,11.5,,S
|
||||
863,1,1,"Swift, Mrs. Frederick Joel (Margaret Welles Barron)",female,48,0,0,17466,25.9292,D17,S
|
||||
864,0,3,"Sage, Miss. Dorothy Edith ""Dolly""",female,,8,2,CA. 2343,69.55,,S
|
||||
865,0,2,"Gill, Mr. John William",male,24,0,0,233866,13,,S
|
||||
866,1,2,"Bystrom, Mrs. (Karolina)",female,42,0,0,236852,13,,S
|
||||
867,1,2,"Duran y More, Miss. Asuncion",female,27,1,0,SC/PARIS 2149,13.8583,,C
|
||||
868,0,1,"Roebling, Mr. Washington Augustus II",male,31,0,0,PC 17590,50.4958,A24,S
|
||||
869,0,3,"van Melkebeke, Mr. Philemon",male,,0,0,345777,9.5,,S
|
||||
870,1,3,"Johnson, Master. Harold Theodor",male,4,1,1,347742,11.1333,,S
|
||||
871,0,3,"Balkic, Mr. Cerin",male,26,0,0,349248,7.8958,,S
|
||||
872,1,1,"Beckwith, Mrs. Richard Leonard (Sallie Monypeny)",female,47,1,1,11751,52.5542,D35,S
|
||||
873,0,1,"Carlsson, Mr. Frans Olof",male,33,0,0,695,5,B51 B53 B55,S
|
||||
874,0,3,"Vander Cruyssen, Mr. Victor",male,47,0,0,345765,9,,S
|
||||
875,1,2,"Abelson, Mrs. Samuel (Hannah Wizosky)",female,28,1,0,P/PP 3381,24,,C
|
||||
876,1,3,"Najib, Miss. Adele Kiamie ""Jane""",female,15,0,0,2667,7.225,,C
|
||||
877,0,3,"Gustafsson, Mr. Alfred Ossian",male,20,0,0,7534,9.8458,,S
|
||||
878,0,3,"Petroff, Mr. Nedelio",male,19,0,0,349212,7.8958,,S
|
||||
879,0,3,"Laleff, Mr. Kristo",male,,0,0,349217,7.8958,,S
|
||||
880,1,1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56,0,1,11767,83.1583,C50,C
|
||||
881,1,2,"Shelley, Mrs. William (Imanita Parrish Hall)",female,25,0,1,230433,26,,S
|
||||
882,0,3,"Markun, Mr. Johann",male,33,0,0,349257,7.8958,,S
|
||||
883,0,3,"Dahlberg, Miss. Gerda Ulrika",female,22,0,0,7552,10.5167,,S
|
||||
884,0,2,"Banfield, Mr. Frederick James",male,28,0,0,C.A./SOTON 34068,10.5,,S
|
||||
885,0,3,"Sutehall, Mr. Henry Jr",male,25,0,0,SOTON/OQ 392076,7.05,,S
|
||||
886,0,3,"Rice, Mrs. William (Margaret Norton)",female,39,0,5,382652,29.125,,Q
|
||||
887,0,2,"Montvila, Rev. Juozas",male,27,0,0,211536,13,,S
|
||||
888,1,1,"Graham, Miss. Margaret Edith",female,19,0,0,112053,30,B42,S
|
||||
889,0,3,"Johnston, Miss. Catherine Helen ""Carrie""",female,,1,2,W./C. 6607,23.45,,S
|
||||
890,1,1,"Behr, Mr. Karl Howell",male,26,0,0,111369,30,C148,C
|
||||
891,0,3,"Dooley, Mr. Patrick",male,32,0,0,370376,7.75,,Q
|
|
File diff suppressed because it is too large
Load Diff
Binary file not shown.
After Width: | Height: | Size: 56 KiB |
Binary file not shown.
After Width: | Height: | Size: 31 KiB |
|
@ -0,0 +1,388 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="373.29242mm"
|
||||
height="183.85847mm"
|
||||
viewBox="0 0 1322.6897 651.46702"
|
||||
id="svg2"
|
||||
version="1.1"
|
||||
inkscape:version="0.91 r13725"
|
||||
sodipodi:docname="schema-stack.svg">
|
||||
<defs
|
||||
id="defs4">
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lend"
|
||||
orient="auto"
|
||||
refY="0.0"
|
||||
refX="0.0"
|
||||
id="Arrow1Lend"
|
||||
style="overflow:visible;"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
id="path4828"
|
||||
d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
|
||||
style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
|
||||
transform="scale(0.8) rotate(180) translate(12.5,0)" />
|
||||
</marker>
|
||||
<marker
|
||||
inkscape:stockid="Arrow1Lend"
|
||||
orient="auto"
|
||||
refY="0"
|
||||
refX="0"
|
||||
id="Arrow1Lend-4"
|
||||
style="overflow:visible"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
inkscape:connector-curvature="0"
|
||||
id="path4828-3"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 0,0 Z"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
transform="matrix(-0.8,0,0,-0.8,-10,0)" />
|
||||
</marker>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="0.73827934"
|
||||
inkscape:cx="637.12067"
|
||||
inkscape:cy="88.934279"
|
||||
inkscape:document-units="px"
|
||||
inkscape:current-layer="layer1"
|
||||
showgrid="false"
|
||||
inkscape:snap-bbox="true"
|
||||
inkscape:snap-others="false"
|
||||
inkscape:bbox-nodes="true"
|
||||
inkscape:window-width="1869"
|
||||
inkscape:window-height="1056"
|
||||
inkscape:window-x="51"
|
||||
inkscape:window-y="24"
|
||||
inkscape:window-maximized="1"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0"
|
||||
inkscape:snap-global="false">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid4309"
|
||||
originx="750.55045"
|
||||
originy="-1020.5968" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata7">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(750.55045,619.70162)">
|
||||
<g
|
||||
id="g4804"
|
||||
transform="translate(201.82063,-660.99642)">
|
||||
<g
|
||||
transform="translate(-78.561049,14.899509)"
|
||||
id="g4716">
|
||||
<rect
|
||||
style="opacity:1;fill:#b3b3b3;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-6"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-850.25696"
|
||||
y="222.6523" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ff6600;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-2"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-710.1665"
|
||||
y="222.6523" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-9"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-850.25696"
|
||||
y="296.03296" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ffccaa;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-5-6-1"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-710.1665"
|
||||
y="296.03296" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-6"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-850.25696"
|
||||
y="369.41382" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ffccaa;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-9-06"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-710.1665"
|
||||
y="369.41382" />
|
||||
<rect
|
||||
style="opacity:1;fill:#37c871;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-4-1"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-570.07593"
|
||||
y="222.6523" />
|
||||
<rect
|
||||
style="opacity:1;fill:#0066ff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-92-8"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-429.98541"
|
||||
y="222.6523" />
|
||||
<rect
|
||||
style="opacity:1;fill:#afe9c6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-5-7"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-570.07593"
|
||||
y="296.03296" />
|
||||
<rect
|
||||
style="opacity:1;fill:#aaccff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-5-6-7-9"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-429.98541"
|
||||
y="296.03296" />
|
||||
<rect
|
||||
style="opacity:1;fill:#afe9c6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-4-2"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-570.07593"
|
||||
y="369.41382" />
|
||||
<rect
|
||||
style="opacity:1;fill:#aaccff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-9-9-0"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="-429.98541"
|
||||
y="369.41382" />
|
||||
</g>
|
||||
</g>
|
||||
<g
|
||||
id="g4778"
|
||||
transform="translate(79.91555,-623.07039)">
|
||||
<g
|
||||
transform="translate(-2.5424194e-5,0)"
|
||||
id="g4751">
|
||||
<rect
|
||||
style="opacity:1;fill:#b3b3b3;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-6-1"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="73.029778" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-9-6"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="145.70103" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-6-2"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="218.3723" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-6-1-7"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="291.04355" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-9-6-5"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="363.71481" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-6-2-5"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="436.38608" />
|
||||
<rect
|
||||
style="opacity:1;fill:#e6e6e6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-6-2-5-8"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="31.82366"
|
||||
y="509.05734" />
|
||||
</g>
|
||||
<g
|
||||
transform="translate(-3.4960937e-5,0)"
|
||||
id="g4760">
|
||||
<rect
|
||||
style="opacity:1;fill:#666666;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-2-0"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="73.029778" />
|
||||
<rect
|
||||
style="opacity:1;fill:#37c871;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-5-6-1-3"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="145.70103" />
|
||||
<rect
|
||||
style="opacity:1;fill:#37c871;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-9-06-0"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="218.3723" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ff6600;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-2-0-4"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="291.04355" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ff6600;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-5-6-1-3-2"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="363.71481" />
|
||||
<rect
|
||||
style="opacity:1;fill:#0066ff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-9-06-0-4"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="436.38608" />
|
||||
<rect
|
||||
style="opacity:1;fill:#0066ff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-64-7-9-06-0-4-6"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="171.91418"
|
||||
y="509.05734" />
|
||||
</g>
|
||||
<g
|
||||
transform="translate(2.6074219e-5,0)"
|
||||
id="g4769">
|
||||
<rect
|
||||
style="opacity:1;fill:#666666;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-4-1-6"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="73.029778" />
|
||||
<rect
|
||||
style="opacity:1;fill:#afe9c6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-5-7-5"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="145.70103" />
|
||||
<rect
|
||||
style="opacity:1;fill:#afe9c6;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-4-2-4"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="218.3723" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ffccaa;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-4-1-6-7"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="291.04355" />
|
||||
<rect
|
||||
style="opacity:1;fill:#ffccaa;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-3-9-5-7-5-4"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="363.71481" />
|
||||
<rect
|
||||
style="opacity:1;fill:#aaccff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-4-2-4-0"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="436.38608" />
|
||||
<rect
|
||||
style="opacity:1;fill:#aaccff;fill-opacity:1;stroke:none;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
id="rect4136-8-8-4-2-4-0-8"
|
||||
width="133.41954"
|
||||
height="66.70977"
|
||||
x="312.0047"
|
||||
y="509.05734" />
|
||||
</g>
|
||||
</g>
|
||||
<path
|
||||
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend);stroke-miterlimit:4;stroke-dasharray:none"
|
||||
d="m -141.02507,-378.64834 219.42914,0"
|
||||
id="path4819"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
style="fill:none;fill-rule:evenodd;stroke:#000000;stroke-width:2;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#Arrow1Lend-4);stroke-miterlimit:4;stroke-dasharray:none"
|
||||
d="m 80.224095,-271.64277 -219.429135,0"
|
||||
id="path4819-1"
|
||||
inkscape:connector-curvature="0" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:300;font-stretch:normal;font-size:60.94342422px;line-height:125%;font-family:Roboto;-inkscape-font-specification:'Roboto Light';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="-114.53722"
|
||||
y="-403.33038"
|
||||
id="text5104"
|
||||
sodipodi:linespacing="125%"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan5106"
|
||||
x="-114.53722"
|
||||
y="-403.33038">stack</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:300;font-stretch:normal;font-size:60.94342422px;line-height:125%;font-family:Roboto;-inkscape-font-specification:'Roboto Light';text-align:start;letter-spacing:0px;word-spacing:0px;writing-mode:lr-tb;text-anchor:start;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
x="-149.12001"
|
||||
y="-198.62782"
|
||||
id="text5104-4"
|
||||
sodipodi:linespacing="125%"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan5106-9"
|
||||
x="-149.12001"
|
||||
y="-198.62782">unstack</tspan></text>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 15 KiB |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue