The app shows weather and basic information — population and GDP per capita — for 10 cities: 1. New York City, United States (North America) 2. Tokyo, Japan (East Asia) 3. São Paulo, Brazil (South America) 4. Lagos, Nigeria (Africa) 5. Paris, France (Western Europe) 6. Delhi, India (South Asia) 7. Sydney, Australia (Oceania) 8. Dubai, United Arab Emirates (Middle East) 9. London, United Kingdom (Western Europe) 10. Mexico City, Mexico (Central America). The application pulls data from a weather API and Wikipedia.
To upload files, please first save the app
import streamlit as st
import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
from datetime import datetime
# Static reference data for the ten dashboard cities: home country, world
# region (used for tab grouping), and "lat,lon" coordinates for the weather API.
_CITY_RECORDS = [
    ('New York City', 'United States', 'North America', '40.7128,-74.0060'),
    ('Tokyo', 'Japan', 'East Asia', '35.6762,139.6503'),
    ('São Paulo', 'Brazil', 'South America', '-23.5505,-46.6333'),
    ('Lagos', 'Nigeria', 'Africa', '6.5244,3.3792'),
    ('Paris', 'France', 'Western Europe', '48.8566,2.3522'),
    ('Delhi', 'India', 'South Asia', '28.6139,77.2090'),
    ('Sydney', 'Australia', 'Oceania', '-33.8688,151.2093'),
    ('Dubai', 'United Arab Emirates', 'Middle East', '25.2048,55.2708'),
    ('London', 'United Kingdom', 'Western Europe', '51.5074,-0.1278'),
    ('Mexico City', 'Mexico', 'Central America', '19.4326,-99.1332'),
]
CITIES = {
    name: {'country': country, 'region': region, 'coords': coords}
    for name, country, region, coords in _CITY_RECORDS
}
# Page title rendered at the top of the Streamlit app
st.title('Global Cities Dashboard')
# Function to get weather data
@st.cache_data(ttl=1800)  # Cache for 30 minutes
def get_weather(coords):
    """Fetch current weather for a "lat,lon" coordinate string.

    Args:
        coords: Comma-separated "latitude,longitude" string, e.g. "35.67,139.65".

    Returns:
        The Open-Meteo ``current_weather`` dict on success, or ``None`` on any
        failure (the error is also surfaced in the UI via ``st.error``).
    """
    # NOTE: Using corsproxy.io because we're in a WASM environment. If running
    # locally, you can remove the corsproxy.io prefix.
    base_url = "https://corsproxy.io/?https://api.open-meteo.com/v1/forecast"
    latitude, longitude = coords.split(',')  # split once instead of twice
    params = {
        "latitude": latitude,
        "longitude": longitude,
        "current_weather": True,
    }
    try:
        # FIX: a timeout keeps a stalled request from hanging the whole app;
        # requests.get has no timeout by default.
        response = requests.get(base_url, params=params, timeout=10)
        response.raise_for_status()
        return response.json()['current_weather']
    except Exception as e:
        st.error(f"Error fetching weather data: {str(e)}")
        return None
# Function to get city info from Wikipedia
@st.cache_data(ttl=86400)  # Cache for 24 hours
def get_city_info(city):
    """Scrape population and GDP snippets from the city's Wikipedia infobox.

    Args:
        city: City name as it appears in the Wikipedia article title
            (spaces are converted to underscores for the URL).

    Returns:
        Dict with ``'population'`` and ``'gdp'`` string values; placeholder
        text is returned whenever the data cannot be found or fetched. Always
        returns a dict (never ``None``).
    """
    # NOTE: Using corsproxy.io because we're in a WASM environment.
    base_url = f"https://corsproxy.io/?https://en.wikipedia.org/wiki/{city.replace(' ', '_')}"
    population = "Population data unavailable"
    gdp = "GDP data unavailable"
    try:
        # FIX: timeout so a stalled Wikipedia fetch can't hang the app
        response = requests.get(base_url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Extract population and GDP info from infobox
        infobox = soup.find('table', {'class': 'infobox'})
        if infobox:
            # Simple extraction - in real application would need more robust parsing
            for row in infobox.find_all('tr'):
                cells = row.find_all('td')
                if not cells:
                    # FIX: header-only rows have no <td>; the original's
                    # find_all('td')[-1] raised IndexError and aborted the scan
                    continue
                if 'Population' in str(row):
                    population = cells[-1].get_text().strip()
                if 'GDP' in str(row):
                    gdp = cells[-1].get_text().strip()
        # FIX: the original only returned inside `if infobox:`, so pages with
        # no infobox made the function return None and callers crashed on
        # info['population']. Always return a dict.
        return {'population': population, 'gdp': gdp}
    except Exception as e:
        st.error(f"Error fetching city info: {str(e)}")
        return {'population': 'Data unavailable', 'gdp': 'Data unavailable'}
# One tab per world region, sorted alphabetically for a stable tab order.
regions = sorted({details['region'] for details in CITIES.values()})
tabs = st.tabs(regions)
# Group city names under their region so each tab can render its own cities.
cities_by_region = {region: [] for region in regions}
for name, details in CITIES.items():
    cities_by_region[details['region']].append(name)
# Render every region tab: a weather/info card for each of its cities.
for region, tab in zip(regions, tabs):
    with tab:
        st.header(region)
        for city_name in cities_by_region[region]:
            st.subheader(city_name)
            weather_col, info_col = st.columns(2)
            with weather_col:
                # Current conditions from the Open-Meteo API
                current = get_weather(CITIES[city_name]['coords'])
                if current:
                    st.metric("Temperature", f"{current['temperature']}°C")
                    st.metric("Wind Speed", f"{current['windspeed']} km/h")
            with info_col:
                # Population / GDP snippets scraped from Wikipedia
                details = get_city_info(city_name)
                st.write("**Population:**", details['population'])
                st.write("**GDP Info:**", details['gdp'])
            st.divider()
# Timestamp of the current render, shown in the sidebar
st.sidebar.write("Last updated:", datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
Hi! I can help you with any questions about Streamlit and Python. What would you like to know?