
Commit

Binder setup (#59)
* Adding default-jre to apt.txt

* changed data_dir location

* Trying to fix paths

* bugfix

* Ubuntu font handling for graph labels

* South Sudan data exported to vision

* Added weather.dat URL

* Fixed adjective data loading behavior

* added adjective data downloading

* Adding postBuild to enable table of contents

* Fixed url issue with unfiltered pi_mtg_demo statements

* bugfix, removed hairball in exploring unknown unknowns directory

* Added missing import
adarshp authored Aug 4, 2018
1 parent 9e7df5a commit fbb7fc3
Showing 10 changed files with 66 additions and 68 deletions.
1 change: 1 addition & 0 deletions apt.txt
@@ -1,3 +1,4 @@
default-jre
python3-dev
graphviz
libgraphviz-dev
7 changes: 7 additions & 0 deletions data/postBuild
@@ -0,0 +1,7 @@
jupyter contrib nbextension install --user
jupyter nbextension enable --py widgetsnbextension
jupyter nbextension enable toc2/main

# Notebooks w/ extensions that auto-run code must be "trusted" to work the first
# time
jupyter trust notebooks/*.ipynb
2 changes: 1 addition & 1 deletion delphi/AnalysisGraph.py
@@ -114,7 +114,7 @@ def infer_transition_model(
if adjective_data is None:
adjective_data = adjectiveData

gb = pd.read_csv(adjectiveData, delim_whitespace=True).groupby(
gb = pd.read_csv(adjective_data, delim_whitespace=True).groupby(
"adjective"
)

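The fix above matters because the function previously ignored a caller-supplied adjective_data and always re-read the packaged adjectiveData file. A minimal sketch of the corrected fallback pattern (the helper name and default path here are hypothetical, for illustration only):

import pandas as pd

def load_adjective_groups(adjective_data=None, default_path="adjectiveData.tsv"):
    # Fall back to the packaged default only when no source is given, then read
    # from the resolved variable so a caller-supplied path or URL stream is used.
    if adjective_data is None:
        adjective_data = default_path
    return pd.read_csv(adjective_data, delim_whitespace=True).groupby("adjective")
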
12 changes: 7 additions & 5 deletions delphi/assembly.py
@@ -1,6 +1,6 @@
from datetime import datetime
from delphi.paths import concept_to_indicator_mapping, data_dir
from .utils import exists, flatMap, flatten
from .utils import exists, flatMap, flatten, get_data_from_url
from .random_variables import Delta, Indicator
from typing import *
from indra.statements import Influence, Concept
@@ -229,8 +229,9 @@ def get_data(filename: str) -> pd.DataFrame:
return df


def get_mean_precipitation(year: int, cycles_output=data_dir + "/weather.dat"):
df = pd.read_table(cycles_output)
def get_mean_precipitation(year: int):
url="http://vision.cs.arizona.edu/adarsh/export/demos/data/weather.dat"
df = pd.read_table(get_data_from_url(url))
df.columns = df.columns.str.strip()
df.columns = [c + f" ({df.iloc[0][c].strip()})" for c in df.columns]
df.drop([0], axis=0, inplace=True)
@@ -278,11 +279,12 @@ def process_variable_name(x: str):
return " ".join(xs[0:2])


def construct_concept_to_indicator_mapping(n: int = 2) -> Dict[str, List[str]]:
def construct_concept_to_indicator_mapping(n: int = 2,
mapping=concept_to_indicator_mapping) -> Dict[str, List[str]]:
""" Create a dictionary mapping high-level concepts to low-level indicators """

df = pd.read_table(
concept_to_indicator_mapping,
mapping,
usecols=[1, 3, 4],
names=["Concept Grounding", "Indicator Grounding", "Score"],
)
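A usage sketch of the two reworked helpers, assuming the demo export URLs that appear elsewhere in this commit; passing a urlopen stream works because pandas readers accept file-like objects:

from delphi.assembly import get_mean_precipitation, construct_concept_to_indicator_mapping
from delphi.utils import get_data_from_url

# weather.dat is now fetched from the demo export rather than from data_dir
precip_2014 = get_mean_precipitation(2014)

# the concept-to-indicator mapping source can be overridden, e.g. with a URL stream
url = "http://vision.cs.arizona.edu/adarsh/export/demos/data/concept_to_indicator_mapping.txt"
concept_to_indicators = construct_concept_to_indicator_mapping(n=2, mapping=get_data_from_url(url))
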
14 changes: 12 additions & 2 deletions delphi/export.py
@@ -10,6 +10,16 @@
import pickle
from .execution import construct_default_initial_state
from datetime import datetime
import platform

operating_system = platform.system()

if operating_system == "Darwin":
font = "Gill Sans"
elif operating_system == "Windows":
font = "Candara"
else:
font = "Ubuntu"

# ==========================================================================
# Export
@@ -33,7 +43,7 @@ def to_agraph(G, *args, **kwargs) -> AGraph:
"dpi": 227,
"fontsize": 20,
"rankdir": kwargs.get("rankdir", "TB"),
"fontname": "Gill Sans",
"fontname": font,
}
)

@@ -42,7 +52,7 @@ def to_agraph(G, *args, **kwargs) -> AGraph:
"shape": "rectangle",
"color": "#650021",
"style": "rounded",
"fontname": "Gill Sans",
"fontname": font,
}
)

12 changes: 3 additions & 9 deletions delphi/parameterization.py
@@ -7,23 +7,17 @@


def parameterize(
G: AnalysisGraph, time: datetime, data: Optional[pd.DataFrame] = None
G: AnalysisGraph, time: datetime, data = south_sudan_data
) -> AnalysisGraph:
""" Parameterize the analysis graph.
Args:
G
time
data
datafile
"""

if data is not None:
G.data = data
else:
if G.data is None:
G.data = get_data(south_sudan_data)
else:
pass
G.data = get_data(data)

nodes_with_indicators = [
n for n in G.nodes(data=True) if n[1]["indicators"] is not None
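With the simplified signature, `data` defaults to the packaged South Sudan dataset and an explicit argument is passed straight to get_data. A sketch of the call the demo notebook now makes, wrapped in a hypothetical helper so it is self-contained (G is an AnalysisGraph built earlier):

from datetime import datetime
from delphi.parameterization import parameterize
from delphi.utils import get_data_from_url

def parameterize_for_2014(G):
    # G: an AnalysisGraph assembled earlier, e.g. AnalysisGraph.from_statements(sts)
    url = "http://vision.cs.arizona.edu/adarsh/export/demos/data/south_sudan_data.csv"
    return parameterize(G, datetime(2014, 1, 1), get_data_from_url(url))
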
2 changes: 1 addition & 1 deletion delphi/program_analysis/scopes.py
@@ -82,7 +82,7 @@ def to_agraph(self):
elif operating_system == "Windows":
font = "Consolas"
else:
font = "Courier"
font = "Ubuntu Mono"

A.node_attr["fontname"] = font
A.graph_attr["fontname"] = font
7 changes: 2 additions & 5 deletions delphi/quantification.py
@@ -8,7 +8,7 @@


def map_concepts_to_indicators(
G: AnalysisGraph, n: int = 1, manual_mapping: Optional[dict] = None
G: AnalysisGraph, n: int = 1, mapping_file: Optional[str] = None
) -> AnalysisGraph:
""" Add indicators to the analysis graph.
@@ -17,14 +17,11 @@ def map_concepts_to_indicators(
n
manual_mapping
"""
mapping = construct_concept_to_indicator_mapping(n=n)
mapping = construct_concept_to_indicator_mapping(n, mapping_file)

for n in G.nodes(data=True):
n[1]["indicators"] = get_indicators(
n[0].lower().replace(" ", "_"), mapping
)
if manual_mapping is not None:
if n[0] in manual_mapping:
n[1]["indicators"] = manual_mapping[n[0]]

return G
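
The `manual_mapping` override is gone; instead the mapping source itself is passed through to construct_concept_to_indicator_mapping. A sketch mirroring the demo notebook's call (the wrapper name is hypothetical):

from delphi.quantification import map_concepts_to_indicators
from delphi.utils import get_data_from_url

def attach_indicators(G, n=2):
    # Pass the mapping file (here a URL stream, as in the notebook) straight through.
    url = ("http://vision.cs.arizona.edu/adarsh/export/demos/data/"
           "concept_to_indicator_mapping.txt")
    return map_concepts_to_indicators(G, n, get_data_from_url(url))
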
4 changes: 4 additions & 0 deletions delphi/utils.py
@@ -281,6 +281,10 @@ def download_file(url: str, filename: str):
request.urlretrieve(url, filename, reporthook)


def get_data_from_url(url: str):
return request.urlopen(url)


def _change_directory(destination_directory):
cwd = os.getcwd()
os.chdir(destination_directory)
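get_data_from_url simply returns the file-like object from urllib.request.urlopen, so its result can be handed directly to readers that accept file objects. A short sketch using two of the URLs introduced in this commit:

import pickle
import pandas as pd
from delphi.utils import get_data_from_url

sts = pickle.load(get_data_from_url(
    "http://vision.cs.arizona.edu/adarsh/export/demos/data/pi_mtg_demo.pkl"))
weather = pd.read_table(get_data_from_url(
    "http://vision.cs.arizona.edu/adarsh/export/demos/data/weather.dat"))
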
73 changes: 28 additions & 45 deletions notebooks/PI Meeting 2018 Demo.ipynb
@@ -65,11 +65,10 @@
"metadata": {},
"outputs": [],
"source": [
"import urllib.request\n",
"from delphi.utils import get_data_from_url\n",
"import pickle\n",
"url = 'http://vision.cs.arizona.edu/adarsh/export/demos/data/pi_mtg_demo.pkl'\n",
"with urllib.request.urlopen(url) as f:\n",
" sts = pickle.load(f) "
"sts = pickle.load(get_data_from_url(url)) "
]
},
{
@@ -195,6 +194,16 @@
"## Mapping concepts to indicators"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -206,7 +215,9 @@
"outputs": [],
"source": [
"from delphi.quantification import map_concepts_to_indicators\n",
"map_concepts_to_indicators(G, 2)\n",
"url = 'http://vision.cs.arizona.edu/adarsh/export/demos/data/concept_to_indicator_mapping.txt'\n",
"mapping = get_data_from_url(url)\n",
"map_concepts_to_indicators(G, 2, mapping)\n",
"visualize(G, indicators=True)"
]
},
@@ -233,6 +244,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": false,
"slideshow": {
"slide_type": "subslide"
}
@@ -242,7 +254,8 @@
"from datetime import datetime\n",
"from delphi.parameterization import parameterize\n",
"date = datetime(2014, 1, 1)\n",
"parameterize(G, datetime(2014, 1,1))\n",
"url = 'http://vision.cs.arizona.edu/adarsh/export/demos/data/south_sudan_data.csv'\n",
"parameterize(G, datetime(2014, 1,1), get_data_from_url(url))\n",
"visualize(G, indicators=True, indicator_values = True,\n",
" graph_label=f'Causal Analysis Graph for South Sudan, {date.year}')"
]
@@ -258,15 +271,6 @@
"## Infer transition model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"jt.display_image('images/delphi_model.png')"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -277,7 +281,8 @@
},
"outputs": [],
"source": [
"%time G.infer_transition_model(res=10000)"
"url = 'http://vision.cs.arizona.edu/adarsh/export/demos/data/adjectiveData.tsv'\n",
"G.infer_transition_model(get_data_from_url(url), 10000)"
]
},
{
@@ -317,16 +322,6 @@
"## Execute model"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -422,25 +417,7 @@
"outputs": [],
"source": [
"url = 'http://vision.cs.arizona.edu/adarsh/export/demos/data/pi_mtg_demo_unfiltered.pkl'\n",
"with urllib.request.urlopen(url) as f:\n",
" sts = pickle.load(f) "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"slideshow": {
"slide_type": "subslide"
}
},
"outputs": [],
"source": [
"from delphi.subgraphs import *\n",
"G = AnalysisGraph.from_statements(sts)\n",
"merge_nodes(G, 'food_security', 'food_insecurity', same_polarity=False)\n",
"G = get_subgraph_for_concept(G, 'food_insecurity', depth_limit=1)\n",
"visualize(G, nodes_to_highlight='food_insecurity')"
"sts=pickle.load(get_data_from_url(url))"
]
},
{
@@ -453,6 +430,7 @@
},
"outputs": [],
"source": [
"from delphi.subgraphs import get_subgraph_for_concept_pair\n",
"G = AnalysisGraph.from_statements(sts)\n",
"merge_nodes(G, 'food_security', 'food_insecurity', same_polarity=False)\n",
"G = get_subgraph_for_concept_pair(G, 'drought', 'food_insecurity', cutoff=2)\n",
@@ -798,7 +776,12 @@
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": false,
"toc_position": {},
"toc_position": {
"height": "calc(100% - 180px)",
"left": "10px",
"top": "150px",
"width": "165px"
},
"toc_section_display": true,
"toc_window_display": true
}
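Taken together, the notebook changes make the demo runnable on Binder with no local data: every input is streamed from the vision.cs.arizona.edu export. A condensed end-to-end sketch of the cells above (import paths for AnalysisGraph are an assumption, since the notebook's setup cells are not part of this diff):

import pickle
from datetime import datetime

from delphi.AnalysisGraph import AnalysisGraph          # assumed import path
from delphi.quantification import map_concepts_to_indicators
from delphi.parameterization import parameterize
from delphi.utils import get_data_from_url

BASE = "http://vision.cs.arizona.edu/adarsh/export/demos/data"

# Statements -> graph -> indicators -> parameterization -> sampled transition model,
# following the notebook cells above; all data is streamed from the demo export.
sts = pickle.load(get_data_from_url(f"{BASE}/pi_mtg_demo.pkl"))
G = AnalysisGraph.from_statements(sts)
map_concepts_to_indicators(G, 2, get_data_from_url(f"{BASE}/concept_to_indicator_mapping.txt"))
parameterize(G, datetime(2014, 1, 1), get_data_from_url(f"{BASE}/south_sudan_data.csv"))
G.infer_transition_model(get_data_from_url(f"{BASE}/adjectiveData.tsv"), 10000)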
