diff --git a/.github/workflows/README.md b/.github/workflows/README.md index d6edf88d..7076ddd9 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -2,7 +2,7 @@ This directory contains workflows to be used for Lessons using the {sandpaper} lesson infrastructure. Two of these workflows require R (`sandpaper-main.yaml` -and `pr-recieve.yaml`) and the rest are bots to handle pull request management. +and `pr-receive.yaml`) and the rest are bots to handle pull request management. These workflows will likely change as {sandpaper} evolves, so it is important to keep them up-to-date. To do this in your lesson you can do the following in your @@ -94,7 +94,7 @@ branch called `update/workflows` and a pull request is created. Maintainers are encouraged to review the changes and accept the pull request if the outputs are okay. -This update is run ~~weekly or~~ on demand. +This update is run weekly or on demand. ### 03 Maintain: Update Package Cache (update-cache.yaml) @@ -140,7 +140,7 @@ Once the checks are finished, a comment is issued to the pull request, which will allow maintainers to determine if it is safe to run the "Receive Pull Request" workflow from new contributors. -### Recieve Pull Request (pr-recieve.yaml) +### Receive Pull Request (pr-receive.yaml) **Note of caution:** This workflow runs arbitrary code by anyone who creates a pull request. GitHub has safeguarded the token used in this workflow to have no @@ -171,7 +171,7 @@ The artifacts produced are used by the next workflow. ### Comment on Pull Request (pr-comment.yaml) -This workflow is triggered if the `pr-recieve.yaml` workflow is successful. +This workflow is triggered if the `pr-receive.yaml` workflow is successful. The steps in this workflow are: 1. 
Test if the workflow is valid and comment the validity of the workflow to the diff --git a/.github/workflows/pr-close-signal.yaml b/.github/workflows/pr-close-signal.yaml index 9b129d5d..d20a2991 100644 --- a/.github/workflows/pr-close-signal.yaml +++ b/.github/workflows/pr-close-signal.yaml @@ -16,7 +16,7 @@ jobs: mkdir -p ./pr printf ${{ github.event.number }} > ./pr/NUM - name: Upload Diff - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pr path: ./pr diff --git a/.github/workflows/pr-comment.yaml b/.github/workflows/pr-comment.yaml index bb2eb03c..8a2bd3ce 100644 --- a/.github/workflows/pr-comment.yaml +++ b/.github/workflows/pr-comment.yaml @@ -82,7 +82,7 @@ jobs: contents: write steps: - name: 'Checkout md outputs' - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: md-outputs path: built diff --git a/.github/workflows/pr-receive.yaml b/.github/workflows/pr-receive.yaml index 371ef542..2d7d5dbf 100644 --- a/.github/workflows/pr-receive.yaml +++ b/.github/workflows/pr-receive.yaml @@ -25,7 +25,7 @@ jobs: - name: "Upload PR number" id: upload if: ${{ always() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pr path: ${{ github.workspace }}/NR @@ -58,10 +58,10 @@ jobs: MD: ${{ github.workspace }}/site/built steps: - name: "Check Out Main Branch" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Check Out Staging Branch" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: md-outputs path: ${{ env.MD }} @@ -107,20 +107,20 @@ jobs: shell: Rscript {0} - name: "Upload PR" - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pr path: ${{ env.PR }} - name: "Upload Diff" - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: diff path: ${{ env.CHIVE }} retention-days: 1 - name: "Upload Build" - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: built path: ${{ env.MD }} diff 
--git a/.github/workflows/sandpaper-main.yaml b/.github/workflows/sandpaper-main.yaml index e17707ac..a4f8dc40 100644 --- a/.github/workflows/sandpaper-main.yaml +++ b/.github/workflows/sandpaper-main.yaml @@ -32,7 +32,7 @@ jobs: steps: - name: "Checkout Lesson" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Set up R" uses: r-lib/actions/setup-r@v2 diff --git a/.github/workflows/sandpaper-version.txt b/.github/workflows/sandpaper-version.txt index 201a22c8..c3f65805 100644 --- a/.github/workflows/sandpaper-version.txt +++ b/.github/workflows/sandpaper-version.txt @@ -1 +1 @@ -0.16.2 +0.16.6 diff --git a/.github/workflows/update-cache.yaml b/.github/workflows/update-cache.yaml index 676d7424..08ea9c97 100644 --- a/.github/workflows/update-cache.yaml +++ b/.github/workflows/update-cache.yaml @@ -43,7 +43,7 @@ jobs: needed: ${{ steps.renv.outputs.exists }} steps: - name: "Checkout Lesson" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - id: renv run: | if [[ -d renv ]]; then @@ -76,7 +76,7 @@ jobs: steps: - name: "Checkout Lesson" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Set up R" uses: r-lib/actions/setup-r@v2 diff --git a/.github/workflows/update-workflows.yaml b/.github/workflows/update-workflows.yaml index 288bcd13..a2d6fee1 100644 --- a/.github/workflows/update-workflows.yaml +++ b/.github/workflows/update-workflows.yaml @@ -36,7 +36,7 @@ jobs: if: ${{ needs.check_token.outputs.workflow == 'true' }} steps: - name: "Checkout Repository" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Update Workflows id: update diff --git a/.github/workflows/workshop-setup.yml b/.github/workflows/workshop-setup.yml new file mode 100644 index 00000000..9f681a3e --- /dev/null +++ b/.github/workflows/workshop-setup.yml @@ -0,0 +1,59 @@ +# A workflow for testing the workshop setup in different operating systems + +name: Test Workshop Setup + +# Controls when the action will run. 
Workflow runs when manually triggered using the UI +on: + workflow_dispatch: + +jobs: + workshop_setup: + runs-on: ${{matrix.os}} + strategy: + matrix: + # list of Os's + R: ['4.4.0'] + os: [ubuntu-latest, macos-latest, windows-latest] + steps: + - uses: actions/checkout@v4 + - name: Setup R + uses: r-lib/actions/setup-r@v2 + with: + r-version: ${{matrix.R}} + rtools-version: '44' + - run: Rscript -e 'print("R was installed successfully")' + - name: Install GDAL, GEOS, and PROJ.4 (macOS) + if: matrix.os == 'macos-latest' + run: | + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + brew update + brew tap osgeo/osgeo4mac && brew tap --repair + brew install proj + brew install geos + brew install gdal + shell: bash + - name: Install GDAL, GEOS, and PROJ.4 (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: | + sudo add-apt-repository ppa:ubuntugis -y + sudo apt-get update + sudo apt-get install libgdal-dev libgeos-dev libproj-dev -y + shell: bash + # Include Fedora and Arch? 
+ - name: UDUNITS + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt-get install libudunits2-dev -y + shell: bash + - name: Geospatial Packages + uses: r-lib/actions/setup-r-dependencies@v2 + with: + cache-version: 2 + packages: | + any::sessioninfo + any::tidyverse + any::terra + any::sf + - name: Test Lessons + run: | + Rscript -e 'nc <- sf::st_read(system.file("shape/nc.shp", package="sf"), quiet = TRUE); if (sf::st_crs(sf::st_transform(nc, 4326))$epsg == 4326) print("`sf` works as expected"); if (nrow(dplyr::filter(nc, AREA > 0.2)) == 11) print("`tidyverse` works as expected")' diff --git a/episodes/.DS_Store b/episodes/.DS_Store new file mode 100644 index 00000000..bd4890ce Binary files /dev/null and b/episodes/.DS_Store differ diff --git a/episodes/18-import-and-visualise-osm-data.Rmd b/episodes/18-import-and-visualise-osm-data.Rmd index 4c4ebc11..01dbb0a6 100644 --- a/episodes/18-import-and-visualise-osm-data.Rmd +++ b/episodes/18-import-and-visualise-osm-data.Rmd @@ -27,7 +27,7 @@ knitr::opts_chunk$set(warning = FALSE, message = FALSE) ## What is OpenStreetMap? -OpenStreetMap (OSM) is a collaborative project which aims at mapping the world and sharing geospatial data in an open way. Anyone can contribute, by mapping geographical objects they encounter, by adding topical information on existing map objects (their name, function, capacity, etc.), or by mapping buildings and roads from satellite imagery (cf. [HOT: Humanitarian OpenStreetMap Team](https://www.hotosm.org/)). +OpenStreetMap (OSM) is a collaborative project which aims at mapping the world and sharing geospatial data in an open way. Anyone can contribute, by mapping geographical objects they encounter, by adding topical information on existing map objects (their name, function, capacity, etc.), or by mapping buildings and roads from satellite imagery. This information is then validated by other users and eventually added to the common "map" or information system. 
This ensures that the information is accessible, open, verified, accurate and up-to-date. @@ -48,27 +48,29 @@ assign("has_internet_via_proxy", TRUE, environment(curl::has_internet)) ### Bounding box -The first thing to do is to define the area within which you want to retrieve data, aka the *bounding box*. This can be defined easily using a place name and the package `nominatimlite` to access the free Nominatim API provided by OpenStreetMap. - -We are going to look at *Brielle* together, but you can also work with the small cities of *Naarden*, *Geertruidenberg*, *Gorinchem*, *Enkhuizen* or *Dokkum*. +The first thing to do is to define the area within which you want to retrieve data, aka the *bounding box*. This can be defined easily using a place name and the package `osmdata` to access the free Nominatim API provided by OpenStreetMap. +We are going to look at *Brielle* together. +::::::::::::::::::::::::::::::::::::: callout +Beware that downloading and analysing the data for larger cities might be long, slow and cumbersome on your machine. If you choose another location to work with, please try to choose a city of similar size! +:::::::::::::::::::::::::::::::::::::::::::::::: -We first geocode our spatial text search and extract the corresponding polygon (`geo_lite_sf`) and then extract its bounding box (`st_bbox`). +We first geocode our spatial text search and extract the corresponding bounding box (`getbb`). -```{r} -#install.packages("nominatimlite") -library(nominatimlite) +```{r nominatim} +library(osmdata) -nominatim_polygon <- geo_lite_sf(address = "Brielle", points_only = FALSE) -bb <- st_bbox(nominatim_polygon) +bb <- osmdata::getbb("Brielle") bb ``` ::::::::::::::::::::::::::::::::::::: callout +### Overpass query unavailable without internet + If you encounter an error linked to your internet proxy ("Error: Overpass query unavailable without internet R"), run this line of code. 
It might not be needed, but ensures that your machine knows it has internet. ```{r} @@ -87,11 +89,10 @@ For example: Brielle (Netherlands) and Brielle (New Jersey) ![Brielle, New Jersey](fig/Brielle_NJ.jpeg "Brielle, New Jersey"){width=40%} -By default, `geo_lite_sf()` from the `nominatimlite` package returns the first item. This means that regardless of the number of returned locations with the given name, the function will return a bounding box and it might be that we are not looking for the first item. We should therefore try to be as unambiguous as possible by adding a country code or district name. +By default, `getbb()` from the `osmdata` package returns the first item. This means that regardless of the number of returned locations with the given name, the function will return a bounding box and it might be that we are not looking for the first item. We should therefore try to be as unambiguous as possible by adding a country code or district name. -```{r} -nominatim_polygon <- geo_lite_sf(address = "Brielle, NL", points_only = FALSE) -bb <- st_bbox(nominatim_polygon) +```{r bbox} +bb <- getbb("Brielle, NL") bb ``` @@ -127,11 +128,7 @@ It appears that there is a function to extract features, using the Overpass API. On this page we can read about the arguments needed for each function: a bounding box for `opq()` and some `key` and `value` for `add_osm_feature()`. Thanks to the examples provided, we can assume that these keys and values correspond to different levels of tags from the OSM classification. In our case, we will keep it at the first level of classification, with "buildings" as `key`, and no value. We also see from the examples that another function is needed when working with the `sf` package: `osmdata_sf()`. This ensures that the type of object is suited for `sf`. 
With these tips and examples, we can write our feature extraction function as follows: -```{r} -#install.packages("osmdata") -library(osmdata) - - +```{r osm} x <- opq(bbox = bb) %>% add_osm_feature(key = 'building') %>% osmdata_sf() @@ -145,7 +142,7 @@ What is this `x` object made of? It is a data frame of all the buildings contain -```{r} +```{r strbuildings} str(x$osm_polygons) ``` @@ -161,7 +158,7 @@ Let's map the building age of post-1900 Brielle buildings. First, we are going to select the polygons and reproject them with the Amersfoort/RD New projection, suited for maps centred on the Netherlands. This code for this projection is: 28992. -```{r} +```{r transform} buildings <- x$osm_polygons %>% st_transform(.,crs=28992) ``` @@ -180,35 +177,73 @@ Then we create a new variable using the threshold at 1900. Every date before 190 Then we use the `ggplot()` function to visualise the buildings by age. The specific function to represent information as a map is `geom_sf()`. The rest works like other graphs and visualisation, with `aes()` for the aesthetics. -```{r} +```{r map} start_date <- as.numeric(buildings$start_date) buildings$build_date <- if_else(start_date < 1900, 1900, start_date) ggplot(data = buildings) + geom_sf(aes(fill = build_date, colour=build_date)) + scale_fill_viridis_c(option = "viridis")+ - scale_colour_viridis_c(option = "viridis") + scale_colour_viridis_c(option = "viridis") + + coord_sf(datum = st_crs(28992)) ``` So this reveals the historical centre of Brielle (or the city you chose) and the various urban extensions through time. Anything odd? What? Around the centre? Why these limits / isolated points? +## Replicability + +We have produced a proof of concept on Brielle, but can we factorise our work to be replicable with other small fortified cities? You can use any of the following cities: *Naarden*, *Geertruidenberg*, *Gorinchem*, *Enkhuizen* or *Dokkum*. + +We might replace the name in the first line and run everything again. 
Or we can create a function. +```{r reproducibility} +extract_buildings <- function(cityname, year=1900){ + bb <- getbb(cityname) + + + x <- opq(bbox = bb) %>% + add_osm_feature(key = 'building') %>% + osmdata_sf() + + buildings <- x$osm_polygons %>% + st_transform(.,crs=28992) + + start_date <- as.numeric(buildings$start_date) + + buildings$build_date <- if_else(start_date < year, year, start_date) + ggplot(data = buildings) + + geom_sf(aes(fill = build_date, colour=build_date)) + + scale_fill_viridis_c(option = "viridis")+ + scale_colour_viridis_c(option = "viridis") + + ggtitle(paste0("Old buildings in ",cityname)) + + coord_sf(datum = st_crs(28992)) +} + +#test on Brielle +extract_buildings("Brielle, NL") + +#test on Naarden +extract_buildings("Naarden, NL") + +``` ::::::::::::::::::::::::::::::::::::: challenge -## Challenge: import an interactive basemap layer under the buildings with `Leaflet` (20min) +## Challenge: import an interactive basemap layer under the buildings with 'Leaflet' (20min) + +Leaflet is an ["open-source JavaScript library for mobile-friendly interactive maps"](https://leafletjs.com/). Within R, the `leaflet` package allows you to build such interactive maps. As with `ggplot2`, you build a map with a collection of layers. In this case, you will have the leaflet basemap, some tiles, and shapes on top (such as markers, polygons, etc.). -- Check out the [leaflet package documentation](https://rstudio.github.io/leaflet/) +- Check out the [leaflet package documentation](https://rstudio.github.io/leaflet/) and [GDCU cheatsheet](https://github.com/ClementineCttn/r-geospatial-urban/blob/main/instructors/cheatsheet/GDCU_cheatsheet.pdf). 
- Plot a basemap in Leaflet and try different tiles in the [basemap documentation](https://rstudio.github.io/leaflet/basemaps.html) - Transform the buildings into WGS84 projection and add them to the basemap layer with the `addPolygons()` function. -- Have the `fillColor` of these polygons represent the `build_date` variable. See the [choropleth documentation](https://rstudio.github.io/leaflet/choropleths.html) for use of colors. Tip: use the examples given in the documentation and replace the variable names where needed. +- Have the `fillColor` of these polygons represent the `build_date` variable. See the [choropleth documentation](https://rstudio.github.io/leaflet/choropleths.html) and [GDCU cheatsheet](https://github.com/ClementineCttn/r-geospatial-urban/blob/main/instructors/cheatsheet/GDCU_cheatsheet.pdf) for how to use fill colors in polygons. Tip: use the examples given in the documentation and replace the variable names where needed. :::::::::::::::::::::::: solution ## One solution -```{r} +```{r leaflet} #install.packages("leaflet") library(leaflet) @@ -216,8 +251,13 @@ library(leaflet) buildings2 <- buildings %>% st_transform(.,crs=4326) +# leaflet(buildings2) %>% +# addTiles() %>% +# addPolygons(fillColor = ~colorQuantile("YlGnBu", -build_date)(-build_date)) + + # For a better visual rendering, try: + leaflet(buildings2) %>% -# addTiles() addProviderTiles(providers$CartoDB.Positron) %>% addPolygons(color = "#444444", weight = 0.1, smoothFactor = 0.5, opacity = 0.2, fillOpacity = 0.8, @@ -226,6 +266,7 @@ leaflet(buildings2) %>% bringToFront = TRUE)) ``` + ::::::::::::::::::::::::::::::::: ::::::::::::::::::::::::::::::::::::: @@ -233,7 +274,7 @@ leaflet(buildings2) %>% ::::::::::::::::::::::::::::::::::::: keypoints - Use the `Nominatim` and `Overpass` APIs within R -- Use the `osmdata` and `nominatimlite` packages to retrieve geospatial data +- Use the `osmdata` package to retrieve geospatial data - Select features and attributes among OSM tags - 
Use the `ggplot`, `sf` and `leaflet` packages to map data diff --git a/episodes/19-basic-gis-with-r-sf.Rmd b/episodes/19-basic-gis-with-r-sf.Rmd index 669b18c8..d01b1219 100644 --- a/episodes/19-basic-gis-with-r-sf.Rmd +++ b/episodes/19-basic-gis-with-r-sf.Rmd @@ -17,7 +17,7 @@ After completing this episode, participants should be able to… - Perform geoprocessing operations such as unions, joins and intersections with dedicated functions from the `sf` package - Compute the area of spatial polygons - Create buffers and centroids -- Map the results +- Map and save the results :::::::::::::::::::::::::::::::::::::::::::::::: ```{r setup, include=FALSE} @@ -25,12 +25,12 @@ knitr::opts_chunk$set(warning = FALSE, message = FALSE) ``` -```{r message=FALSE} +```{r packages, message=FALSE} library(tidyverse) library(sf) library(osmdata) library(leaflet) -library(nominatimlite) +library(lwgeom) assign("has_internet_via_proxy", TRUE, environment(curl::has_internet)) ``` @@ -49,9 +49,9 @@ Let's focus on old buildings and imagine we're in charge of their conservation. Let's select them and see where they are. -```{r} -nominatim_polygon <- nominatimlite::geo_lite_sf(address = "Brielle, NL", points_only = FALSE) -bb <- sf::st_bbox(nominatim_polygon) +```{r recap} + +bb <- osmdata::getbb("Brielle, NL") x <- opq(bbox = bb) %>% add_osm_feature(key = 'building') %>% osmdata_sf() @@ -69,12 +69,16 @@ buildings$start_date <- as.numeric(buildings$start_date) old_buildings <- buildings %>% filter(start_date <= old) - ggplot(data = old_buildings) + geom_sf(colour="red") + ggplot(data = old_buildings) + + geom_sf(colour="red") + + coord_sf(datum = st_crs(28992)) ``` ::::::::::::::::::::::::::::::::::::: callout +### Overpass query unavailable without internet + If you encounter an error linked to your internet proxy ("Error: Overpass query unavailable without internet R"), run this line of code. It might not be needed, but ensures that your machine knows it has internet. 
```{r} @@ -89,7 +93,7 @@ As conservationists, we want to create a zone around historical buildings where Let's say the conservation zone should be 100 meters. In GIS terms, we want to create a _buffer_ around polygons. The corresponding `sf` function is `st_buffer()`, with 2 arguments: the polygons around which to create buffers, and the radius of the buffer. -```{r} +```{r buffer} distance <- 100 # in meters #First, we check that the "old_buildings" layer projection is measured in meters: @@ -99,7 +103,9 @@ st_crs(old_buildings) buffer_old_buildings <- st_buffer(x = old_buildings, dist = distance) -ggplot(data = buffer_old_buildings) + geom_sf() +ggplot(data = buffer_old_buildings) + + geom_sf() + + coord_sf(datum = st_crs(28992)) ``` @@ -107,7 +113,7 @@ ggplot(data = buffer_old_buildings) + geom_sf() Now, we have a lot of overlapping buffers. We would rather create a unique conservation zone rather than overlapping ones in that case. So we have to fuse the overlapping buffers into one polygon. This operation is called _union_ and the corresponding function is `st_union()`. -```{r} +```{r union} single_old_buffer <- st_union(buffer_old_buildings) %>% st_cast(to = "POLYGON") %>% st_as_sf() @@ -126,7 +132,7 @@ We create unique IDs to identify the new polygons. ## Centroids For the sake of visualisation speed, we would like to represent buildings by a single point (for instance: their geometric centre) rather than their actual footprint. This operation means defining their _centroid_ and the corresponding function is `st_centroid()`. -```{r} +```{r centroids} sf::sf_use_s2(FALSE) # s2 works with geographic projections, so to calculate centroids in projected CRS units (meters), we need to disable it. 
centroids_old <- st_centroid(old_buildings) %>% @@ -134,14 +140,15 @@ centroids_old <- st_centroid(old_buildings) %>% ggplot() + geom_sf(data = single_old_buffer, aes(fill=ID)) + - geom_sf(data = centroids_old) + geom_sf(data = centroids_old) + + coord_sf(datum = st_crs(28992)) ``` ## Intersection Now, we would like to distinguish conservation areas based on the number of historic buildings they contain. In GIS terms, we would like to know how many centroids each fused buffer polygon contains. This operation means _intersecting_ the layer of polygons with the layer of points and the corresponding function is `st_intersection()`. -```{r} +```{r intersection} centroids_buffers <- st_intersection(centroids_old,single_old_buffer) %>% mutate(n = 1) @@ -160,7 +167,8 @@ Now, we would like to distinguish conservation areas based on the number of hist begin = 0.6, end = 1, direction = -1, - option = "B") + option = "B") + + coord_sf(datum = st_crs(28992)) ``` `st_intersection` here adds the attributes of the intersected polygon buffers to the data table of the centroids. This means we will now know about each centroid, the ID of its intersected polygon-buffer, and a variable called "n" which is population with 1 for everyone. This means that all centroids will have the same weight when aggregated. @@ -169,18 +177,24 @@ We aggregate them by ID number (`group_by(ID)`) and sum the variable `n` to know ### Final output: -Let's map this layer over the initial map of individual buildings. +Let's map this layer over the initial map of individual buildings, and save the result. 
-```{r} -ggplot() + +```{r mapping} +p <- ggplot() + geom_sf(data = buildings) + geom_sf(data = single_buffer, aes(fill=n_buildings), colour = NA) + scale_fill_viridis_c(alpha = 0.6, begin = 0.6, end = 1, direction = -1, - option = "B") + option = "B") + + coord_sf(datum = st_crs(28992)) + p + +ggsave(filename = "fig/ConservationBrielle.png", + plot = p) + ``` ::::::::::::::::::::::::::::::::::::: challenge @@ -192,7 +206,7 @@ The historical threshold now applies to all pre-war buildings, but the distance :::::::::::::::::::::::: solution -```{r} +```{r parameters} old <- 1939 distance <- 10 @@ -227,14 +241,21 @@ centroid_by_buffer <- centroids_buffers %>% single_buffer <- single_old_buffer %>% mutate(n_buildings = centroid_by_buffer$n) - ggplot() + + +pnew <- ggplot() + geom_sf(data = buildings) + geom_sf(data = single_buffer, aes(fill = n_buildings), colour = NA) + scale_fill_viridis_c(alpha = 0.6, begin = 0.6, end = 1, direction = -1, - option = "B") + option = "B") + + coord_sf(datum = st_crs(28992)) + + pnew + +ggsave(filename = "fig/ConservationBrielle_newrules.png", + plot = pnew) ``` :::::::::::::::::::::::: @@ -246,7 +267,7 @@ single_buffer <- single_old_buffer %>% ## Area -```{r} +```{r area} single_buffer$area <- sf::st_area(single_buffer) %>% units::set_units(., km^2) diff --git a/instructors/2-vector-slides.html b/instructors/2-vector-slides.html index e2b96ccc..2ac20db6 100644 --- a/instructors/2-vector-slides.html +++ b/instructors/2-vector-slides.html @@ -27,12 +27,12 @@ ul.task-list{list-style: none;} ul.task-list li input[type="checkbox"] { width: 0.8em; - margin: 0 0.8em 0.2em -1em; /* quarto-specific, see https://github.com/quarto-dev/quarto-cli/issues/4556 */ + margin: 0 0.8em 0.2em -1em; /* quarto-specific, see https://github.com/quarto-dev/quarto-cli/issues/4556 */ vertical-align: middle; } /* CSS for syntax highlighting */ pre > code.sourceCode { white-space: pre; position: relative; } - pre > code.sourceCode > span { line-height: 1.25; } + 
pre > code.sourceCode > span { display: inline-block; line-height: 1.25; } pre > code.sourceCode > span:empty { height: 1.2em; } .sourceCode { overflow: visible; } code.sourceCode > span { color: inherit; text-decoration: inherit; } @@ -107,11 +107,11 @@ .callout { margin-top: 1em; - margin-bottom: 1em; + margin-bottom: 1em; border-radius: .25rem; } - .callout.callout-style-simple { + .callout.callout-style-simple { padding: 0em 0.5em; border-left: solid #acacac .3rem; border-right: solid 1px silver; @@ -162,7 +162,7 @@ margin-top: 0.5em; margin-bottom: 0.5em; } - + .callout.callout-titled.callout-style-simple .callout-content p { margin-top: 0; } @@ -213,7 +213,7 @@ .callout-title { display: flex } - + .callout-icon::before { margin-top: 1rem; padding-right: .5rem; @@ -224,8 +224,7 @@ } .callout.callout-titled .callout-body > .callout-content > :last-child { - padding-bottom: 0.5rem; - margin-bottom: 0; + margin-bottom: 0.5rem; } .callout.callout-titled .callout-icon::before { @@ -326,12 +325,12 @@ } .reveal .footnotes ol { counter-reset: ol; - list-style-type: none; + list-style-type: none; margin-left: 0; } .reveal .footnotes ol li:before { counter-increment: ol; - content: counter(ol) ". "; + content: counter(ol) ". "; } .reveal .footnotes ol li > p:first-child { display: inline-block; @@ -373,19 +372,19 @@ .reveal .slide > img.r-stretch.quarto-figure-center { display: block; margin-left: auto; - margin-right: auto; + margin-right: auto; } .reveal .slide > img.stretch.quarto-figure-left, .reveal .slide > img.r-stretch.quarto-figure-left { display: block; margin-left: 0; - margin-right: auto; + margin-right: auto; } .reveal .slide > img.stretch.quarto-figure-right, .reveal .slide > img.r-stretch.quarto-figure-right { display: block; margin-left: auto; - margin-right: 0; + margin-right: 0; } @@ -401,12 +400,12 @@
levels(factor(lines_Delft$highway))
-
-motorway_Delft <- lines_Delft %>%
- filter(highway == "motorway")
-
-motorway_Delft %>%
- mutate(length = st_length(.)) %>%
- select(everything(), geometry) %>%
- summarise(total_length = sum(length))
-
-nrow(motorway_Delft)
-
-ggplot(data = motorway_Delft) +
- geom_sf(size = 1.5) +
- ggtitle("Mobility network of Delft", subtitle = "Motorways") +
- coord_sf()
levels(factor(lines_Delft$highway))
+
+motorway_Delft <- lines_Delft %>%
+ filter(highway == "motorway")
+
+motorway_Delft %>%
+ mutate(length = st_length(.)) %>%
+ select(everything(), geometry) %>%
+ summarise(total_length = sum(length))
+
+nrow(motorway_Delft)
+
+ggplot(data = motorway_Delft) +
+ geom_sf(size = 1.5) +
+ ggtitle("Mobility network of Delft", subtitle = "Motorways") +
+ coord_sf()
levels(factor(lines_Delft$highway))
-
-line_widths <- c(0.25, 0.75, 0.5, 1)
-
-ggplot(data = lines_Delft_selection) +
- geom_sf(aes(size = highway)) +
- scale_size_manual(values = line_widths) +
- labs(size = "Road Size") +
- ggtitle("Mobility network of Delft", subtitle = "Roads & Cycleways - Line width varies") +
- coord_sf()
levels(factor(lines_Delft$highway))
+
+line_widths <- c(0.25, 0.75, 0.5, 1)
+
+ggplot(data = lines_Delft_selection) +
+ geom_sf(aes(size = highway)) +
+ scale_size_manual(values = line_widths) +
+ labs(size = "Road Size") +
+ ggtitle("Mobility network of Delft", subtitle = "Roads & Cycleways - Line width varies") +
+ coord_sf()
levels(factor(lines_Delft_selection$highway))
-
-lines_Delft_bicycle <- lines_Delft %>%
- filter(highway == "cycleway")
-
-ggplot() +
- geom_sf(data = lines_Delft) +
- geom_sf(data = lines_Delft_bicycle, color = "magenta", size = 2) +
- ggtitle("Mobility network of Delft", subtitle = "Roads dedicated to bikes") +
- coord_sf()
levels(factor(lines_Delft_selection$highway))
+
+lines_Delft_bicycle <- lines_Delft %>%
+ filter(highway == "cycleway")
+
+ggplot() +
+ geom_sf(data = lines_Delft) +
+ geom_sf(data = lines_Delft_bicycle, color = "magenta", size = 2) +
+ ggtitle("Mobility network of Delft", subtitle = "Roads dedicated to bikes") +
+ coord_sf()
municipal_boundaries_NL <- st_read(here("episodes", "data", "nl-gemeenten.shp"))
-str(municipal_boundaries_NL)
-levels(factor(municipal_boundaries_NL$ligtInPr_1))
-
-ggplot(data = municipal_boundaries_NL) +
- geom_sf(aes(color = ligtInPr_1), size = 1) +
- ggtitle("Contiguous NL Municipal Boundaries") +
- coord_sf()
municipal_boundaries_NL <- st_read(here("episodes", "data", "nl-gemeenten.shp"))
+str(municipal_boundaries_NL)
+levels(factor(municipal_boundaries_NL$ligtInPr_1))
+
+ggplot(data = municipal_boundaries_NL) +
+ geom_sf(aes(color = ligtInPr_1), size = 1) +
+ ggtitle("Contiguous NL Municipal Boundaries") +
+ coord_sf()
leisure_locations_selection <- st_read(here("episodes", "data", "delft-leisure.shp")) %>%
- filter(leisure %in% c("playground", "picnic_table"))
-
-blue_orange <- c("cornflowerblue", "darkorange")
-
-p <- ggplot() +
- geom_sf(data = lines_Delft_selection, aes(color = highway)) +
- scale_color_manual(name = "Line Type", values = road_colors) +
- ggtitle("Road network and leisure")
-
-p +
- geom_sf(data = leisure_locations_selection, aes(fill = leisure), shape = 21) +
- scale_fill_manual(name = "Leisure Type", values = blue_orange)
-
-p +
- geom_sf(data = leisure_locations_selection, aes(fill = leisure, shape = leisure), size = 3) +
- scale_fill_manual(name = "Leisure Type", values = blue_orange) +
- scale_shape_manual(name = "Leisure Type", values = c(21, 22))
leisure_locations_selection <- st_read(here("episodes", "data", "delft-leisure.shp")) %>%
+ filter(leisure %in% c("playground", "picnic_table"))
+
+blue_orange <- c("cornflowerblue", "darkorange")
+
+p <- ggplot() +
+ geom_sf(data = lines_Delft_selection, aes(color = highway)) +
+ scale_color_manual(name = "Line Type", values = road_colors) +
+ ggtitle("Road network and leisure")
+
+p +
+ geom_sf(data = leisure_locations_selection, aes(fill = leisure), shape = 21) +
+ scale_fill_manual(name = "Leisure Type", values = blue_orange)
+
+p +
+ geom_sf(data = leisure_locations_selection, aes(fill = leisure, shape = leisure), size = 3) +
+ scale_fill_manual(name = "Leisure Type", values = blue_orange) +
+ scale_shape_manual(name = "Leisure Type", values = c(21, 22))
boundary_ZH <- municipal_boundary_NL %>%
- filter(ligtInPr_1 == "Zuid-Holland")
-
-ggplot() +
- geom_sf(data = boundary_ZH, aes(color ="color"), show.legend = "line") +
- scale_color_manual(name = "", labels = "Municipal Boundaries", values = c("color" = "gray18")) +
- geom_sf(data = boundary_Delft, aes(shape = "shape"), color = "purple", fill = "purple") +
- scale_shape_manual(name = "", labels = "Municipality of Delft", values = c("shape" = 19)) +
- ggtitle("Delft location in South Holland") +
- theme(legend.background = element_rect(color = NA)) +
- coord_sf()
boundary_ZH <- municipal_boundary_NL %>%
+ filter(ligtInPr_1 == "Zuid-Holland")
+
+ggplot() +
+ geom_sf(data = boundary_ZH, aes(color ="color"), show.legend = "line") +
+ scale_color_manual(name = "", labels = "Municipal Boundaries", values = c("color" = "gray18")) +
+ geom_sf(data = boundary_Delft, aes(shape = "shape"), color = "purple", fill = "purple") +
+ scale_shape_manual(name = "", labels = "Municipality of Delft", values = c("shape" = 19)) +
+ ggtitle("Delft location in South Holland") +
+ theme(legend.background = element_rect(color = NA)) +
+ coord_sf()