diff --git a/069-FabricRealTimeAnalytics/.wordlist.txt b/069-FabricRealTimeAnalytics/.wordlist.txt new file mode 100644 index 0000000000..fb33664431 --- /dev/null +++ b/069-FabricRealTimeAnalytics/.wordlist.txt @@ -0,0 +1,26 @@ +querysets +realtime +cameronkahrsdocker +fabricwthdatapump +fabricwthdatapumpv +MAA +Realtime +setupIoTEnvironment +MAA +OneLake +PBI +Realtime +MAA +queryset +Queryset +AbboCost +Kahrs +Marrero +Hitney +Pardeep +Singla +Kleinhart +percentpricedifference +pricedifference +KQLDB +popout diff --git a/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide1.PNG b/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide1.PNG new file mode 100644 index 0000000000..868657e451 Binary files /dev/null and b/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide1.PNG differ diff --git a/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide2.PNG b/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide2.PNG new file mode 100644 index 0000000000..cda2c0659b Binary files /dev/null and b/069-FabricRealTimeAnalytics/Coach/Images/ArchitectureSlide2.PNG differ diff --git a/069-FabricRealTimeAnalytics/Coach/README.md b/069-FabricRealTimeAnalytics/Coach/README.md new file mode 100644 index 0000000000..0c64ba9341 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Coach/README.md @@ -0,0 +1,63 @@ +# What The Hack - Fabric Real-time Analytics - Coach Guide + +## Introduction + +Welcome to the coach's guide for the Fabric Real-time Analytics What The Hack. Here you will find links to specific guidance for coaches for each of the challenges. + +**NOTE:** If you are a Hackathon participant, this is the answer guide. Don't cheat yourself by looking at these during the hack! Go learn something. :) + +## Coach's Guides + +0. Challenge 00: **[Prerequisites](Solution-00.md)** + - Getting the environment setup for the rest of the challenges +1. 
Challenge 01: **[Ingesting the Data and Creating the Database](Solution-01.md)** + - Creating a database to store the real-time data +1. Challenge 02: **[Transforming the Data](Solution-02.md)** + - Transforming the data using KQL Querysets +1. Challenge 03: **[Create the Realtime Reporting](Solution-03.md)** + - Creating the Power BI reports, with real-time data + +## Coach Prerequisites + +This hack has pre-reqs that a coach is responsible for understanding and/or setting up BEFORE hosting an event. Please review the [What The Hack Hosting Guide](https://aka.ms/wthhost) for information on how to host a hack event. + +The guide covers the common preparation steps a coach needs to do before any What The Hack event, including how to properly configure Microsoft Teams. + +### Student Resources + +Before the hack, it is the Coach's responsibility to download and package up the contents of the `/Student/Resources` folder of this hack into a "Resources.zip" file. The coach should then provide a copy of the Resources.zip file to all students at the start of the hack. + +Always refer students to the [What The Hack website](https://aka.ms/wth) for the student guide: [https://aka.ms/wth](https://aka.ms/wth) + +**NOTE:** Students should **not** be given a link to the What The Hack repo before or during a hack. The student guide does **NOT** have any links to the Coach's guide or the What The Hack repo on GitHub. + +## Azure Requirements + +This hack requires students to have access to an Azure subscription where they can create and consume Azure resources. These Azure requirements should be shared with a stakeholder in the organization that will be providing the Azure subscription(s) that will be used by the students. 
+ +## Suggested Hack Agenda (Optional) + +- Sample Day 1 + - Challenge 0 (1 hour) + - Challenge 1 (2 hours) + - Challenge 2 (2 hours) + - Challenge 3 (2 hours) + +## Architecture Diagrams + +![Architecture 1](Images/ArchitectureSlide1.PNG) + +![Architecture 2](Images/ArchitectureSlide2.PNG) + +## Repository Contents + +- `./Coach` + - Coach's Guide and related files +- `./Coach/Solutions` + - Solution files with completed example answers to a challenge +- `./Coach/Images` + - Images for coaches guide. +- `./Student` + - Student's Challenge Guide +- `./Student/Resources` + - Resource files, sample code, scripts, etc meant to be provided to students. (Must be packaged up by the coach and provided to students at start of event) diff --git a/069-FabricRealTimeAnalytics/Coach/Solution-00.md b/069-FabricRealTimeAnalytics/Coach/Solution-00.md new file mode 100644 index 0000000000..198a4528bc --- /dev/null +++ b/069-FabricRealTimeAnalytics/Coach/Solution-00.md @@ -0,0 +1,53 @@ +# Challenge 00 - Prerequisites - Ready, Set, GO! - Coach's Guide + +**[Home](./README.md)** - [Next Solution >](./Solution-01.md) + +## Introduction + +Thank you for participating in the Fabric real time analytics WTH. Before you can hack, you will need to set up some prerequisites. 
+## Common Prerequisites + +- [Azure Subscription](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-subscription) +- [Managing Cloud Resources](../../000-HowToHack/WTH-Common-Prerequisites.md#managing-cloud-resources) + - [Azure Portal](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-portal) + - [Azure CLI](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-cli) + - [Note for Windows Users](../../000-HowToHack/WTH-Common-Prerequisites.md#note-for-windows-users) + - [Azure PowerShell CmdLets](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-powershell-cmdlets) + - [Azure Cloud Shell](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-cloud-shell) +- [Visual Studio Code](../../000-HowToHack/WTH-Common-Prerequisites.md#visual-studio-code) + - [VS Code plugin for ARM Templates](../../000-HowToHack/WTH-Common-Prerequisites.md#visual-studio-code-plugins-for-arm-templates) +## Description + +Here are the instructions for installing the ARM template. This will create: + +- Event Hub namespace and Event Hub (EH) +- Azure Container Instance (ACI) + +The EH will get created first, then the ACI. The ACI will be started with a container from the Docker Hub "cameronkahrsdocker/fabricwthdatapumpv2" which will automatically stream events to the EH created in the first step. + + +Here is a video link of how to go through the setup, step by step: [MAA Fabric Realtime Analytics](https://www.youtube.com/watch?v=wGox1lf0ve0) + +Steps: + +1. Login to the Azure portal and open the CLI (Command Line Interface) +2. Upload the "setupIoTEnvironment.json" to the storage connected to the CLI. +3. Navigate to those files in the command line. +4. Run + - `az group create --name <resource-group-name> --location westus3` + - You pick a name for the resource group. +5. Run + - `az deployment group create --resource-group <resource-group-name> --template-file "setupIoTEnvironment.json"` +6. You should now have a resource group in your Azure subscription with the EH and ACI resources. +7. 
Create a Fabric instance through the Azure portal. An F4 SKU is all that is needed. + +## Success Criteria + +To complete this challenge successfully, you should be able to: + +- Verify that the ARM template has deployed the following resources in Azure: + - Event Hub Namespace + - Event Hub (verify it is ingesting data from the container) + - Azure Container Instance (verify that it is running the Docker container and data is streaming out, go to logs to verify this) +- Fabric instance running diff --git a/069-FabricRealTimeAnalytics/Coach/Solution-01.md b/069-FabricRealTimeAnalytics/Coach/Solution-01.md new file mode 100644 index 0000000000..919cb067f5 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Coach/Solution-01.md @@ -0,0 +1,27 @@ +# Challenge 01 - Ingesting and Creating the Database - Coach's Guide + +[< Previous Solution](./Solution-00.md) - **[Home](./README.md)** - [Next Solution >](./Solution-02.md) + +## Setup Steps + +Steps for MAA Fabric Real-time analytics: +1. Create Fabric capacity +1. Change Admin settings to allow 1 second page refresh + - Capacity settings + - PBI Workloads +2. Open PBI in the browser +3. Create a workspace and assign it to the fabric capacity +4. New resource, create kql db +5. Turn on Onelake folder + +## Notes & Guidance + +Unfortunately Fabric does not allow template automation for the items inside of it, so you will have to follow along with the students to create the example from scratch as they are doing. + +The Learning Resource below have a useful tutorial to get you going if the steps laid out here are not enough. 
+ +## Learning Resources + +- [Realtime Analytics in Fabric Tutorial](https://learn.microsoft.com/en-us/fabric/real-time-analytics/tutorial-introduction) +- [Creating a KQL Database](https://learn.microsoft.com/en-us/fabric/real-time-analytics/create-database) +- [Get Data from Event Hubs into KQL](https://learn.microsoft.com/en-us/fabric/real-time-analytics/get-data-event-hub) diff --git a/069-FabricRealTimeAnalytics/Coach/Solution-02.md b/069-FabricRealTimeAnalytics/Coach/Solution-02.md new file mode 100644 index 0000000000..888b7e1831 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Coach/Solution-02.md @@ -0,0 +1,47 @@ +# Challenge 02 - Transforming the Data - Coach's Guide + +[< Previous Solution](./Solution-01.md) - **[Home](./README.md)** - [Next Solution >](./Solution-03.md) + +## Setup Steps + +Steps for MAA Fabric Real-time analytics setup: + +1. Get data from event hub + - Create new table name + - Create connection: + - Event hub namespace + - Event hub + - Sas key name + - Sas key + - Event retrieval start date (under more parameters) + - Schema should be JSON with no nested values +7. Create kql queryset + +## Notes & Guidance + +In this challenge, students should be familiar with KQL and be able to query the real-time data in Stock Table as required with KQL. + +Students should first create a KQL Queryset and select the Stock Table stored in the KQL Database. + +The query should be similar to the following: +``` +stockmarket +| order by timestamp asc +| extend pricedifference = round(price - prev(price, 8), 2) +| extend percentdifference = round(round(price - prev(price, 8), 2) / prev(price, 8), 4) +``` + +For the advanced challenge part, students should create another KQL Queryset. 
You can take the following query as a reference: +``` +stockmarket +| order by timestamp asc +| extend pricedifference = round(price - prev(price, 8), 2) +| extend percentdifference = round(round(price - prev(price, 8), 2) / prev(price, 8), 4) +| summarize arg_max(pricedifference, timestamp, price) by symbol +``` + +## Learning Resources + +- [Query data in a KQL queryset](https://learn.microsoft.com/en-us/fabric/real-time-analytics/kusto-query-set) +- [Customize results in the KQL Queryset results grid](https://learn.microsoft.com/en-us/fabric/real-time-analytics/customize-results) +- [KQL prev() function](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/prevfunction) diff --git a/069-FabricRealTimeAnalytics/Coach/Solution-03.md b/069-FabricRealTimeAnalytics/Coach/Solution-03.md new file mode 100644 index 0000000000..46b5fd12e8 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Coach/Solution-03.md @@ -0,0 +1,41 @@ +# Challenge 03 - Create the Realtime Reporting - Coach's Guide + +[< Previous Solution](./Solution-02.md) - **[Home](./README.md)** + +## Setup Steps + +1. Change Admin settings to allow 1 second page refresh + - Capacity settings + - PBI Workloads +2. Create PBI report + - Setup auto page refresh + +## Notes & Guidance + +This challenge completes the hack by setting up a PowerBI report that shows the incoming data to the KQL database at real time. + +- Creating the PowerBI report: The easiest way to do so is from the KQL queryset: + - Navigate to the KQL Queryset made in the previous challenge + - On the `KQL Queryset` menu in the ribbon, select `Build Power BI Report` + - The report can be saved in the following pop-up, or edited in full screen after being saved. The one requirement before exiting out of this pop-up is to click on the top right "File" dropdown menu and select "Save". The student should create the required visuals as per the challenge. 
+ - Give the report a name and place it in workspace (it is recommended to be the same one that already contains the KQL database) + +- Adjusting the page refresh settings (admin): + - On the right hand side of Fabric's top navigation bar, select the settings icon. + - Navigate to the Admin portal -> Capacity settings -> Trial/Fabric Capacity (if the student has purchased a Fabric capacity, it will be in the second option. If it is a free trial, it will be on the first one) + - Click on the name of the capacity in use and NOT on the "Actions" settings icon + - From the PowerBI Workloads menu in the following screen, turn on Automatic Page Refresh and set a Minimum Refresh Interval of 1 second. + +- Adjusting the page refresh settings (report): + - Open the report from the workspace + - From the top navigation bar, select "Edit" + - On the second group of tabs, Visualizations, on the right side of the screen, click on the Page Format icon (paintbrush with sheet of paper) + - At the bottom of those options, open the Page refresh dropdown and enter your desired page refresh interval (1 second) + - Click on "show details" to check how often the report page is refreshing + +- Create the graph + - Create a line graph with the following settings: + - Legend: Stock Symbol + - X-axis: Timestamp + - Y-axis: Stock Price + - Edit the filters on the visual so it only displays the previous one minute of information diff --git a/069-FabricRealTimeAnalytics/Coach/Solutions/.gitkeep b/069-FabricRealTimeAnalytics/Coach/Solutions/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/069-FabricRealTimeAnalytics/README.md b/069-FabricRealTimeAnalytics/README.md new file mode 100644 index 0000000000..ba9f232eea --- /dev/null +++ b/069-FabricRealTimeAnalytics/README.md @@ -0,0 +1,39 @@ +# What The Hack - Fabric Real-time Analytics + +## Introduction + +The AbboCost Financial group monitors several crazy stocks! 
Because of the volatile nature of these stocks, they need Up to the Second real-time reporting on what the stocks are doing, as well as historical data on all previous stock prices. Luckily, you have just the solution for them: Fabric! + +## Learning Objectives + +In this hack you will be solving the common business problem that companies have surrounding creating real-time reporting solutions: + +1. Provision a Fabric instance +2. Create a database to ingest data +3. Create queries and add analysis to the data +4. Create real-time reporting, with up to the second data + +## Challenges + +0. Challenge 00: **[Prerequisites](Student/Challenge-00.md)** + - Getting the environment setup for the rest of the challenges +1. Challenge 01: **[Ingesting the Data and Creating the Database](Student/Challenge-01.md)** + - Creating a database to store the real-time data +1. Challenge 02: **[Transforming the Data](Student/Challenge-02.md)** + - Create a queryset to add information to the data +1. Challenge 03: **[Create Realtime Reporting](Student/Challenge-03.md)** + - Creating the Real-time Power BI reports + +## Prerequisites + +- An Azure subscription + +## Contributors + +- Cameron Kahrs +- Rachel Liu +- Armando Marrero +- Juan Llovet de Casso +- Brian Hitney +- Pardeep Singla +- Alan Kleinhart diff --git a/069-FabricRealTimeAnalytics/Student/Challenge-00.md b/069-FabricRealTimeAnalytics/Student/Challenge-00.md new file mode 100644 index 0000000000..dd034056e2 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Student/Challenge-00.md @@ -0,0 +1,59 @@ +# Challenge 00 - Prerequisites - Ready, Set, GO! + +**[Home](../README.md)** - [Next Challenge >](./Challenge-01.md) + + +## Introduction + +Thank you for participating in the Fabric Real-time Analytics What The Hack. Before you can hack, you will need to set up some prerequisites. 
+ +## Common Prerequisites + +We have compiled a list of common tools and software that will come in handy to complete most What The Hack Azure-based hacks! + +You might not need all of them for the hack you are participating in. However, if you work with Azure on a regular basis, these are all things you should consider having in your toolbox. + + + +- [Azure Subscription](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-subscription) +- [Windows Subsystem for Linux](../../000-HowToHack/WTH-Common-Prerequisites.md#windows-subsystem-for-linux) +- [Managing Cloud Resources](../../000-HowToHack/WTH-Common-Prerequisites.md#managing-cloud-resources) + - [Azure Portal](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-portal) + - [Azure CLI](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-cli) + - [Note for Windows Users](../../000-HowToHack/WTH-Common-Prerequisites.md#note-for-windows-users) + - [Azure PowerShell CmdLets](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-powershell-cmdlets) + - [Azure Cloud Shell](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-cloud-shell) +- [Visual Studio Code](../../000-HowToHack/WTH-Common-Prerequisites.md#visual-studio-code) + - [VS Code plugin for ARM Templates](../../000-HowToHack/WTH-Common-Prerequisites.md#visual-studio-code-plugins-for-arm-templates) +- [Azure Storage Explorer](../../000-HowToHack/WTH-Common-Prerequisites.md#azure-storage-explorer) + +## Description + +Now that you have the common pre-requisites installed on your workstation, there are prerequisites specific to this hack. + +Your coach will provide you with a Resources.zip file that contains resources you will need to complete the hack. If you plan to work locally, you should unpack it on your workstation. If you plan to use the Azure Cloud Shell, you should upload it to the Cloud Shell and unpack it there. 
+ +Please install these additional tools: + +- [Azure IoT Tools](https://learn.microsoft.com/en-us/azure/iot-hub/reference-iot-hub-extension#install-from-the-visual-studio-code-marketplace) extension for Visual Studio Code +- .NET SDK 6.0 or later installed on your development machine. This can be downloaded from [here](https://www.microsoft.com/net/download/all) for multiple platforms. + +In the `/Challenge00/` folder of the Resources.zip file, you will find an ARM template, `setupIoTEnvironment.json` that sets up the initial hack environment in Azure you will work with in subsequent challenges. + +Please deploy the template by running the following Azure CLI commands from the location of the template file: +``` +az group create --name myIoT-rg --location eastus +az group deployment create -g myIoT-rg --name HackEnvironment -f setupIoTEnvironment.json +``` + +After deploying the ARM template, navigate to the resource group and create a Fabric capacity in the Azure portal. F2 SKU is more than enough for this WTH. 
+ +## Success Criteria + +To complete this challenge successfully, you should be able to: + +- Verify that the ARM template has deployed the following resources in Azure: + - Event Hub Namespace + - Event Hub (verify it is ingesting data from the container) + - Azure Container Instance (verify that it is running the Docker container and data is streaming out, go to logs to verify this) +- Fabric instance created and running diff --git a/069-FabricRealTimeAnalytics/Student/Challenge-01.md b/069-FabricRealTimeAnalytics/Student/Challenge-01.md new file mode 100644 index 0000000000..d8ceec486c --- /dev/null +++ b/069-FabricRealTimeAnalytics/Student/Challenge-01.md @@ -0,0 +1,31 @@ +# Challenge 01 - Ingesting the Data and Creating the Database + +[< Previous Challenge](./Challenge-00.md) - **[Home](../README.md)** - [Next Challenge >](./Challenge-02.md) + +## Introduction + +You've been tasked with creating some real time reporting using Power BI based on the data that is constantly being generated every second. + +## Description + +In this challenge, you will create a data ingestion stream from the Event Hub to Fabric and create a way to store that data inside of Fabric that is conducive to real time reporting. You will also need to make sure the data is being stored in the Fabric OneLake. + +## Success Criteria + +To complete this challenge successfully, you should be able to: +- Create a data ingestion method for the Event Hub into Fabric +- Create a database and a table to store the Event Hub data +- Verify that data from the Event Hub is entering Fabric and being stored in the OneLake. 
+ +## Learning Resources + +- [Realtime Analytics in Fabric Tutorial](https://learn.microsoft.com/en-us/fabric/real-time-analytics/tutorial-introduction) +- [Creating a KQL Database](https://learn.microsoft.com/en-us/fabric/real-time-analytics/create-database) +- [Get Data from Event Hubs into KQL](https://learn.microsoft.com/en-us/fabric/real-time-analytics/get-data-event-hub) + + +## Tips + +- You may find it easier to create the database first, before creating the ingestion stream. +- You can query the database to see how many records it has in total, then query it again moments later to verify that there was an increase in records, since this application generates about seven records every second. +- A KQL database does not automatically store its data in the Fabric OneLake. There is a setting you will need to change to do that. diff --git a/069-FabricRealTimeAnalytics/Student/Challenge-02.md b/069-FabricRealTimeAnalytics/Student/Challenge-02.md new file mode 100644 index 0000000000..0ed6a18e76 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Student/Challenge-02.md @@ -0,0 +1,44 @@ +# Challenge 02 - Transforming the Data + +[< Previous Challenge](./Challenge-01.md) - **[Home](../README.md)** - [Next Challenge >](./Challenge-03.md) + + +## Pre-requisites + +* Ingest the Event Hub data into Fabric KQL Database successfully +* Have Stock Table ready + +## Introduction + +You've been tasked with creating some real time reporting using Power BI based on the data that is constantly being generated every second. In this challenge, you will learn how to query and transform the data with Fabric. + +## Description +In this challenge, you need to get the price difference and price difference percent between stock price and its previous price at different timestamps in Stock Table. The data in the new table should be in ascending time stamp order. 
+ + +## Success Criteria + +To complete this challenge successfully, you should be able to: +- Create a KQL Queryset +- Query the original Stock Table in ascending time stamp order +- Calculate the stock price difference and price difference percent at each timestamp + +## Learning Resources + +- [Query data in a KQL queryset](https://learn.microsoft.com/en-us/fabric/real-time-analytics/kusto-query-set) +- [Customize results in the KQL Queryset results grid](https://learn.microsoft.com/en-us/fabric/real-time-analytics/customize-results) +- [KQL prev() function](https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/prevfunction) + + +## Tips + +- Starting by ordering the stocks in ascending timestamp order is very beneficial +- You should add two columns in Stock Table: one for "pricedifference", and one for "percentpricedifference" +- You can keep the price difference to two decimal places, and keep the price difference percent to four decimal places + +## Advanced Challenges (Optional) + +Too comfortable? Eager to do more? Try these additional challenges! + + +- Create another KQL queryset and find out the biggest price difference for each stock, and at what time it occurred diff --git a/069-FabricRealTimeAnalytics/Student/Challenge-03.md b/069-FabricRealTimeAnalytics/Student/Challenge-03.md new file mode 100644 index 0000000000..55660bb4a5 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Student/Challenge-03.md @@ -0,0 +1,34 @@ +# Challenge 03 - Create the Realtime Reporting + +[< Previous Challenge](./Challenge-02.md) - **[Home](../README.md)** + +## Pre-requisites + +- Stock Table in the KQLDB +- KQL Queryset with transformations + +## Description + +You've been tasked with creating some real time reporting using Power BI based on the data that is constantly being generated every second. In this challenge, you must bring in the data from the KQL database/queryset to PowerBI. 
You need to create visualizations that represent the incoming data, but you will also need to modify some settings to ensure that this is truly real-time reporting. + +## Success Criteria + +To complete this challenge, verify that: +- You have a PowerBI report displaying the data from the Stock table. + - Line Graph showing the previous minute of data of each stocks price +- Your report page is auto-refreshing every second with the data your KQL DB is ingesting continuously. + +## Learning Resources +- [Power BI Realtime Settings](https://learn.microsoft.com/en-us/power-bi/create-reports/desktop-automatic-page-refresh) +- [Creating a real-time dataset in the Fabric portal](https://learn.microsoft.com/en-us/fabric/real-time-analytics/create-powerbi-report) +- [Creating a real-time dataset in Power BI Desktop](https://learn.microsoft.com/en-us/fabric/real-time-analytics/power-bi-data-connector) + +## Tips +- Use a Card that has the count of the number of records to help ensure data is being constantly updated in the report. +- To get to the Fabric capacity settings you can go to the `Admin Portal` in Power BI and then on the far right click on `Fabric Capacity`. In there the `Automatic page refresh` setting should be under `Power BI Workloads` +- To create the settings for realtime reporting, make sure you are clicked off of any visuals so that the page settings show up. +- It is easier to save the report in the popout from the KQL Queryset, then open it in the proper editor in the browser, rather than in the small popout from the KQL Queryset option. + + +## Additional Challenges +- Get creative with the Power BI report! What kind of visuals can you create? 
diff --git a/069-FabricRealTimeAnalytics/Student/Resources/.gitkeep b/069-FabricRealTimeAnalytics/Student/Resources/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/069-FabricRealTimeAnalytics/Student/Resources/setupIoTEnvironment.json b/069-FabricRealTimeAnalytics/Student/Resources/setupIoTEnvironment.json new file mode 100644 index 0000000000..efa096f597 --- /dev/null +++ b/069-FabricRealTimeAnalytics/Student/Resources/setupIoTEnvironment.json @@ -0,0 +1,162 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "variables": { + "randomString": "[substring(guid(resourceGroup().id), 0, 6)]", + "namespaceName": "[format('ehns-{0}-{1}', variables('randomString'), 'fabricrealtime')]", + "containerName": "[format('aci-{0}-{1}', variables('randomString'), 'fabricwthcontainerinstance')]", + "defaultSASKeyName": "RootManageSharedAccessKey", + "authRuleResourceId": "[resourceId('Microsoft.EventHub/namespaces/authorizationRules', variables('namespaceName'), variables('defaultSASKeyName'))]", + "stockstartprice": "100", + "increasechance": "0.45", + "sigma": "0.05", + "mu": "1" + }, + "outputs": { + "PrimaryConnectionString": { + "type": "string", + "value": "[listkeys(variables('authRuleResourceId'), '2017-04-01').primaryConnectionString]" + } + }, + "resources": [ + { + "type": "Microsoft.EventHub/namespaces", + "apiVersion": "2022-10-01-preview", + "name": "[variables('namespaceName')]", + "location": "West US 3", + "sku": { + "name": "Basic", + "tier": "Basic", + "capacity": 1 + }, + "properties": { + "minimumTlsVersion": "1.2", + "publicNetworkAccess": "Enabled", + "disableLocalAuth": false, + "zoneRedundant": true, + "isAutoInflateEnabled": false, + "maximumThroughputUnits": 0, + "kafkaEnabled": false + } + }, + { + "type": "Microsoft.EventHub/namespaces/authorizationrules", + "apiVersion": "2022-10-01-preview", + "name": "[concat(variables('namespaceName'), 
'/RootManageSharedAccessKey')]", + "location": "westus3", + "dependsOn": [ + "[resourceId('Microsoft.EventHub/namespaces', variables('namespaceName'))]" + ], + "properties": { + "rights": [ + "Listen", + "Manage", + "Send" + ] + } + }, + { + "type": "Microsoft.EventHub/namespaces/eventhubs", + "apiVersion": "2022-10-01-preview", + "name": "[concat(variables('namespaceName'), '/fwtheventhub')]", + "location": "westus3", + "dependsOn": [ + "[resourceId('Microsoft.EventHub/namespaces', variables('namespaceName'))]" + ], + "properties": { + "retentionDescription": { + "cleanupPolicy": "Delete", + "retentionTimeInHours": 1 + }, + "messageRetentionInDays": 1, + "partitionCount": 2, + "status": "Active" + } + }, + { + "type": "Microsoft.EventHub/namespaces/networkrulesets", + "apiVersion": "2022-10-01-preview", + "name": "[concat(variables('namespaceName'), '/default')]", + "location": "westus3", + "dependsOn": [ + "[resourceId('Microsoft.EventHub/namespaces', variables('namespaceName'))]" + ], + "properties": { + "publicNetworkAccess": "Enabled", + "defaultAction": "Allow", + "virtualNetworkRules": [], + "ipRules": [], + "trustedServiceAccessEnabled": false + } + }, + { + "type": "Microsoft.ContainerInstance/containerGroups", + "apiVersion": "2023-05-01", + "name": "[variables('containerName')]", + "location": "westus3", + "properties": { + "sku": "Standard", + "containers": [ + { + "name": "[variables('containerName')]", + "properties": { + "image": "cameronkahrsdocker/fabricwthdatapumpv2", + "ports": [ + { + "protocol": "TCP", + "port": 80 + } + ], + "environmentVariables": [ + { + "name": "eventhubname", + "value": "fwtheventhub" + }, + { + "name": "eventconnectionstring", + "value": "[listkeys(variables('authRuleResourceId'), '2017-04-01').primaryConnectionString]" + }, + { + "name": "mu", + "value": "[variables('mu')]" + }, + { + "name": "sigma", + "value": "[variables('sigma')]" + }, + { + "name": "increasechance", + "value": "[variables('increasechance')]" + }, + 
{ + "name": "stockstartprice", + "value": "[variables('stockstartprice')]" + } + ], + "resources": { + "requests": { + "memoryInGB": 1.5, + "cpu": 1 + } + } + } + } + ], + "initContainers": [], + "restartPolicy": "OnFailure", + "ipAddress": { + "ports": [ + { + "protocol": "TCP", + "port": 80 + } + ], + "type": "Public" + }, + "osType": "Linux" + } + } + ] + +} \ No newline at end of file