Search code examples
r · rvest · rselenium

How to get a table from power BI dashboard using web scraping in R


I am working on a data extraction task using R. The data is located in a Power BI dashboard, so it is troublesome to get at. I found a solution here on SO:

Scrape website's Power BI dashboard using R

But I am not sure how to navigate the page to get the components and extract the tables. My code is as follows:

library(wdman)
library(RSelenium)
library(xml2)
library(selectr)
library(tidyverse)
library(rvest)

# Start a Selenium server together with a browser client.
# NOTE: rsDriver() returns a *list* with $server and $client — it is not
# itself a remoteDriver. The original code overwrote this list with a
# second remoteDriver() and opened a redundant second session; instead,
# grab the already-open client from the list.
driver <- rsDriver(
  port = 4445L,
  browser = "firefox"
)
remDr <- driver$client

# Navigate to the public Power BI report we want to scrape.
report_url <- "https://app.powerbi.com/view?r=eyJrIjoiOGI5Yzg2MGYtZmNkNy00ZjA5LTlhYTYtZTJjNjg2NTY2YTlmIiwidCI6ImI1NDE0YTdiLTcwYTYtNGUyYi05Yzc0LTM1Yjk0MDkyMjk3MCJ9"
remDr$navigate(report_url)

# Snapshot the rendered page source and select the pivot-table container.
# NOTE(review): Power BI virtualizes table rows, so this static snapshot
# only contains the rows currently rendered on screen — see the accepted
# solution below for a scroll-aware approach.
data_table <- read_html(remDr$getPageSource()[[1]]) %>%
  querySelector("div.pivotTable")

Although the selenium process work, I do not know how to get my table:

(screenshot: the Power BI dashboard, with the target tables highlighted by blue arrows)

The blue arrows show the tables I want; after that I need to move to the other pages to extract the remaining tables. But I think that if I can do it for the first page, the other pages will work the same way.

Many thanks!


Solution

  • These tables are a bit tricky because new rows only appear in the page source once you scroll far enough to activate them. My solution is a function that scrapes the table row by row, appending each row to an overall data frame and scrolling when necessary. It takes as input the visual-container number of the Power BI table.

    Here are solutions in R:

    library(wdman)
    library(RSelenium)

    # Launch a standalone Selenium server on port 4444.
    # NOTE: the original call had a trailing comma after `chromever`,
    # which leaves an empty argument in the call — fragile and an error
    # if it were matched against `...`; removed here.
    selServ <- selenium(
      port = 4444L,
      version = 'latest',
      chromever = '103.0.5060.134' # set to a chromedriver version you have available
    )

    # Client session pointing at the local Selenium server, driving Chrome.
    remDr <- remoteDriver(
      remoteServerAddr = 'localhost',
      port = 4444L,
      browserName = 'chrome'
    )
    
    # Scrape one Power BI table (identified by its visual-container index)
    # into a data frame. Power BI virtualizes rows: they only exist in the
    # DOM once scrolled into view, so when a row is missing we click the
    # table's scroll button and retry before concluding the table is done.
    # Relies on the module-level `remDr` remoteDriver session.
    scrape_powerbi_table <- function(container_number) {
      table_xpath <- paste0(
        "//*[@id='pvExplorationHost']/div/div/exploration/div/explore-canvas/div/div[2]/div/div[2]/div[2]/visual-container-repeat/visual-container[",
        container_number, "]/transform/div/div[3]/div/visual-modern")
      Sys.sleep(1)
      # The scroll button is absent for short tables; ignore lookup failures.
      scroll_button <- NULL
      try({
        scroll_button <- remDr$findElement("xpath", paste0(table_xpath, "/div/div/div[2]/div[4]/div[2]"))
        remDr$mouseMoveToLocation(webElement = scroll_button)
      }, silent = TRUE)
      # Header cells: keep only the text before the first embedded tag.
      col_names <- remDr$findElements("xpath", paste0(table_xpath, "/div/div/div[2]/div[1]/div[2]/div[2]/div/div"))
      col_names <- vapply(col_names, function(x) stringr::str_split(x$getElementAttribute('innerHTML')[[1]], "<")[[1]][1], character(1))
      df <- data.frame(matrix(ncol = length(col_names), nrow = 0))
      colnames(df) <- col_names
      # Fetch the cells of one data row by aria-rowindex, dropping the
      # leading row-header cell.
      fetch_row <- function(row_index) {
        cells <- remDr$findElements("xpath", paste0(table_xpath, "/div/div/div[2]/div[1]/div[4]/div/div[@aria-rowindex='", row_index, "']/div"))
        row <- vapply(cells, function(x) x$getElementAttribute('innerHTML')[[1]], character(1))
        row[-1]
      }
      row_count <- 2  # aria-rowindex 1 is the header row
      repeat {
        current_row <- fetch_row(row_count)
        if (length(current_row) == 0 || all(is.na(current_row))) {
          # Row not rendered yet: scroll to force it into the DOM, then retry.
          # BUG FIX: the original used `break` inside the tryCatch error
          # handler; handlers are ordinary function calls, so `break` there
          # raises "no loop for break/next" instead of exiting the loop.
          # Signal scroll failure through the tryCatch return value instead.
          scrolled <- tryCatch({
            for (i in seq_len(10)) scroll_button$click()
            TRUE
          }, error = function(e) FALSE)
          if (!scrolled) break
          current_row <- fetch_row(row_count)
        }
        if (length(current_row) == 0 || all(is.na(current_row))) break
        df[nrow(df) + 1, ] <- current_row
        row_count <- row_count + 1
      }
      df
    }
    
    # Open a browser window on the already-running Selenium server.
    remDr$open()

    # Load the public Power BI report, then give the single-page app time
    # to render before querying the DOM.
    remDr$navigate("https://app.powerbi.com/view?r=eyJrIjoiOGI5Yzg2MGYtZmNkNy00ZjA5LTlhYTYtZTJjNjg2NTY2YTlmIiwidCI6ImI1NDE0YTdiLTcwYTYtNGUyYi05Yzc0LTM1Yjk0MDkyMjk3MCJ9")
    Sys.sleep(15)
    # The report's page-forward arrow in the bottom navigation bar;
    # clicking it advances through the report pages.
    next_button <- remDr$findElement("xpath", "//*[@id='embedWrapperID']/div[2]/logo-bar/div/div/div/logo-bar-navigation/span/button[2]")

    # Scrape each table, advancing report pages between scrapes. The
    # argument is the table's visual-container index on the current page.
    # NOTE(review): double clicks presumably skip pages without target
    # tables — verify against the live report.
    df1 <- scrape_powerbi_table(8)
    remDr$mouseMoveToLocation(webElement = next_button)
    next_button$click()
    df2 <- scrape_powerbi_table(8)
    df3 <- scrape_powerbi_table(9)
    remDr$mouseMoveToLocation(webElement = next_button)
    next_button$click()
    next_button$click()
    df4 <- scrape_powerbi_table(5)
    df5 <- scrape_powerbi_table(7)
    remDr$mouseMoveToLocation(webElement = next_button)
    next_button$click()
    df6 <- scrape_powerbi_table(9)
    df7 <- scrape_powerbi_table(10)
    remDr$mouseMoveToLocation(webElement = next_button)
    next_button$click()
    next_button$click()
    df8 <- scrape_powerbi_table(2)
    remDr$mouseMoveToLocation(webElement = next_button)
    next_button$click()
    df9 <- scrape_powerbi_table(5)
    df10 <- scrape_powerbi_table(6)
    
    # > df9
    #      Zona                      Provincia Total Establecimientos
    # 1  ZONA 1                         CARCHI                      8
    # 2  ZONA 1                     ESMERALDAS                      6
    # 3  ZONA 1                       IMBABURA                     32
    # 4  ZONA 1                      SUCUMBIOS                     27
    # 5  ZONA 2                           NAPO                      9
    # 6  ZONA 2                       ORELLANA                     30
    # 7  ZONA 2                      PICHINCHA                     63
    # 8  ZONA 3                     CHIMBORAZO                     56
    # 9  ZONA 3                       COTOPAXI                     54
    # 10 ZONA 3                        PASTAZA                     13
    # 11 ZONA 3                     TUNGURAHUA                    122
    # 12 ZONA 4                         MANABI                    127
    # 13 ZONA 4 SANTO DOMINGO DE LOS TSACHILAS                     49
    # 14 ZONA 5                        BOLIVAR                     24
    # 15 ZONA 5                      GALAPAGOS                      5
    # 16 ZONA 5                         GUAYAS                     27
    # 17 ZONA 5                       LOS RIOS                     53
    # 18 ZONA 5                    SANTA ELENA                     18
    # 19 ZONA 6                          AZUAY                    182
    # 20 ZONA 6                          CAÑAR                     35
    # 21 ZONA 6                MORONA SANTIAGO                     23
    # 22 ZONA 7                         EL ORO                     65
    # 23 ZONA 7                           LOJA                     48
    # 24 ZONA 7               ZAMORA CHINCHIPE                     16
    # 25 ZONA 8                         GUAYAS                     86
    # 26 ZONA 9                      PICHINCHA                    309
    

    And in Python:

    from selenium import webdriver
    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC
    import pandas as pd
    
    # Launch a local Chrome browser via Selenium.
    driver = webdriver.Chrome()

    # Public Power BI report to scrape.
    report_url = "https://app.powerbi.com/view?r=eyJrIjoiOGI5Yzg2MGYtZmNkNy00ZjA5LTlhYTYtZTJjNjg2NTY2YTlmIiwidCI6ImI1NDE0YTdiLTcwYTYtNGUyYi05Yzc0LTM1Yjk0MDkyMjk3MCJ9"
    driver.get(report_url)
    
    def scrape_powerbi_table(visual_container_number):
        """Scrape one Power BI table into a pandas DataFrame.

        Power BI virtualizes table rows: they only exist in the DOM once
        scrolled into view, so when a row is missing we click the table's
        scroll button a few times and retry before concluding the table is
        exhausted. Uses the module-level Selenium ``driver``.

        :param visual_container_number: index of the table's
            ``visual-container`` element on the current report page.
        :return: DataFrame with one row per table row (header row excluded).
        """
        table_xpath = (
            "//*[@id='pvExplorationHost']/div/div/exploration/div/explore-canvas"
            "/div/div[2]/div/div[2]/div[2]/visual-container-repeat"
            f"/visual-container[{visual_container_number}]"
            "/transform/div/div[3]/div/visual-modern"
        )
        scroll_button = WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, table_xpath + "/div/div/div[2]/div[4]/div[2]"))
        )
        col_names = [i.text for i in driver.find_elements(By.XPATH, table_xpath + "/div/div/div[2]/div[1]/div[2]/div[2]/div/div")]
        df = pd.DataFrame(columns=col_names)

        def fetch_row(row_index):
            # Cells of one data row; drop the leading row-header cell.
            cells = driver.find_elements(By.XPATH, f"{table_xpath}/div/div/div[2]/div[1]/div[4]/div/div[@aria-rowindex='{row_index}']/div")
            return [c.get_attribute("innerHTML") for c in cells][1:]

        # The original looped on `more_rows_left == True` but never updated
        # the flag — the loop only ever exits via break, so use `while True`.
        row_count = 2  # aria-rowindex 1 is the header row
        while True:
            current_row = fetch_row(row_count)
            if not current_row:
                try:
                    # Row not rendered yet: scroll to pull it into the DOM.
                    for _ in range(10):
                        scroll_button.click()
                except Exception:
                    break  # scrolling failed -> no more rows to load
                current_row = fetch_row(row_count)
            if not current_row:
                break
            df.loc[len(df)] = current_row
            row_count += 1
        return df
    
    # Wait for the report to render, then grab the page-forward arrow in
    # the bottom navigation bar.
    next_button = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//*[@id='embedWrapperID']/div[2]/logo-bar/div/div/div/logo-bar-navigation/span/button[2]")))

    # Scrape each table, clicking through the report pages between scrapes.
    # The argument is the table's visual-container index on the current page.
    # NOTE(review): double clicks presumably skip pages without target
    # tables — verify against the live report.
    df1 = scrape_powerbi_table(8)
    next_button.click()
    df2 = scrape_powerbi_table(8)
    df3 = scrape_powerbi_table(9)
    next_button.click()
    next_button.click()
    df4 = scrape_powerbi_table(5)
    df5 = scrape_powerbi_table(7)
    next_button.click()
    df6 = scrape_powerbi_table(9)
    df7 = scrape_powerbi_table(10)
    next_button.click()
    next_button.click()
    df8 = scrape_powerbi_table(2)
    next_button.click()
    df9 = scrape_powerbi_table(5)
    df10 = scrape_powerbi_table(6)

    # Close the browser and end the WebDriver session.
    driver.quit()
    

    Also, here are the ten files in csv format for your convenience.

    https://mega.nz/folder/LtVDiCyQ#5iW1mkd1VVTmcPApeqfGFA