Web scraping using a Shiny app

Hello everyone, I'm hoping someone can help me with this.

Instead of looping over all of the elements inside WEBelements as shown in the code below, I would like to know whether it is possible to add a control to the Shiny app where I can pick a number, for example 9 for the 9th element of WEBelements, and run the scraping steps for that element only.

library(shiny)
library(RSelenium)
library(tidyverse)
library(rvest)


ui <- fluidPage(
  actionButton("go", "Scrape")
)
server <- function(input, output, session) {
  observeEvent(input$go, {
    rD <- rsDriver(browser = "firefox", port = 4444L, verbose = FALSE)
    # rsDriver() already opens a browser session, so the client is ready to use
    remDr <- rD$client
    url<-"http://www.bvmt.com.tn/fr/entreprises/list"
    remDr$navigate(url) 
    # Click on links and scrape some stuff
    WEBelements <- remDr$findElements("id", "pano-societe")
    for (i in seq_along(WEBelements)) {
      remDr$navigate(url)
      WEBelements <- remDr$findElements("id", "pano-societe")
      WEBelements[[i]]$clickElement()
      Sys.sleep(2)
      webElem1 <- remDr$findElement("css selector", "li.tt2:nth-child(14) > a:nth-child(1)")
      webElem1$clickElement()
      Sys.sleep(1)
      elements2 <- remDr$findElements("class", "savoir-plus-actu")
      elements2[[1]]$clickElement()
      Sys.sleep(2)
      CurrentUrl <- remDr$getCurrentUrl()
      page <- read_html(CurrentUrl[[1]])
      raw_list <- page %>%
        html_nodes("a") %>% 
        html_attr("href") %>% 
        str_subset("\\.pdf") %>%
        walk2(., basename(.), download.file, mode = "wb") 
    }
    # Close browser session and stop the Selenium server so the port is free for the next click
    remDr$close()
    rD$server$stop()
  })
}

shinyApp(ui, server)
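
Here is roughly what I have in mind, as a minimal untested sketch: the UI gets a numericInput that holds the position of the company to scrape, and the observer runs the body of the loop once for that index instead of iterating. The input id "idx" and the out-of-range check are my own additions; the selectors, waits and download pipeline are the same as in the code above.

library(shiny)
library(RSelenium)
library(tidyverse)
library(rvest)


ui <- fluidPage(
  numericInput("idx", "Position of the company to scrape", value = 9, min = 1, step = 1),
  actionButton("go", "Scrape")
)
server <- function(input, output, session) {
  observeEvent(input$go, {
    rD <- rsDriver(browser = "firefox", port = 4444L, verbose = FALSE)
    remDr <- rD$client

    url <- "http://www.bvmt.com.tn/fr/entreprises/list"
    remDr$navigate(url)
    Sys.sleep(2)

    WEBelements <- remDr$findElements("id", "pano-societe")
    idx <- input$idx

    if (idx >= 1 && idx <= length(WEBelements)) {
      # Same steps as the loop body, applied only to the selected element
      WEBelements[[idx]]$clickElement()
      Sys.sleep(2)
      webElem1 <- remDr$findElement("css selector", "li.tt2:nth-child(14) > a:nth-child(1)")
      webElem1$clickElement()
      Sys.sleep(1)
      elements2 <- remDr$findElements("class", "savoir-plus-actu")
      elements2[[1]]$clickElement()
      Sys.sleep(2)

      # Download every PDF linked from the page we landed on
      CurrentUrl <- remDr$getCurrentUrl()
      page <- read_html(CurrentUrl[[1]])
      page %>%
        html_nodes("a") %>%
        html_attr("href") %>%
        str_subset("\\.pdf") %>%
        walk2(., basename(.), download.file, mode = "wb")
    } else {
      showNotification("Index out of range", type = "error")
    }

    # Close browser session and stop the Selenium server
    remDr$close()
    rD$server$stop()
  })
}

shinyApp(ui, server)

If a dropdown of company names would be nicer than a bare index, the names could be scraped once and pushed into a selectInput with updateSelectInput(), but the numeric index keeps the sketch short.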

