Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Desenvolvimento do webscraping - LinkedIn] Loop pelas abas #21

Open
renata-olivmachado opened this issue Apr 24, 2024 · 1 comment
Open
Assignees
Labels
AWS tarefas dentro da plataforma Eng_Dados LinkedIn
Milestone

Comments

@renata-olivmachado
Copy link

renata-olivmachado commented Apr 24, 2024

Tarefa da Story: #16

Desenvolver webscraping Python, podendo usar selenium ou outra lib.
O webscraping deve logar no LinkedIn, acessar a página do SJ e extrair os dados para o AWS S3.

@renata-olivmachado renata-olivmachado changed the title Loop pelas abas [Desenvolvimento do webscraping - LinkedIn] Loop pelas abas Apr 24, 2024
@renata-olivmachado renata-olivmachado added AWS tarefas dentro da plataforma LinkedIn labels Apr 24, 2024
@renata-olivmachado renata-olivmachado added this to the Sprint 1 milestone Apr 24, 2024
@Moscarde
Copy link

Arquivo da classe ScraperLinkedin no repositório

A função extract_data é responsável por criar um loop com as urls das abas e iterar executando as extrações individualmente:

    def extract_data(
        self, daterange: str = "d365", custom_daterange: list = None
    ) -> bool:
        """
        Runs the extraction loop over each analytics tab URL.

        Args:
            daterange (str): date-range key (d15, d30, d90, d365, custom_date,
                or the numeric aliases 15, 30, 90, 365). Defaults to "d365".
            custom_daterange (list, optional): list of custom dates in
                'dd/mm/yyyy' format; only used when ``daterange`` is
                "custom_date". Defaults to None.

        Returns:
            bool: True when every tab was processed.

        Raises:
            ValueError: if ``daterange`` is not one of the recognized keys.
        """
        # Never use a mutable list as a default argument: it would be shared
        # across calls. Build the placeholder inside the function instead.
        if custom_daterange is None:
            custom_daterange = ["dd/mm/yyyy"]

        # Fail fast on an unknown key instead of crashing later with an
        # opaque AttributeError on None.click().
        valid_keys = {"d15", "15", "d30", "30", "d90", "90", "d365", "365", "custom_date"}
        if daterange not in valid_keys:
            raise ValueError(
                f"Invalid daterange {daterange!r}; expected one of {sorted(valid_keys)}"
            )

        extraction_urls = [
            f"https://www.linkedin.com/company/{self.company_code}/admin/analytics/updates/",
            f"https://www.linkedin.com/company/{self.company_code}/admin/analytics/visitors/",
            f"https://www.linkedin.com/company/{self.company_code}/admin/analytics/followers/",
            f"https://www.linkedin.com/company/{self.company_code}/admin/analytics/competitors/",
        ]

        for url in extraction_urls:
            self.driver.get(url)

            WebDriverWait(self.driver, 10).until(EC.url_to_be(url))

            # The competitors tab may open a blocking modal on first visit.
            if "competitors" in url:
                self.close_competitors_modal_if_open()

            self.get_element(xpath=self.XPATH_BUTTON_EXPORT).click()
            self.get_element(
                xpath=self.XPATH_BUTTON_DATERANGE, force_waiting=True
            ).click()
            sleep(1)
            d15, d30, d90, d365, custom_date = self.get_element(
                xpath=self.XPATH_LI_DATERANGE, multiple=True, force_waiting=True
            )
            # Map both the "dNN" and bare-number aliases onto the list items.
            daterange_map = {
                "d15": d15,
                "15": d15,
                "d30": d30,
                "30": d30,
                "d90": d90,
                "90": d90,
                "d365": d365,
                "365": d365,
                "custom_date": custom_date,
            }
            daterange_map[daterange].click()

            if daterange == "custom_date":
                self.select_custom_daterange(custom_daterange)
            # correct flow
            # self.get_element(xpath=self.XPATH_BUTTON_EXPORT_MODAL).click()

            # test flow - only performs a mouse hover over the export button
            ActionChains(self.driver).move_to_element(
                self.get_element(xpath=self.XPATH_BUTTON_EXPORT_MODAL)
            ).perform()
            # end test flow

            print("Fazendo download da extração de", url.split("/")[-2])

        return True

Outras funções utilizadas dentro de extract_data:

    def get_element(
        self,
        xpath: str,
        origin_element: WebElement = None,
        multiple: bool = False,
        force_waiting: bool = False,
        timeout: int = 10,
    ) -> Union[bool, WebElement, List[WebElement]]:
        """
        Fetches an element (or a list of elements) from the page.

        Args:
            xpath (str): xpath of the element.
            origin_element (WebElement, optional): search root. Defaults to
                the driver (whole page).
            multiple (bool, optional): return a list of elements. Defaults to False.
            force_waiting (bool, optional): wait until the element is
                available before returning. Defaults to False.
            timeout (int, optional): wait timeout in seconds. Defaults to 10.

        Returns:
            Union[bool, WebElement, List[WebElement]]: the element(s) if
            found, otherwise False.
        """
        origin_element = origin_element or self.driver

        try:
            if multiple:
                # Bug fix: force_waiting used to be silently ignored when
                # multiple=True, so callers combining both flags got no wait.
                if force_waiting:
                    wait = WebDriverWait(origin_element, timeout)
                    return wait.until(
                        EC.presence_of_all_elements_located((By.XPATH, xpath))
                    )
                return origin_element.find_elements(By.XPATH, xpath)

            if force_waiting:
                wait = WebDriverWait(origin_element, timeout)
                return wait.until(EC.element_to_be_clickable((By.XPATH, xpath)))

            return origin_element.find_element(By.XPATH, xpath)

        except Exception:
            # Deliberate best-effort: callers treat a falsy return as
            # "element not found" (missing element, timeout, stale ref...).
            return False
    def select_custom_daterange(self, daterange: list) -> bool:
        """
        Fills in the custom date-range inputs.

        Args:
            daterange (list): custom dates; the first item is used as the
                range start and the last item as the range end.

        Returns:
            bool: True if the inputs were filled, False if ``daterange``
            is empty.
        """
        # Bug fix: the function documented a bool return but returned None,
        # and an empty list raised IndexError instead of returning False.
        if not daterange:
            return False

        for xpath, value in (
            (self.XPATH_INPUT_RANGE_START, daterange[0]),
            (self.XPATH_INPUT_RANGE_END, daterange[-1]),
        ):
            field = self.get_element(xpath=xpath)
            # Clear via JS first: send_keys alone would append to any
            # pre-filled value in the input.
            self.driver.execute_script("arguments[0].focus();", field)
            self.driver.execute_script("arguments[0].value = '';", field)
            field.send_keys(value)

        return True

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
AWS tarefas dentro da plataforma Eng_Dados LinkedIn
Projects
Archived in project
Development

No branches or pull requests

3 participants