|
| 1 | +"""Ingestor file for xlsx files""" |
| 2 | +from typing import List, AsyncGenerator |
| 3 | +import io |
| 4 | +import pandas as pd |
| 5 | + |
| 6 | +from querent.ingestors.ingestor_factory import IngestorFactory |
| 7 | +from querent.ingestors.base_ingestor import BaseIngestor |
| 8 | +from querent.processors.async_processor import AsyncProcessor |
| 9 | +from querent.config.ingestor_config import IngestorBackend |
| 10 | +from querent.common.types.collected_bytes import CollectedBytes |
| 11 | + |
| 12 | + |
class XlsxIngestorFactory(IngestorFactory):
    """Factory that builds ingestors for xlsx files."""

    SUPPORTED_EXTENSIONS = {"xlsx"}

    async def supports(self, file_extension: str) -> bool:
        """Return True when *file_extension* names a format this factory handles."""
        return file_extension.lower() in self.SUPPORTED_EXTENSIONS

    async def create(
        self, file_extension: str, processors: List[AsyncProcessor]
    ) -> BaseIngestor:
        """Build an :class:`XlsxIngestor`, or ``None`` when unsupported."""
        is_supported = await self.supports(file_extension)
        if is_supported:
            return XlsxIngestor(processors)
        return None
| 27 | + |
| 28 | + |
class XlsxIngestor(BaseIngestor):
    """Ingestor for xlsx files.

    Buffers the byte chunks streamed for each file; once a file's bytes are
    complete, the workbook is parsed with pandas and the result is yielded
    downstream.
    """

    def __init__(self, processors: List[AsyncProcessor]):
        super().__init__(IngestorBackend.XLSX)
        # Processors to run over extracted content (see process_data).
        self.processors = processors

    async def ingest(
        self, poll_function: AsyncGenerator[CollectedBytes, None]
    ) -> AsyncGenerator[str, None]:
        """Consume chunks from *poll_function*, grouping them by file.

        Chunks belonging to the same file are accumulated; when a chunk for
        a new file arrives — or the stream ends — the buffered bytes are
        parsed and the extracted frames are yielded.
        """
        current_file = None
        collected_bytes = b""
        try:
            async for chunk_bytes in poll_function:
                if chunk_bytes.is_error():
                    # TODO handle error
                    continue
                if current_file is None:
                    current_file = chunk_bytes.file
                elif current_file != chunk_bytes.file:
                    # A new file started: flush the one buffered so far.
                    async for frames in self.extract_and_process_xlsx(
                        CollectedBytes(file=current_file, data=collected_bytes)
                    ):
                        yield frames
                    collected_bytes = b""
                    current_file = chunk_bytes.file
                collected_bytes += chunk_bytes.data
        except Exception:
            # TODO handle exception properly (log / propagate); for now an
            # empty marker is emitted so the consumer is not left hanging.
            yield ""
        finally:
            # Flush the last buffered file — but only if we actually saw
            # data. Without this guard an empty stream would hit
            # pd.read_excel on an empty buffer and raise from inside
            # finally, masking the generator's real outcome.
            if current_file is not None:
                async for frames in self.extract_and_process_xlsx(
                    CollectedBytes(file=current_file, data=collected_bytes)
                ):
                    yield frames

    async def extract_and_process_xlsx(
        self, collected_bytes: CollectedBytes
    ) -> AsyncGenerator[str, None]:
        """Parse the buffered workbook bytes and yield the extracted frame.

        NOTE(review): the yielded value is a pandas DataFrame, not a str as
        the annotation suggests, and ``self.processors`` are not applied
        here — confirm the intended contract with downstream consumers.
        """
        df = await self.extract_text_from_xlsx(collected_bytes)
        yield df

    async def extract_text_from_xlsx(
        self, collected_bytes: CollectedBytes
    ) -> pd.DataFrame:
        """Read the workbook bytes into a DataFrame (first sheet by default)."""
        excel_buffer = io.BytesIO(collected_bytes.data)
        dataframe = pd.read_excel(excel_buffer)
        return dataframe

    async def process_data(self, text: str) -> List[str]:
        """Run *text* through each configured processor, in order."""
        processed_data = text
        for processor in self.processors:
            processed_data = await processor.process(processed_data)
        return processed_data
0 commit comments