|
3 | 3 | import numpy as np |
4 | 4 | import chardet |
5 | 5 | import requests |
| 6 | +import google.oauth2.service_account |
6 | 7 |
|
7 | 8 | from spaceone.core.transaction import Transaction |
8 | 9 | from spaceone.core.connector import BaseConnector |
9 | 10 | from typing import List |
10 | 11 |
|
11 | 12 | from cloudforet.cost_analysis.error import * |
12 | 13 |
|
# Public API of this module.
__all__ = ["HTTPFileConnector"]

_LOGGER = logging.getLogger(__name__)

# Number of cost rows returned per page by HTTPFileConnector.get_cost_data().
_PAGE_SIZE = 1000

class HTTPFileConnector(BaseConnector):
    """Connector that fetches cost data from a CSV file exposed over HTTP.

    ``create_session`` must be called first: it validates the plugin options
    and caches the base URL. ``get_cost_data`` then downloads and parses the
    CSV and yields its rows back in pages of ``_PAGE_SIZE`` dicts.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Filled in by create_session() from the plugin options.
        self.base_url = None
        self.field_mapper = None
        self.default_vars = None

    def create_session(
        self, options: dict, secret_data: dict, schema: str = None
    ) -> None:
        """Validate ``options`` and cache the connection settings.

        Args:
            options: Plugin options. Must contain ``base_url``; may also
                contain ``field_mapper`` and ``default_vars``.
            secret_data: Unused here (a plain HTTP file needs no
                credentials); kept for the connector interface.
            schema: Unused; kept for the connector interface.

        Raises:
            ERROR_REQUIRED_PARAMETER: If ``options.base_url`` is missing.
        """
        self._check_options(options)
        self.base_url = options["base_url"]

        if "field_mapper" in options:
            self.field_mapper = options["field_mapper"]

        if "default_vars" in options:
            self.default_vars = options["default_vars"]

    def get_cost_data(self, base_url):
        """Download the CSV at ``base_url`` and yield it page by page.

        Yields:
            list[dict]: Up to ``_PAGE_SIZE`` row dicts per page. No empty
            trailing page is produced.
        """
        _LOGGER.debug(f"[get_cost_data] base url: {base_url}")

        costs_data = self._get_csv(base_url)

        _LOGGER.debug(f"[get_cost_data] costs count: {len(costs_data)}")

        # Ceiling division. The previous `int(len / _PAGE_SIZE) + 1` yielded
        # a spurious empty page whenever the row count was an exact multiple
        # of _PAGE_SIZE (and a single empty page for zero rows).
        page_count = (len(costs_data) + _PAGE_SIZE - 1) // _PAGE_SIZE

        for page_num in range(page_count):
            offset = _PAGE_SIZE * page_num
            yield costs_data[offset : offset + _PAGE_SIZE]

    @staticmethod
    def _check_options(options: dict) -> None:
        """Ensure the required ``base_url`` option is present."""
        if "base_url" not in options:
            raise ERROR_REQUIRED_PARAMETER(key="options.base_url")

    def _get_csv(self, base_url: str) -> List[dict]:
        """Read the remote CSV into a list of row dicts.

        All cells are read as ``str`` (``dtype=str``); ``NaN`` cells are
        normalized to ``None`` so downstream code never sees float NaN
        sentinels. Errors are logged with traceback and re-raised.
        """
        try:
            csv_format = self._search_csv_format(base_url)
            df = pd.read_csv(
                base_url,
                header=0,
                sep=",",
                engine="python",
                encoding=csv_format,
                dtype=str,
            )
            df = df.replace({np.nan: None})

            costs_data = df.to_dict("records")
            return costs_data

        except Exception as e:
            _LOGGER.error(f"[_get_csv] download error: {e}", exc_info=True)
            # Bare raise preserves the original traceback exactly.
            raise

    @staticmethod
    def _search_csv_format(base_url: str) -> str:
        """Detect the character encoding of the file at ``base_url``.

        NOTE(review): this downloads the whole file once just to sniff the
        encoding, and pandas downloads it again in ``_get_csv`` — acceptable
        for small files, wasteful for large ones.
        """
        try:
            # NOTE(review): no timeout on this request — it can hang
            # indefinitely; consider requests.get(base_url, timeout=...).
            response = requests.get(base_url)
            response.encoding = chardet.detect(response.content)["encoding"]
            _LOGGER.debug(f"[_search_csv_format] encoding: {response.encoding}")
            return response.encoding

        except Exception as e:
            _LOGGER.error(f"[_search_csv_format] download error: {e}", exc_info=True)
            raise
0 commit comments