diff --git a/main/__init__.py b/main/__init__.py
index 8fff655..8649361 100644
--- a/main/__init__.py
+++ b/main/__init__.py
@@ -14,7 +14,7 @@
 
 # env: PYARCHAPPL_LOG_LEVEL: debug, info (default), warning, error, critical
 DEFAULT_LOGGING_LEVEL = __LOG_LEVEL_MAP.get(
-    os.environ.get('PYARCHAPPL_LOG_LEVEL', 'INFO').upper())
+    os.environ.get('PYARCHAPPL_LOG_LEVEL', 'WARNING').upper())
 
 logging.basicConfig(
     level=DEFAULT_LOGGING_LEVEL,
@@ -58,7 +58,7 @@ def set_logging_level(level='info'):
 
 from archappl.data import *
 
-__version__ = '1.0.0'
+__version__ = '1.0.2'
 __author__ = 'Tong Zhang (@zhangt58)'
 __doc__ ="""PyArchAppl: Python interface of Archiver Appliance,
 module name: 'archappl'."""
diff --git a/main/contrib/data.py b/main/contrib/data.py
index ce7f26f..1053bc0 100644
--- a/main/contrib/data.py
+++ b/main/contrib/data.py
@@ -35,13 +35,13 @@ def _get_data(pv: str, from_time: str, to_time: str,
     except HitEmptyDataset:
         # got nothing
         r, reason = None, "NotExist"
-        _LOGGER.error(f"Got nothing, either '{pv}' is not being archived or no data in the given time range.")
+        _LOGGER.error(f"'{pv}' is not being archived or no data in the given time range.")
     except HitSingleDataEntry:
         reason = "SingleEntry"
         data.drop(columns=['severity', 'status'], inplace=True)
         data.rename(columns={'val': pv}, inplace=True)
         r = data
-        _LOGGER.warning(f"Only got a single sample for '{pv}'")
+        _LOGGER.warning(f"Got only one sample for '{pv}'")
     else:
         data.drop(columns=['severity', 'status'], inplace=True)
         data.rename(columns={'val': pv}, inplace=True)
@@ -123,7 +123,7 @@ def get_dataset_with_pvs(pv_list: list[str], from_time: Union[str, None] = None,
     if kws.pop('use_json', False):
         client.format = "json"
     df_list = []
-    _LOGGER.info("Started fetching data...")
+    _LOGGER.debug("Started fetching data...")
     if verbose != 0 and TQDM_INSTALLED:
         from archappl import tqdm
         pbar = tqdm(pv_list)
@@ -151,8 +151,7 @@ def get_dataset_with_pvs(pv_list: list[str], from_time: Union[str, None] = None,
     _df2 = _df1[~_df1.index.duplicated(keep='first')]
    data = _df2.resample(resample).ffill()
     data.dropna(inplace=True)
-    if verbose > 0:
-        _LOGGER.info(f"Fetched all data in {time.time() - t0_:.1f} seconds")
+    _LOGGER.debug(f"Fetched all data in {time.time() - t0_:.1f} seconds")
     return data
 
 
diff --git a/main/data/client.py b/main/data/client.py
index 9382b2f..31670d4 100644
--- a/main/data/client.py
+++ b/main/data/client.py
@@ -123,7 +123,7 @@ def get_data(self, pv: str, **kws) -> Union[pd.DataFrame, None]:
 
         r = requests.get(url)
         if not r.ok:
-            _LOGGER.error(f"Fetched data error: {r.status_code}")
+            _LOGGER.error(f"Error fetching data for '{pv}': {r.status_code}")
             return None
         if self.format == 'raw':
             return normalize(unpack_raw_data(r.content), tz, last_n=last_n)
diff --git a/main/scripts/get.py b/main/scripts/get.py
index 15e983f..d90afee 100644
--- a/main/scripts/get.py
+++ b/main/scripts/get.py
@@ -93,7 +93,7 @@ class Formatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionH
 
 
 def main():
-    _LOGGER.info(f"Executing {os.path.basename(sys.argv[0])} ...")
+    _LOGGER.debug(f"Executing {os.path.basename(sys.argv[0])} {sys.argv[1:]} ...")
     args = parser.parse_args(sys.argv[1:])
 
     if args.version:
@@ -123,7 +123,7 @@ def main():
     # time range
     if args.from_time is None or args.to_time is None:
         _LOGGER.warning(
-            "Arguments: --from and/or --to is not set, refer to -h for time range set.")
+            "Arguments: --from and/or --to is not set, see -h for help.")
         # sys.exit(1)
     else:
         _LOGGER.info(f"Fetch data from {args.from_time} to {args.to_time}")
@@ -158,7 +158,7 @@ def main():
     client = ArchiverDataClient(url=args.url)
     if args.use_json:
         client.format = "json"
-    _LOGGER.info(f"{client}")
+    _LOGGER.debug(f"{client}")
     dset = get_dataset_with_pvs(pv_list, args.from_time, args.to_time,
                                 client=client, resample=args.resample,
                                 verbose=args.verbose,
diff --git a/pyproject.toml b/pyproject.toml
index 47ca421..581f4bf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "pyarchappl"
-version = "1.0.2"
+version = "1.0.2-1"
 authors = [
   { name="Tong Zhang" }
 ]
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..84dc483
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+from setuptools import setup
+
+
+def readme() -> str:
+    with open('README.md', 'r') as f:
+        return f.read()
+
+
+def read_requires(filepath: str) -> list[str]:
+    lines = []
+    for line in open(filepath, "r"):
+        lines.append(line.strip())
+    return lines
+
+
+install_requires = [
+    "numpy",
+    "pandas",
+    "openpyxl",
+    "tqdm",
+    "requests",
+    "simplejson",
+    "tables",
+    "protobuf>=3.0,<4.0",
+    "setuptools",
+]
+
+
+extras_require = {
+    'test': ['pytest'],
+    'doc': ['sphinx', 'pydata_sphinx_theme'],
+}
+
+
+def set_entry_points():
+    r = {
+        'console_scripts': [
+            'pyarchappl-get=archappl.scripts.get:main',
+            'pyarchappl-inspect=archappl.scripts.inspect:main',
+        ]
+    }
+    return r
+
+
+setup(
+    name='pyarchappl',
+    version='1.0.2-1',
+    description='Python interface to Archiver Appliance',
+    long_description=readme(),
+    long_description_content_type='text/markdown',
+    url="https://github.com/zhangt58/pyarchappl",
+    author='Tong Zhang',
+    packages=[
+        'archappl.admin', 'archappl.data', 'archappl.data.pb',
+        'archappl.client', 'archappl.contrib', 'archappl.config',
+        'archappl.scripts', 'archappl.tests', 'archappl'
+    ],
+    package_dir={
+        'archappl.admin': 'main/mgmt',
+        'archappl.data': 'main/data',
+        'archappl.data.pb': 'main/data/pb',
+        'archappl.client': 'main/client',
+        'archappl.contrib': 'main/contrib',
+        'archappl.config': 'main/config',
+        'archappl.scripts': 'main/scripts',
+        'archappl.tests': 'main/tests',
+        'archappl': 'main'
+    },
+    include_package_data=True,
+    entry_points=set_entry_points(),
+    python_requires=">=3.9",
+    install_requires=install_requires,
+    extras_require=extras_require,
+    license='GPL3+',
+    keywords="Archiver EPICS CA PVA",
+    classifiers=[
+        'Programming Language :: Python :: 3',
+        'Topic :: Scientific/Engineering',
+        'Topic :: Software Development :: Libraries :: Python Modules',
+    ],
+)
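
Note (not part of the patch): with this change the import-time default log level drops from INFO to WARNING, and several progress messages are demoted to debug, so they disappear unless the user opts back in. A minimal sketch of how to do that, assuming only what is visible in the main/__init__.py hunks (the PYARCHAPPL_LOG_LEVEL environment variable, read once at import time, and the set_logging_level helper):

    import os
    # Option 1: the environment variable is read when archappl is first
    # imported, so it must be set before that import happens.
    os.environ['PYARCHAPPL_LOG_LEVEL'] = 'debug'

    import archappl
    # Option 2: switch the level at runtime via the helper visible in the
    # main/__init__.py hunk context (accepted values per the source comment:
    # debug, info, warning, error, critical).
    archappl.set_logging_level('debug')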
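
A second, hypothetical sketch of the code paths this diff touches, mirroring what main/scripts/get.py does. The import locations are inferred from the package_dir mapping in setup.py (main/data -> archappl.data, main/contrib -> archappl.contrib) and are assumptions; the URL, PV names, time strings, and resample rule are placeholders, not values from the project:

    from archappl.data.client import ArchiverDataClient      # assumed module path
    from archappl.contrib.data import get_dataset_with_pvs   # assumed module path

    client = ArchiverDataClient(url="http://127.0.0.1:17665")  # placeholder appliance URL
    client.format = "json"   # same switch the --use-json CLI option flips

    # Placeholder PV names and ISO-8601 time range; resample is a pandas offset
    # rule, matching the .resample(resample).ffill() call in main/contrib/data.py.
    dset = get_dataset_with_pvs(["TST:PV1", "TST:PV2"],
                                "2024-01-01T00:00:00.000Z",
                                "2024-01-01T01:00:00.000Z",
                                client=client, resample="1S", verbose=0)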