4 changes: 2 additions & 2 deletions main/__init__.py
@@ -14,7 +14,7 @@

 # env: PYARCHAPPL_LOG_LEVEL: debug, info (default), warning, error, critical
 DEFAULT_LOGGING_LEVEL = __LOG_LEVEL_MAP.get(
-    os.environ.get('PYARCHAPPL_LOG_LEVEL', 'INFO').upper())
+    os.environ.get('PYARCHAPPL_LOG_LEVEL', 'WARNING').upper())

 logging.basicConfig(
     level=DEFAULT_LOGGING_LEVEL,
@@ -58,7 +58,7 @@ def set_logging_level(level='info'):
 from archappl.data import *


-__version__ = '1.0.0'
+__version__ = '1.0.2'
 __author__ = 'Tong Zhang (@zhangt58)'

 __doc__ ="""PyArchAppl: Python interface of Archiver Appliance, module name: 'archappl'."""
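With this change the package logs at WARNING by default instead of INFO. A minimal sketch of the two ways to pick a level, assuming the package is importable as `archappl` (per the `package_dir` mapping in setup.py below):

```python
import os

# Set the environment variable before the first import; accepted values per
# the comment in main/__init__.py: debug, info, warning, error, critical.
# If unset, the default is now WARNING rather than INFO.
os.environ['PYARCHAPPL_LOG_LEVEL'] = 'debug'

import archappl

# Or switch the level at runtime via the helper named in the hunk header.
archappl.set_logging_level(level='info')
```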
9 changes: 4 additions & 5 deletions main/contrib/data.py
@@ -35,13 +35,13 @@ def _get_data(pv: str, from_time: str, to_time: str,
     except HitEmptyDataset:
         # got nothing
         r, reason = None, "NotExist"
-        _LOGGER.error(f"Got nothing, either '{pv}' is not being archived or no data in the given time range.")
+        _LOGGER.error(f"'{pv}' is not being archived or no data in the given time range.")
     except HitSingleDataEntry:
         reason = "SingleEntry"
         data.drop(columns=['severity', 'status'], inplace=True)
         data.rename(columns={'val': pv}, inplace=True)
         r = data
-        _LOGGER.warning(f"Only got a single sample for '{pv}'")
+        _LOGGER.warning(f"Got only one sample for '{pv}'")
     else:
         data.drop(columns=['severity', 'status'], inplace=True)
         data.rename(columns={'val': pv}, inplace=True)
@@ -123,7 +123,7 @@ def get_dataset_with_pvs(pv_list: list[str], from_time: Union[str, None] = None,
     if kws.pop('use_json', False):
         client.format = "json"
     df_list = []
-    _LOGGER.info("Started fetching data...")
+    _LOGGER.debug("Started fetching data...")
     if verbose != 0 and TQDM_INSTALLED:
         from archappl import tqdm
         pbar = tqdm(pv_list)
@@ -151,8 +151,7 @@ def get_dataset_with_pvs(pv_list: list[str], from_time: Union[str, None] = None,
        _df2 = _df1[~_df1.index.duplicated(keep='first')]
        data = _df2.resample(resample).ffill()
        data.dropna(inplace=True)
-    if verbose > 0:
-        _LOGGER.info(f"Fetched all data in {time.time() - t0_:.1f} seconds")
+    _LOGGER.debug(f"Fetched all data in {time.time() - t0_:.1f} seconds")
Copilot AI commented on Sep 19, 2025:

This debug log is now always executed regardless of verbose level, but the original code only logged this when verbose > 0. Consider adding the verbose check back if this timing information should only be shown when explicitly requested.

Suggested change:
-    _LOGGER.debug(f"Fetched all data in {time.time() - t0_:.1f} seconds")
+    if verbose > 0:
+        _LOGGER.debug(f"Fetched all data in {time.time() - t0_:.1f} seconds")
     return data


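For context, a minimal usage sketch of `get_dataset_with_pvs`, mirroring the call in main/scripts/get.py; the import paths follow this repo's `package_dir` mapping, while the URL, PV names, and time strings are placeholders for illustration:

```python
from archappl.contrib.data import get_dataset_with_pvs
from archappl.data.client import ArchiverDataClient

client = ArchiverDataClient(url='http://localhost:17665')  # hypothetical appliance URL
pv_list = ['TST:PV1', 'TST:PV2']                           # hypothetical PV names
dset = get_dataset_with_pvs(
    pv_list,
    '2025-09-18T00:00:00',  # from_time (format assumed)
    '2025-09-19T00:00:00',  # to_time (format assumed)
    client=client,
    resample='1min',        # pandas rule, applied as data.resample(resample).ffill()
    verbose=1,              # non-zero enables the tqdm progress bar when installed
)
print(dset.head())
```

With the changes above, the start/finish messages are emitted at DEBUG level, so under the new WARNING default they stay quiet unless the level is explicitly lowered.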
2 changes: 1 addition & 1 deletion main/data/client.py
@@ -123,7 +123,7 @@ def get_data(self, pv: str, **kws) -> Union[pd.DataFrame, None]:

         r = requests.get(url)
         if not r.ok:
-            _LOGGER.error(f"Fetched data error: {r.status_code}")
+            _LOGGER.error(f"Error fetching data for '{pv}': {r.status_code}")
             return None
         if self.format == 'raw':
             return normalize(unpack_raw_data(r.content), tz, last_n=last_n)
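A short sketch of the improved error message in context; `ArchiverDataClient` and `get_data` are taken from this diff, while the URL and PV name are placeholders:

```python
from archappl.data.client import ArchiverDataClient

client = ArchiverDataClient(url='http://localhost:17665')  # hypothetical appliance URL
df = client.get_data('TST:PV1')  # hypothetical PV name
if df is None:
    # On an HTTP error the client now logs
    # "Error fetching data for 'TST:PV1': <status_code>" and returns None.
    print("Fetch failed; see the log for the PV-specific error.")
```

Naming the PV in the message makes failures attributable when fetching many PVs in one run.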
6 changes: 3 additions & 3 deletions main/scripts/get.py
@@ -93,7 +93,7 @@ class Formatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):


 def main():
-    _LOGGER.info(f"Executing {os.path.basename(sys.argv[0])} ...")
+    _LOGGER.debug(f"Executing {os.path.basename(sys.argv[0])} {sys.argv[1:]} ...")
     args = parser.parse_args(sys.argv[1:])

     if args.version:
@@ -123,7 +123,7 @@ def main():
     # time range
     if args.from_time is None or args.to_time is None:
         _LOGGER.warning(
-            "Arguments: --from and/or --to is not set, refer to -h for time range set.")
+            "Arguments: --from and/or --to is not set, see -h for help.")
         # sys.exit(1)
     else:
         _LOGGER.info(f"Fetch data from {args.from_time} to {args.to_time}")
@@ -158,7 +158,7 @@ def main():
     client = ArchiverDataClient(url=args.url)
     if args.use_json:
         client.format = "json"
-    _LOGGER.info(f"{client}")
+    _LOGGER.debug(f"{client}")

     dset = get_dataset_with_pvs(pv_list, args.from_time, args.to_time, client=client,
                                 resample=args.resample, verbose=args.verbose,
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "pyarchappl"
-version = "1.0.2"
+version = "1.0.2-1"
 authors = [
     { name="Tong Zhang" }
 ]
83 changes: 83 additions & 0 deletions setup.py
@@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-

from setuptools import setup


def readme() -> str:
    with open('README.md', 'r') as f:
        return f.read()


def read_requires(filepath: str) -> list[str]:
    lines = []
    for line in open(filepath, "r"):
        lines.append(line.strip())
    return lines


install_requires = [
    "numpy",
    "pandas",
    "openpyxl",
    "tqdm",
    "requests",
    "simplejson",
    "tables",
    "protobuf>=3.0,<4.0",
    "setuptools",
]


extras_require = {
    'test': ['pytest'],
    'doc': ['sphinx', 'pydata_sphinx_theme'],
}


def set_entry_points():
    r = {
        'console_scripts': [
            'pyarchappl-get=archappl.scripts.get:main',
            'pyarchappl-inspect=archappl.scripts.inspect:main',
        ]
    }
    return r


setup(
    name='pyarchappl',
    version='1.0.2-1',
    description='Python interface to Archiver Appliance',
    long_description=readme(),
    long_description_content_type='text/markdown',
    url="https://github.com/zhangt58/pyarchappl",
    author='Tong Zhang',
    packages=[
        'archappl.admin', 'archappl.data', 'archappl.data.pb',
        'archappl.client', 'archappl.contrib', 'archappl.config',
        'archappl.scripts', 'archappl.tests', 'archappl'
    ],
    package_dir={
        'archappl.admin': 'main/mgmt',
        'archappl.data': 'main/data',
        'archappl.data.pb': 'main/data/pb',
        'archappl.client': 'main/client',
        'archappl.contrib': 'main/contrib',
        'archappl.config': 'main/config',
        'archappl.scripts': 'main/scripts',
        'archappl.tests': 'main/tests',
        'archappl': 'main'
    },
    include_package_data=True,
    entry_points=set_entry_points(),
    python_requires=">=3.9",
    install_requires=install_requires,
    extras_require=extras_require,
    license='GPL3+',
    keywords="Archiver EPICS CA PVA",
    classifiers=[
        'Programming Language :: Python :: 3',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
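A quick post-install check, assuming the package has been installed (e.g. with pip) so that main/ is importable as `archappl` via the `package_dir` mapping above:

```python
import archappl

# __version__ comes from main/__init__.py, which this PR bumps to '1.0.2';
# note that setup.py and pyproject.toml carry the package release '1.0.2-1'.
print(archappl.__version__)
print(archappl.__doc__)
```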