#!/usr/bin/env python
import sys
from traceback import format_tb
import time
from datetime import datetime
from gzip import GzipFile
import io
import copy
import json
import numpy as np
import logging

from charset_normalizer import from_bytes
from .gformat import gformat, getfloat_attr
from .paths import uname, bindir, nativepath, unixpath, get_homedir, get_cwd
from .debugtime import debugtime, debugtimer

from .strutils import (fixName, isValidName, isNumber, bytes2str,
                       str2bytes, fix_filename, fix_varname,
                       isLiteralStr, strip_comments, asfloat,
                       find_delims, version_ge, unique_name,
                       get_sessionid, strict_ascii)

from .shellutils import (_more, _parent, ls, cd, cwd, mkdir)

logging.basicConfig(format='%(levelname)s [%(asctime)s]: %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S', level=logging.WARNING)

def format_exception(with_traceback=True):
    """return exception message as list of strings,
    optionally including traceback
    """
    etype, exc, tb = sys.exc_info()
    out = []
    if with_traceback:
        out = ["Traceback (most recent call last):"]
        for tline in format_tb(tb):
            if tline.endswith('\n'):
                tline = tline[:-1]
            out.append(tline)
    out.append(f"{etype.__name__}: {exc}")
    return out
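
# Illustrative usage sketch (not part of the module): format_exception() is
# meant to be called from inside an exception handler, once sys.exc_info()
# has been populated:
#
#     try:
#         1 / 0
#     except ZeroDivisionError:
#         for line in format_exception():
#             print(line)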

def write_log(msg, level='debug'):
    """log a message at the named level: one of 'debug', 'info',
    'warn'/'warning', 'error', or 'critical' (or the corresponding
    logging constant), defaulting to 'debug'"""
    f = logging.debug
    if level in ('warn', 'warning', logging.WARNING):
        f = logging.warning
    elif level in ('info', logging.INFO):
        f = logging.info
    elif level in ('error', logging.ERROR):
        f = logging.error
    elif level in ('critical', logging.CRITICAL):
        f = logging.critical
    return f(msg)
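
# Illustrative usage sketch: write_log() accepts either a level name or a
# logging constant:
#
#     write_log("fit did not converge", level='warning')
#     write_log("starting fit", level=logging.INFO)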

def log_warning(msg):
    return logging.warning(msg)

def log_debug(msg):
    return logging.debug(msg)

def log_info(msg):
    return logging.info(msg)

def log_error(msg):
    return logging.error(msg)

def log_critical(msg):
    return logging.critical(msg)

def is_gzip(filename):
    "is a file gzipped? checks for the gzip magic bytes at the start of the file"
    with open(filename, 'rb') as fh:
        return fh.read(3) == b'\x1f\x8b\x08'

def read_textfile(filename, size=None):
    """read text from a file as a string

    Arguments
    ---------
    filename (str or file): name of file to read or file-like object
    size (int or None): number of bytes to read

    Returns
    -------
    text of file as a string.

    Notes
    ------
    1. the encoding is detected with charset_normalizer.from_bytes,
       which is then used to decode bytes read from the file.
    2. line endings are normalized to '\\n', so that
       splitting on '\\n' will give a list of lines.
    3. if filename is given, it can be a gzip-compressed file
    """
    text = ''

    def decode(bytedata):
        return str(from_bytes(bytedata).best())

    if isinstance(filename, io.IOBase):
        text = filename.read(size)
        if isinstance(text, bytes):  # opened in binary mode (including io.BytesIO,
            text = decode(text)      # which has no .mode attribute)
    else:
        fopen = GzipFile if is_gzip(filename) else open
        with fopen(filename, 'rb') as fh:
            text = decode(fh.read(size))
    return text.replace('\r\n', '\n').replace('\r', '\n')
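
# Illustrative usage sketch ('data.txt.gz' is a hypothetical file): one call
# handles plain, gzipped, and already-open files, always returning str with
# '\n' line endings:
#
#     text = read_textfile('data.txt.gz')
#     lines = text.split('\n')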

def group2dict(group, _larch=None):
    "return dictionary of group members"
    return group.__dict__

def dict2group(d, _larch=None):
    "return group created from a dictionary"
    from larch import Group
    return Group(**d)

def copy_group(group, _larch=None):
    """return a copy of a group, deep-copying each attribute where
    possible and skipping attributes that cannot be copied at all"""
    from larch import Group
    out = Group(datatype=getattr(group, 'datatype', 'unknown'),
                copied_from=getattr(group, 'groupname', repr(group)))

    class NoCopy:
        "sentinel for attributes that could not be copied"
        c = 'no copy'

    for attr in dir(group):
        val = NoCopy
        try:
            val = copy.deepcopy(getattr(group, attr))
        except ValueError:
            try:
                val = copy.copy(getattr(group, attr))
            except Exception:
                val = NoCopy
        # identity test, not '!=': equality would call __eq__, which for
        # numpy arrays returns an array rather than a bool
        if val is not NoCopy:
            setattr(out, attr, val)
    return out
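
# Illustrative usage sketch (g1 is a hypothetical Group): the copy records
# its source, so copies can be traced:
#
#     g2 = copy_group(g1)
#     print(g2.copied_from)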

def copy_xafs_group(group, _larch=None):
    """specialized group copy for XAFS data groups"""
    from larch import Group
    out = Group(datatype=getattr(group, 'datatype', 'unknown'),
                copied_from=getattr(group, 'groupname', repr(group)))

    for attr in dir(group):
        do_copy = True
        if attr in ('xdat', 'ydat', 'i0', 'data', 'yerr',
                    'energy', 'mu'):
            # multiplying by 1.0 forces a copy of the numeric array
            val = getattr(group, attr)*1.0
        elif attr in ('norm', 'flat', 'deriv', 'deconv',
                      'post_edge', 'pre_edge', 'norm_mback',
                      'norm_vict', 'norm_poly'):
            # skip derived arrays, which can be recomputed for the copy
            do_copy = False
        else:
            try:
                val = copy.deepcopy(getattr(group, attr))
            except ValueError:
                do_copy = False
        if do_copy:
            setattr(out, attr, val)
    return out

def isotime(t=None, with_tzone=False, filename=False):
    """return an ISO-8601-style string ('YYYY-MM-DD HH:MM:SS') for a
    timestamp, defaulting to the current time, optionally appending the
    UTC offset or reformatting to be safe for use in filenames"""
    if t is None:
        t = time.time()
    sout = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t))
    if with_tzone:
        # time.timezone is seconds *west* of UTC, so negate it to get a
        # properly signed offset for zones east of UTC as well
        sout = "%s%+03i:00" % (sout, -time.timezone // 3600)
    if filename:
        sout = sout.replace(' ', '_').replace(':', '')
    return sout
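
# Illustrative usage sketch: isotime(filename=True) gives a filename-safe
# form, e.g. '2023-11-09_100800' in place of '2023-11-09 10:08:00'.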

def time_ago(timestamp, precision=2):
    """
    give a human-readable 'time ago' string for a timestamp.

    The output gives days, hours, minutes, and seconds, as in:
        '52 days, 1 hour'

    The `precision` argument gives the number of most-significant time
    units to show, and defaults to 2:
        'N days, H hours'
        'H hours, M minutes'
    """
    def fmt(x, unit):
        # named 'fmt' to avoid shadowing the builtin format()
        return "%d %s%s" % (x, unit, "" if x == 1 else "s")

    tdiff = datetime.now() - datetime.fromtimestamp(timestamp)
    days = tdiff.days
    hours = tdiff.seconds // 3600
    minutes = (tdiff.seconds % 3600) // 60
    seconds = tdiff.seconds % 60

    out = []
    if days > 0:
        out.append(fmt(days, "day"))
    if hours > 0:
        out.append(fmt(hours, "hour"))
    if minutes > 0:
        out.append(fmt(minutes, "minute"))
    out.append(fmt(seconds, "second"))
    return ", ".join(out[:precision])
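
# Illustrative usage sketch: for a timestamp about 90 minutes old,
#
#     time_ago(time.time() - 5400)
#
# returns '1 hour, 30 minutes'.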

def json_dump(data, filename):
    """
    dump an object or group to a file using json
    """
    from .jsonutils import encode4js
    with open(filename, 'w') as fh:
        fh.write(json.dumps(encode4js(data)))
        fh.write('\n')

def json_load(filename):
    """
    load an object from a json dump file
    """
    from .jsonutils import decode4js
    with open(filename, 'rb') as fh:
        data = fh.read().decode('utf-8')
    return decode4js(json.loads(data))
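
# Illustrative round-trip sketch ('session.json' is a hypothetical path,
# data any encodable object or Group):
#
#     json_dump(data, 'session.json')
#     restored = json_load('session.json')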

def _larch_init(_larch):
    """initialize _sys.display settings for a larch session"""
    from ..symboltable import Group
    _larch.symtable._sys.display = Group(use_color=True,
                                         colors=dict(text={'color': 'black'},
                                                     text2={'color': 'blue'},
                                                     error={'color': 'red'}))

_larch_builtins = dict(copy=copy.copy, deepcopy=copy.deepcopy, more=_more,
                       parent=_parent, ls=ls, mkdir=mkdir, cd=cd,
                       cwd=cwd, group2dict=group2dict,
                       copy_group=copy_group, copy_xafs_group=copy_xafs_group,
                       dict2group=dict2group, debugtimer=debugtimer,
                       isotime=isotime, json_dump=json_dump,
                       json_load=json_load, gformat=gformat)