From 8a6bb6183db3180da2b3b7385b57578fe7bbb570 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 10:23:31 -0400 Subject: [PATCH 01/13] Add files for linting * flake8, black, isort --- .flake8 | 5 +++++ .github/workflows/linting.yml | 14 ++++++++++++++ .isort.cfg | 2 ++ .pre-commit-config.yaml | 29 +++++++++++++++++++++++++++++ pyproject.toml | 9 +++++++++ 5 files changed, 59 insertions(+) create mode 100644 .flake8 create mode 100644 .github/workflows/linting.yml create mode 100644 .isort.cfg create mode 100644 .pre-commit-config.yaml create mode 100644 pyproject.toml diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..b2a608d --- /dev/null +++ b/.flake8 @@ -0,0 +1,5 @@ +[flake8] +exclude = + .git, + __pycache__, +max-line-length = 115 diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 0000000..9035d06 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,14 @@ +name: pre-commit + +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + - uses: pre-commit/action@v2.0.3 diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..b9fb3f3 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,2 @@ +[settings] +profile=black diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..6796401 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,29 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-ast + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: debug-statements + +- repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + +- repo: https://github.com/timothycrosley/isort + rev: 5.9.3 + hooks: + - id: isort + +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..481f86a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,9 @@ +[tool.black] +line-length = 88 +exclude = ''' +( + /( + | \.git + )/ +) +''' From 26e8dcbd39b6297b5417ad3384f6f5909fb854d3 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 10:26:11 -0400 Subject: [PATCH 02/13] changes made by 'pre-commit run --all-files' * do flake8 separately because it only reports errors, does not actually change files --- LICENSE | 1 - README.md | 1 - end_of_run_workflow.py | 9 ++--- export.py | 14 +++---- load_scan.py | 11 +++--- print_scanid.py | 8 ++-- tasks.py | 90 ++++++++++++++++++++++-------------------- test_tasks.py | 6 ++- tomo_recon_tiled.py | 49 ++++++++++++----------- 9 files changed, 97 insertions(+), 92 deletions(-) diff --git a/LICENSE b/LICENSE index a6393ed..3c99b84 100644 --- a/LICENSE +++ b/LICENSE @@ -27,4 +27,3 @@ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - diff --git a/README.md b/README.md index 0e92c9d..1102ad6 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ # Workflows Repository of workflows for the FXI beamline. 
- diff --git a/end_of_run_workflow.py b/end_of_run_workflow.py index 56a96fd..3d71406 100644 --- a/end_of_run_workflow.py +++ b/end_of_run_workflow.py @@ -1,5 +1,5 @@ import prefect -from prefect import task, Flow, Parameter +from prefect import Flow, Parameter, task from prefect.tasks.prefect import create_flow_run @@ -15,12 +15,9 @@ def log_completion(): validation_flow = create_flow_run( flow_name="general-data-validation", project_name="FXI", - parameters={"beamline_acronym": "fxi" ,"uid": uid} + parameters={"beamline_acronym": "fxi", "uid": uid}, ) export_flow = create_flow_run( - flow_name="export", - project_name="FXI", - parameters={"uid": uid} + flow_name="export", project_name="FXI", parameters={"uid": uid} ) log_completion(upstream_tasks=[validation_flow, export_flow]) - diff --git a/export.py b/export.py index d863a3c..63f9bbf 100644 --- a/export.py +++ b/export.py @@ -1,14 +1,14 @@ -import databroker import datetime +import os +from pathlib import Path + +import databroker import h5py import numpy as np -import os import pandas as pd import prefect - -from pathlib import Path from PIL import Image -from prefect import task, Flow, Parameter +from prefect import Flow, Parameter, task @task @@ -69,9 +69,9 @@ def get_img(run, det="Andor", sli=[]): "Take in a Header and return a numpy array of detA1 image(s)." det_name = f"{det}_image" if len(sli) == 2: - img = np.array(list(run['primary']['data'][det_name])[sli[0] : sli[1]]) + img = np.array(list(run["primary"]["data"][det_name])[sli[0] : sli[1]]) else: - img = np.array(list(run['primary']['data'][det_name])) + img = np.array(list(run["primary"]["data"][det_name])) return np.squeeze(img) diff --git a/load_scan.py b/load_scan.py index ecc556d..cb3e42e 100644 --- a/load_scan.py +++ b/load_scan.py @@ -1,9 +1,9 @@ from datetime import datetime + import numpy as np import pandas as pd from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler - EPICS_EPOCH = datetime(1990, 1, 1, 0, 0) @@ -12,11 +12,12 @@ def convert_AD_timestamps(ts): "US/Eastern" ) + def get_tomo_images(input_dict): - pos = input_dict['pos'] - imgs = input_dict['imgs'] - chunked_timestamps = input_dict['chunked_timestamps'] - mot_pos = input_dict['mot_pos'] + pos = input_dict["pos"] + imgs = input_dict["imgs"] + chunked_timestamps = input_dict["chunked_timestamps"] + mot_pos = input_dict["mot_pos"] raw_timestamps = [] for chunk in chunked_timestamps: diff --git a/print_scanid.py b/print_scanid.py index 548beb0..94e22e8 100644 --- a/print_scanid.py +++ b/print_scanid.py @@ -1,18 +1,20 @@ import databroker import prefect -from prefect import task, Flow +from prefect import Flow, task + @task def print_scanid(): client = databroker.from_profile("nsls2", username=None) - scan_id = client['fxi'][-1].start['scan_id'] + scan_id = client["fxi"][-1].start["scan_id"] logger = prefect.context.get("logger") logger.info(f"Scan ID: {scan_id}") + with Flow("scan_id") as flow: print_scanid() -#flow.register(project_name='TST', +# flow.register(project_name='TST', # labels=['fxi-2022-2.2'], # add_default_labels=False, # set_schedule_active=False) diff --git a/tasks.py b/tasks.py index 6373331..fb1306c 100644 --- a/tasks.py +++ b/tasks.py @@ -1,10 +1,11 @@ -import prefect -from prefect import task, Flow, Parameter -from tiled.client import from_profile from datetime import datetime + import numpy as np import pandas as pd +import prefect from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler +from prefect import Flow, Parameter, task +from tiled.client 
import from_profile def timestamp_to_float(t): @@ -13,11 +14,12 @@ def timestamp_to_float(t): tf.append(ts) return np.array(tf) + def get_fly_scan_angle(input_dict): timestamp_tomo = input_dict["timestamp_tomo"] pos = input_dict["pos"] mot_pos = input_dict["mot_pos"] - + timestamp_mot = timestamp_to_float(pos["time"]) img_ini_timestamp = timestamp_tomo[0][0] @@ -25,7 +27,7 @@ def get_fly_scan_angle(input_dict): 1 ] # timestamp_mot[1] is the time when taking dark image - print(f'timestamp_tomo: {timestamp_tomo} img_ini_timestamp: {img_ini_timestamp}') + print(f"timestamp_tomo: {timestamp_tomo} img_ini_timestamp: {img_ini_timestamp}") tomo_time = timestamp_tomo[0] - img_ini_timestamp mot_time = timestamp_mot - mot_ini_timestamp @@ -34,6 +36,7 @@ def get_fly_scan_angle(input_dict): img_angle = mot_pos_interp return img_angle + @task(log_stdout=True) def call_find_rot(uid): c = from_profile("nsls2", username=None) @@ -45,19 +48,19 @@ def call_find_rot(uid): # sanity check: make sure we remembered the right stream name assert "zps_pi_r_monitor" in scan_result pos = scan_result["zps_pi_r_monitor"]["data"] - logger.info('extracting data from tiled') + logger.info("extracting data from tiled") imgs = np.array(list(scan_result["primary"]["data"]["Andor_image"])) s1 = imgs.shape chunk_size = s1[1] imgs = imgs.reshape(-1, s1[2], s1[3]) - logger.info('done with primary images') + logger.info("done with primary images") # load darks and bkgs img_dark = np.array(list(scan_result["dark"]["data"]["Andor_image"]))[0] - logger.info('done with darks') + logger.info("done with darks") img_bkg = np.array(list(scan_result["flat"]["data"]["Andor_image"]))[0] - logger.info('done with background') + logger.info("done with background") img_dark_avg = np.mean(img_dark, axis=0, keepdims=True) img_bkg_avg = np.mean(img_bkg, axis=0, keepdims=True) @@ -65,16 +68,15 @@ def call_find_rot(uid): mot_pos = np.array(pos["zps_pi_r"]) - input_dict = {'pos': pos, - 'timestamp_tomo': chunked_timestamps, - 'mot_pos': mot_pos} + input_dict = {"pos": pos, "timestamp_tomo": chunked_timestamps, "mot_pos": mot_pos} img_tomo = np.array(list(scan_result["primary"]["data"]["Andor_image"]))[0] logger.info(img_tomo) img_angle = get_fly_scan_angle(input_dict) - logger.info('calculating rotation center') + logger.info("calculating rotation center") img, cen = rotcen_test2(img_tomo, img_bkg_avg, img_dark_avg, img_angle) return img, cen + with Flow("test-find-rot") as flow1: uid = Parameter("uid") call_find_rot(uid) @@ -87,11 +89,12 @@ def convert_AD_timestamps(ts): "US/Eastern" ) + def get_tomo_images(input_dict): - pos = input_dict['pos'] - imgs = input_dict['imgs'] - chunked_timestamps = input_dict['chunked_timestamps'] - mot_pos = input_dict['mot_pos'] + pos = input_dict["pos"] + imgs = input_dict["imgs"] + chunked_timestamps = input_dict["chunked_timestamps"] + mot_pos = input_dict["mot_pos"] raw_timestamps = [] for chunk in chunked_timestamps: @@ -138,6 +141,7 @@ def get_tomo_images(input_dict): img_tomo = imgs[: pos2 - chunk_size] # tomo images return img_tomo, img_angle + import numpy as np import tomopy from scipy.interpolate import interp1d @@ -147,6 +151,7 @@ def find_nearest(data, value): data = np.array(data) return np.abs(data - value).argmin() + def rotcen_test2( img_tomo, img_bkg_avg, @@ -162,16 +167,16 @@ def rotcen_test2( txm_normed_flag=0, denoise_flag=0, fw_level=9, - algorithm='gridrec', + algorithm="gridrec", n_iter=5, circ_mask_ratio=0.95, options={}, atten=None, clim=[], dark_scale=1, - filter_name='None', + 
filter_name="None", ): - print('beginning of rotcen2') + print("beginning of rotcen2") s = [1, data.shape[0], data.shape[1]] if not atten is None: @@ -213,7 +218,7 @@ def rotcen_test2( prj_norm -= bkg_level - print('tomopy prep') + print("tomopy prep") prj_norm = tomopy.prep.stripe.remove_stripe_fw( prj_norm, level=fw_level, wname="db5", sigma=1, pad=True ) @@ -246,36 +251,36 @@ def rotcen_test2( steps = 26 cen = np.linspace(start, stop, steps) img = np.zeros([len(cen), s[2], s[2]]) - print('tomopy start reconstructions') + print("tomopy start reconstructions") for i in range(len(cen)): if print_flag: print("{}: rotcen {}".format(i + 1, cen[i])) - if algorithm == 'gridrec': + if algorithm == "gridrec": img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm="gridrec", - filter_name=filter_name + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm="gridrec", + filter_name=filter_name, ) - elif 'astra' in algorithm: - img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=tomopy.astra, - options=options + elif "astra" in algorithm: + img[i] = tomopy.recon( + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm=tomopy.astra, + options=options, ) else: img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=algorithm, - num_iter=n_iter, - filter_name=filter_name + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm=algorithm, + num_iter=n_iter, + filter_name=filter_name, ) - print('tomopy circ_mask') + print("tomopy circ_mask") img = tomopy.circ_mask(img, axis=0, ratio=circ_mask_ratio) return img, cen @@ -299,4 +304,3 @@ def denoise(prj, denoise_flag): prj = gf(prj, [0, 1, 1]) return prj - diff --git a/test_tasks.py b/test_tasks.py index c8687ed..0107ed6 100644 --- a/test_tasks.py +++ b/test_tasks.py @@ -1,12 +1,14 @@ import prefect -from prefect import task, Flow +from prefect import Flow, task + @task def print_stuff(): logger = prefect.context.get("logger") logger.info("printing stuff") + with Flow("print_stuff") as flow1: print_stuff() -#flow1.register(project_name="TST", labels=["tst-2022-2.2"], add_default_labels=False, set_schedule_active=False) +# flow1.register(project_name="TST", labels=["tst-2022-2.2"], add_default_labels=False, set_schedule_active=False) diff --git a/tomo_recon_tiled.py b/tomo_recon_tiled.py index 4439170..973e644 100644 --- a/tomo_recon_tiled.py +++ b/tomo_recon_tiled.py @@ -7,6 +7,7 @@ def find_nearest(data, value): data = np.array(data) return np.abs(data - value).argmin() + def rotcen_test2( img_tomo, img_bkg_avg, @@ -22,14 +23,14 @@ def rotcen_test2( txm_normed_flag=0, denoise_flag=0, fw_level=9, - algorithm='gridrec', + algorithm="gridrec", n_iter=5, circ_mask_ratio=0.95, options={}, atten=None, clim=[], dark_scale=1, - filter_name='None', + filter_name="None", ): s = [1, data.shape[0], data.shape[1]] @@ -66,7 +67,7 @@ def rotcen_test2( prj_norm = -np.log(prj) f.close() - prj_norm = denoise(prj_norm, denoise_flag) + prj_norm = denoise(prj_norm, denoise_flag) prj_norm[np.isnan(prj_norm)] = 0 prj_norm[np.isinf(prj_norm)] = 0 prj_norm[prj_norm < 0] = 0 @@ -76,7 +77,7 @@ def rotcen_test2( prj_norm = tomopy.prep.stripe.remove_stripe_fw( prj_norm, level=fw_level, wname="db5", sigma=1, pad=True ) - """ + """ if denoise_flag == 1: # denoise using wiener filter ss = prj_norm.shape for i in 
range(ss[0]): @@ -108,34 +109,34 @@ def rotcen_test2( for i in range(len(cen)): if print_flag: print("{}: rotcen {}".format(i + 1, cen[i])) - if algorithm == 'gridrec': + if algorithm == "gridrec": img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm="gridrec", - filter_name=filter_name + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm="gridrec", + filter_name=filter_name, ) - elif 'astra' in algorithm: - img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=tomopy.astra, - options=options + elif "astra" in algorithm: + img[i] = tomopy.recon( + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm=tomopy.astra, + options=options, ) else: img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=algorithm, - num_iter=n_iter, - filter_name=filter_name + prj_norm[:, addition_slice : addition_slice + 1], + theta, + center=cen[i], + algorithm=algorithm, + num_iter=n_iter, + filter_name=filter_name, ) img = tomopy.circ_mask(img, axis=0, ratio=circ_mask_ratio) return img, cen - + def denoise(prj, denoise_flag): if denoise_flag == 1: # Wiener denoise From bda3049c52212f0d99bd6cec4f33c0fc459ec411 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 10:53:34 -0400 Subject: [PATCH 03/13] initial commit of flake8 fixes * still have some variable issues to verify --- export.py | 4 ++-- tasks.py | 14 +++++--------- tomo_recon_tiled.py | 6 +++--- 3 files changed, 10 insertions(+), 14 deletions(-) diff --git a/export.py b/export.py index 63f9bbf..bfd2a91 100644 --- a/export.py +++ b/export.py @@ -92,12 +92,12 @@ def bin_ndarray(ndarray, new_shape=None, operation="mean"): [262 270 278 286 294] [342 350 358 366 374]] """ - if new_shape == None: + if new_shape is None: s = np.array(ndarray.shape) s1 = np.int32(s / 2) new_shape = tuple(s1) operation = operation.lower() - if not operation in ["sum", "mean"]: + if operation not in ["sum", "mean"]: raise ValueError("Operation not supported.") if ndarray.ndim != len(new_shape): raise ValueError("Shape mismatch: {} -> {}".format(ndarray.shape, new_shape)) diff --git a/tasks.py b/tasks.py index fb1306c..370a134 100644 --- a/tasks.py +++ b/tasks.py @@ -3,8 +3,9 @@ import numpy as np import pandas as pd import prefect -from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler +import tomopy from prefect import Flow, Parameter, task +from scipy.interpolate import interp1d from tiled.client import from_profile @@ -142,11 +143,6 @@ def get_tomo_images(input_dict): return img_tomo, img_angle -import numpy as np -import tomopy -from scipy.interpolate import interp1d - - def find_nearest(data, value): data = np.array(data) return np.abs(data - value).argmin() @@ -179,7 +175,7 @@ def rotcen_test2( print("beginning of rotcen2") s = [1, data.shape[0], data.shape[1]] - if not atten is None: + if atten is not None: ref_ang = atten[:, 0] ref_atten = atten[:, 1] fint = interp1d(ref_ang, ref_atten) @@ -205,7 +201,7 @@ def rotcen_test2( img_bkg = np.array(img_bkg_avg[:, sli_exp[0] : sli_exp[1], :]) img_dark = np.array(img_dark_avg[:, sli_exp[0] : sli_exp[1], :]) / dark_scale prj = (img_tomo - img_dark) / (img_bkg - img_dark) - if not atten is None: + if atten is not None: for i in range(len(tomo_angle)): att = fint(tomo_angle[i]) prj[i] = prj[i] / att @@ -245,7 +241,7 @@ def rotcen_test2( allow_list = 
list(set(np.arange(len(prj_norm))) - set(block_list)) prj_norm = prj_norm[allow_list] theta = theta[allow_list] - if start == None or stop == None or steps == None: + if start is None or stop is None or steps is None: start = int(s[2] / 2 - 50) stop = int(s[2] / 2 + 50) steps = 26 diff --git a/tomo_recon_tiled.py b/tomo_recon_tiled.py index 973e644..670c751 100644 --- a/tomo_recon_tiled.py +++ b/tomo_recon_tiled.py @@ -34,7 +34,7 @@ def rotcen_test2( ): s = [1, data.shape[0], data.shape[1]] - if not atten is None: + if atten is not None: ref_ang = atten[:, 0] ref_atten = atten[:, 1] fint = interp1d(ref_ang, ref_atten) @@ -60,7 +60,7 @@ def rotcen_test2( img_bkg = np.array(img_bkg_avg[:, sli_exp[0] : sli_exp[1], :]) img_dark = np.array(img_dark_avg[:, sli_exp[0] : sli_exp[1], :]) / dark_scale prj = (img_tomo - img_dark) / (img_bkg - img_dark) - if not atten is None: + if atten is not None: for i in range(len(tomo_angle)): att = fint(tomo_angle[i]) prj[i] = prj[i] / att @@ -100,7 +100,7 @@ def rotcen_test2( allow_list = list(set(np.arange(len(prj_norm))) - set(block_list)) prj_norm = prj_norm[allow_list] theta = theta[allow_list] - if start == None or stop == None or steps == None: + if start is None or stop is None or steps is None: start = int(s[2] / 2 - 50) stop = int(s[2] / 2 + 50) steps = 26 From 24885b0d8c1e663f591f13c9c2c1ab53481e042e Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 12:06:00 -0400 Subject: [PATCH 04/13] ignore flake8 E203 * prevent conflicts with black in handling spaces around array slicing with ":" * see https://github.com/pre-commit/pre-commit-hooks/issues/112 comment from April 28, 2016 by dwaynebailey for where I saw this configuration method used --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6796401..fa18ade 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,6 +17,7 @@ repos: rev: 4.0.1 hooks: - id: flake8 + args: ["--ignore=E203"] - repo: https://github.com/timothycrosley/isort rev: 5.9.3 From c9b19262045441319b15d36191761fa59bba0e02 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 12:06:41 -0400 Subject: [PATCH 05/13] remove unused import - flake8 --- load_scan.py | 1 - 1 file changed, 1 deletion(-) diff --git a/load_scan.py b/load_scan.py index cb3e42e..c3e9319 100644 --- a/load_scan.py +++ b/load_scan.py @@ -2,7 +2,6 @@ import numpy as np import pandas as pd -from databroker.assets.handlers import AreaDetectorHDF5TimestampHandler EPICS_EPOCH = datetime(1990, 1, 1, 0, 0) From 46da6a37c06ebb7dbe03d457704941022259fb4a Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:11:27 -0400 Subject: [PATCH 06/13] break up flake8-problem long lines manually --- export.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/export.py b/export.py index bfd2a91..3daa4b3 100644 --- a/export.py +++ b/export.py @@ -727,7 +727,8 @@ def export_raster_2D_2(run, binning=4, filepath="", **kwargs): y_list[j] * pix * img_sizeY / 1000, ] pos_file.append( - f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t{y_list[j]*pix*img_sizeY/1000:3.3f}\n" + f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t" + + f"{y_list[j]*pix*img_sizeY/1000:3.3f}\n" ) index = index + 1 s = img_patch.shape @@ -804,7 +805,8 @@ def export_raster_2D(run, binning=4, filepath="", **kwargs): y_list[j] * pix * img_sizeY / 1000, ] pos_file.append( - 
f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t{y_list[j]*pix*img_sizeY/1000:3.3f}\n" + f"{x_list[i]:3.0f}\t{y_list[j]:3.0f}\t{x_list[i]*pix*img_sizeX/1000:3.3f}\t\t" + + f"{y_list[j]*pix*img_sizeY/1000:3.3f}\n" ) index = index + 1 s = img_patch.shape From 6fa9e1992e604609703fe2b43e0a58e96589b6c4 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:33:46 -0400 Subject: [PATCH 07/13] add W503 * resolve the conflict between flake8 and black, which follows the anti-pattern described here https://www.flake8rules.com/rules/W503.html --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fa18ade..3b085b9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: rev: 4.0.1 hooks: - id: flake8 - args: ["--ignore=E203"] + args: ["--ignore=E203,W503"] - repo: https://github.com/timothycrosley/isort rev: 5.9.3 From ff6698bd8e632cde4284353ce3ce4e062873b6a7 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:43:55 -0400 Subject: [PATCH 08/13] remove unused load_scan --- load_scan.py | 64 ---------------------------------------------------- 1 file changed, 64 deletions(-) delete mode 100644 load_scan.py diff --git a/load_scan.py b/load_scan.py deleted file mode 100644 index c3e9319..0000000 --- a/load_scan.py +++ /dev/null @@ -1,64 +0,0 @@ -from datetime import datetime - -import numpy as np -import pandas as pd - -EPICS_EPOCH = datetime(1990, 1, 1, 0, 0) - - -def convert_AD_timestamps(ts): - return pd.to_datetime(ts, unit="s", origin=EPICS_EPOCH, utc=True).dt.tz_convert( - "US/Eastern" - ) - - -def get_tomo_images(input_dict): - pos = input_dict["pos"] - imgs = input_dict["imgs"] - chunked_timestamps = input_dict["chunked_timestamps"] - mot_pos = input_dict["mot_pos"] - - raw_timestamps = [] - for chunk in chunked_timestamps: - raw_timestamps.extend(chunk.tolist()) - - timestamps = convert_AD_timestamps(pd.Series(raw_timestamps)) - pos["time"] = pos["time"].dt.tz_localize("US/Eastern") - - img_day, img_hour = ( - timestamps.dt.day, - timestamps.dt.hour, - ) - img_min, img_sec, img_msec = ( - timestamps.dt.minute, - timestamps.dt.second, - timestamps.dt.microsecond, - ) - img_time = ( - img_day * 86400 + img_hour * 3600 + img_min * 60 + img_sec + img_msec * 1e-6 - ) - img_time = np.array(img_time) - - mot_day, mot_hour = ( - pos["time"].dt.day, - pos["time"].dt.hour, - ) - mot_min, mot_sec, mot_msec = ( - pos["time"].dt.minute, - pos["time"].dt.second, - pos["time"].dt.microsecond, - ) - mot_time = ( - mot_day * 86400 + mot_hour * 3600 + mot_min * 60 + mot_sec + mot_msec * 1e-6 - ) - mot_time = np.array(mot_time) - - offset = np.min([np.min(img_time), np.min(mot_time)]) - img_time -= offset - mot_time -= offset - mot_pos_interp = np.interp(img_time, mot_time, mot_pos) - - pos2 = mot_pos_interp.argmax() + 1 - img_angle = mot_pos_interp[: pos2 - chunk_size] # rotation angles - img_tomo = imgs[: pos2 - chunk_size] # tomo images - return img_tomo, img_angle From efe1bceb80274600c7e28fe819fde65ab78de00c Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:45:22 -0400 Subject: [PATCH 09/13] remove unused file * after all code moved into tasks --- tomo_recon_tiled.py | 159 -------------------------------------------- 1 file changed, 159 deletions(-) delete mode 100644 tomo_recon_tiled.py diff --git a/tomo_recon_tiled.py b/tomo_recon_tiled.py deleted file mode 100644 index 670c751..0000000 --- 
a/tomo_recon_tiled.py +++ /dev/null @@ -1,159 +0,0 @@ -import numpy as np -import tomopy -from scipy.interpolate import interp1d - - -def find_nearest(data, value): - data = np.array(data) - return np.abs(data - value).argmin() - - -def rotcen_test2( - img_tomo, - img_bkg_avg, - img_dark_avg, - img_angle, - start=None, - stop=None, - steps=None, - sli=0, - block_list=[], - print_flag=1, - bkg_level=0, - txm_normed_flag=0, - denoise_flag=0, - fw_level=9, - algorithm="gridrec", - n_iter=5, - circ_mask_ratio=0.95, - options={}, - atten=None, - clim=[], - dark_scale=1, - filter_name="None", -): - s = [1, data.shape[0], data.shape[1]] - - if atten is not None: - ref_ang = atten[:, 0] - ref_atten = atten[:, 1] - fint = interp1d(ref_ang, ref_atten) - - if denoise_flag: - addition_slice = 100 - else: - addition_slice = 0 - - if sli == 0: - sli = int(s[1] / 2) - sli_exp = [ - np.max([0, sli - addition_slice // 2]), - np.min([sli + addition_slice // 2 + 1, s[1]]), - ] - tomo_angle = np.arrayimg_angle - theta = tomo_angle / 180.0 * np.pi - img_tomo = np.array(img_tomo[:, sli_exp[0] : sli_exp[1], :]) - - if txm_normed_flag: - prj_norm = img_tomo - else: - img_bkg = np.array(img_bkg_avg[:, sli_exp[0] : sli_exp[1], :]) - img_dark = np.array(img_dark_avg[:, sli_exp[0] : sli_exp[1], :]) / dark_scale - prj = (img_tomo - img_dark) / (img_bkg - img_dark) - if atten is not None: - for i in range(len(tomo_angle)): - att = fint(tomo_angle[i]) - prj[i] = prj[i] / att - prj_norm = -np.log(prj) - f.close() - - prj_norm = denoise(prj_norm, denoise_flag) - prj_norm[np.isnan(prj_norm)] = 0 - prj_norm[np.isinf(prj_norm)] = 0 - prj_norm[prj_norm < 0] = 0 - - prj_norm -= bkg_level - - prj_norm = tomopy.prep.stripe.remove_stripe_fw( - prj_norm, level=fw_level, wname="db5", sigma=1, pad=True - ) - """ - if denoise_flag == 1: # denoise using wiener filter - ss = prj_norm.shape - for i in range(ss[0]): - prj_norm[i] = skr.wiener(prj_norm[i], psf=psf, reg=reg, balance=balance, is_real=is_real, clip=clip) - elif denoise_flag == 2: - from skimage.filters import gaussian as gf - prj_norm = gf(prj_norm, [0, 1, 1]) - """ - s = prj_norm.shape - if len(s) == 2: - prj_norm = prj_norm.reshape(s[0], 1, s[1]) - s = prj_norm.shape - - if theta[-1] > theta[1]: - pos = find_nearest(theta, theta[0] + np.pi) - else: - pos = find_nearest(theta, theta[0] - np.pi) - block_list = list(block_list) + list(np.arange(pos + 1, len(theta))) - if len(block_list): - allow_list = list(set(np.arange(len(prj_norm))) - set(block_list)) - prj_norm = prj_norm[allow_list] - theta = theta[allow_list] - if start is None or stop is None or steps is None: - start = int(s[2] / 2 - 50) - stop = int(s[2] / 2 + 50) - steps = 26 - cen = np.linspace(start, stop, steps) - img = np.zeros([len(cen), s[2], s[2]]) - for i in range(len(cen)): - if print_flag: - print("{}: rotcen {}".format(i + 1, cen[i])) - if algorithm == "gridrec": - img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm="gridrec", - filter_name=filter_name, - ) - elif "astra" in algorithm: - img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=tomopy.astra, - options=options, - ) - else: - img[i] = tomopy.recon( - prj_norm[:, addition_slice : addition_slice + 1], - theta, - center=cen[i], - algorithm=algorithm, - num_iter=n_iter, - filter_name=filter_name, - ) - img = tomopy.circ_mask(img, axis=0, ratio=circ_mask_ratio) - return img, cen - - -def denoise(prj, denoise_flag): - if 
denoise_flag == 1: # Wiener denoise - import skimage.restoration as skr - - ss = prj.shape - psf = np.ones([2, 2]) / (2**2) - reg = None - balance = 0.3 - is_real = True - clip = True - for j in range(ss[0]): - prj[j] = skr.wiener( - prj[j], psf=psf, reg=reg, balance=balance, is_real=is_real, clip=clip - ) - elif denoise_flag == 2: # Gaussian denoise - from skimage.filters import gaussian as gf - - prj = gf(prj, [0, 1, 1]) - return prj From 4962e7d355c673cc045c858e90226412f78ec7c7 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:48:04 -0400 Subject: [PATCH 10/13] remove unused function --- tasks.py | 52 ---------------------------------------------------- 1 file changed, 52 deletions(-) diff --git a/tasks.py b/tasks.py index 370a134..7e3b148 100644 --- a/tasks.py +++ b/tasks.py @@ -91,58 +91,6 @@ def convert_AD_timestamps(ts): ) -def get_tomo_images(input_dict): - pos = input_dict["pos"] - imgs = input_dict["imgs"] - chunked_timestamps = input_dict["chunked_timestamps"] - mot_pos = input_dict["mot_pos"] - - raw_timestamps = [] - for chunk in chunked_timestamps: - raw_timestamps.extend(chunk.tolist()) - - timestamps = convert_AD_timestamps(pd.Series(raw_timestamps)) - pos["time"] = pos["time"].dt.tz_localize("US/Eastern") - - img_day, img_hour = ( - timestamps.dt.day, - timestamps.dt.hour, - ) - img_min, img_sec, img_msec = ( - timestamps.dt.minute, - timestamps.dt.second, - timestamps.dt.microsecond, - ) - img_time = ( - img_day * 86400 + img_hour * 3600 + img_min * 60 + img_sec + img_msec * 1e-6 - ) - img_time = np.array(img_time) - - mot_day, mot_hour = ( - pos["time"].dt.day, - pos["time"].dt.hour, - ) - mot_min, mot_sec, mot_msec = ( - pos["time"].dt.minute, - pos["time"].dt.second, - pos["time"].dt.microsecond, - ) - mot_time = ( - mot_day * 86400 + mot_hour * 3600 + mot_min * 60 + mot_sec + mot_msec * 1e-6 - ) - mot_time = np.array(mot_time) - - offset = np.min([np.min(img_time), np.min(mot_time)]) - img_time -= offset - mot_time -= offset - mot_pos_interp = np.interp(img_time, mot_time, mot_pos) - - pos2 = mot_pos_interp.argmax() + 1 - img_angle = mot_pos_interp[: pos2 - chunk_size] # rotation angles - img_tomo = imgs[: pos2 - chunk_size] # tomo images - return img_tomo, img_angle - - def find_nearest(data, value): data = np.array(data) return np.abs(data - value).argmin() From b590cd5b97b3e8f2d4e5be8881da461f92900798 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:48:43 -0400 Subject: [PATCH 11/13] remove unused variable --- tasks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tasks.py b/tasks.py index 7e3b148..30799d9 100644 --- a/tasks.py +++ b/tasks.py @@ -53,7 +53,6 @@ def call_find_rot(uid): imgs = np.array(list(scan_result["primary"]["data"]["Andor_image"])) s1 = imgs.shape - chunk_size = s1[1] imgs = imgs.reshape(-1, s1[2], s1[3]) logger.info("done with primary images") From 31e0a2910d8a5a1e3c71edd66aecb96a9bea9c7b Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:50:29 -0400 Subject: [PATCH 12/13] fix variable name * looking back at the beamline profile_collection, the tomography images are to be used here --- tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasks.py b/tasks.py index 30799d9..68d100a 100644 --- a/tasks.py +++ b/tasks.py @@ -120,7 +120,7 @@ def rotcen_test2( filter_name="None", ): print("beginning of rotcen2") - s = [1, data.shape[0], data.shape[1]] + s = [1, img_tomo.shape[0], img_tomo.shape[1]] if atten is not None: ref_ang = atten[:, 0] From 
c1235e21ea07ac55011d7983c8383b8761ffb982 Mon Sep 17 00:00:00 2001 From: Jun Aishima Date: Fri, 29 Jul 2022 14:53:27 -0400 Subject: [PATCH 13/13] fix E741 by renaming * use "ell" instead of the lowercase letter "l" to prevent possible mis-reading (as capital I, for instance) --- export.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/export.py b/export.py index 3daa4b3..edeb9fc 100644 --- a/export.py +++ b/export.py @@ -102,7 +102,7 @@ def bin_ndarray(ndarray, new_shape=None, operation="mean"): if ndarray.ndim != len(new_shape): raise ValueError("Shape mismatch: {} -> {}".format(ndarray.shape, new_shape)) compression_pairs = [(d, c // d) for d, c in zip(new_shape, ndarray.shape)] - flattened = [l for p in compression_pairs for l in p] + flattened = [ell for p in compression_pairs for ell in p] ndarray = ndarray.reshape(flattened) for i in range(len(new_shape)): op = getattr(ndarray, operation)
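
For reference, a minimal sketch (not taken from the patches; the array and names below are made up for illustration) of the three flake8 rules this series works around: E203 and W503, which are ignored in .pre-commit-config.yaml because black's output triggers them, and E741, which motivates the "l" -> "ell" rename in the final patch.

# Illustrative only -- hypothetical data, not from the repository.
import numpy as np

data = np.arange(12).reshape(3, 4)
offset = 1

# black puts spaces around ":" in slices whose bounds are expressions,
# which flake8 reports as E203 (whitespace before ':') unless ignored.
window = data[:, offset : offset + 2]

# When a wrapped expression places the binary operator at the start of the
# continuation line (black's preferred style), flake8 reports W503 unless ignored.
total = (
    window.sum()
    + offset
)

# E741 flags ambiguous single-letter names such as "l"; the last patch
# renames that comprehension variable to "ell" instead.
pairs = [(d, c // d) for d, c in zip(window.shape, data.shape)]
flattened = [ell for p in pairs for ell in p]
print(total, flattened)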