From 5731b640352f1ab535cd344ea325c3a466571164 Mon Sep 17 00:00:00 2001 From: Jacob Date: Wed, 3 May 2023 09:33:47 +0200 Subject: [PATCH 1/2] rebased --- clean_python/analysis.py | 194 +++++++++++++++++++++++++++++-------- clean_python/ditact_pic.py | 55 +++++++---- clean_python/ising.py | 81 ++++++++++++++++ 3 files changed, 269 insertions(+), 61 deletions(-) create mode 100644 clean_python/ising.py diff --git a/clean_python/analysis.py b/clean_python/analysis.py index 4bb1b08..3c4bda2 100644 --- a/clean_python/analysis.py +++ b/clean_python/analysis.py @@ -1,17 +1,87 @@ +from ditact_pic import plot +from spin_image import SpinImage, FFT import sys import numpy as np import matplotlib.pyplot as plt import glob import scipy.interpolate as ip -plt.style.use(["style", "colors", "one_column"]) from spin_image import SpinImage, FFT from ditact_pic import plot +from lattices import VO2_Lattice +plt.style.use(["style", "colors", "one_column"]) + def check_percentage(p1, p2): plt.figure() plt.plot(p1, p2) +def average_mean(arr, window_size=20): + arr_sum = np.cumsum(arr) + arr = (arr_sum[window_size:] - arr_sum[:-window_size]) / window_size + return arr + + +def new_merge(files): + wp = [] + op = [] + spot_1 = [] + spot_2 = [] + spot_3 = [] + plt.figure() + for file in files: + print(file) + data = np.load(file, allow_pickle=True) + old_percentage = data["percentage"] + w_percentage = data["w_percentage"] + wp.append(w_percentage) + op.append(old_percentage) + # check_percentage(old_percentage, w_percentage) + out = [] + for o in ["out_1", "out_2", "out_3", "out_4"]: + out.append(np.array(data[o])) + print(out) + out = np.array(out)[:, :, 0] + + spot_1.append(out[0, :]) + spot_2.append(out[3, :]) + spot_3.append(out[2, :]) + wp = np.concatenate(wp, axis=0) + op = np.concatenate(op, axis=0) + spot_1 = np.concatenate(spot_1, axis=0) + spot_2 = np.concatenate(spot_2, axis=0) + spot_3 = np.concatenate(spot_3, axis=0) + + arg_sort = np.argsort(op) + wp = wp[arg_sort] + op = op[arg_sort] + spot_1 = spot_1[arg_sort] + spot_2 = spot_2[arg_sort] + spot_3 = spot_3[arg_sort] + + win = 100 + wp = average_mean(wp, win) + op = average_mean(op, win) + spot_1 = average_mean(spot_1, win) + spot_2 = average_mean(spot_2, win) + spot_3 = average_mean(spot_3, win) + + x = op + plt.plot(x, spot_1, "r.") + plt.plot(x, spot_2, "g.") + plt.plot(x, spot_3, "b.") + + ma = np.max(spot_1+spot_2+spot_3) + spot_1 /= ma + spot_2 /= ma + spot_3 /= ma + + print("debug....") + print(wp.shape) + plt.savefig("debug.png") + return op, np.stack([spot_2, spot_1, spot_3]) + + def merge(files): merge = [] plt.figure() @@ -32,17 +102,18 @@ def merge(files): out = out / summe merge.append(out) - plt.plot(out[0, :], "r") - plt.plot(out[3, :], "b") - plt.plot(out[2, :], "g") + plt.plot(w_percentage, out[0, :], "r.") + plt.plot(w_percentage, out[3, :], "b.") + plt.plot(w_percentage, out[2, :], "g.") all = sum(merge) summe = np.max(np.sum(all, axis=0)) all = all / summe - plt.plot(all[0, :], "k") - plt.plot(all[3, :], "k") - plt.plot(all[2, :], "k") + # plt.plot(all[0, :], "k") + # plt.plot(all[3, :], "k") + # plt.plot(all[2, :], "k") + plt.savefig("debug.png") percentage = 1-percentage return percentage, all @@ -58,18 +129,23 @@ def debug(percentage, out): def stacked_plot(ax, percentage, out, title=""): - stacks = ax.stackplot(percentage, out[[0, 3, 1, 2]], colors=[ - "w"], ls="solid", ec="k") - hatches = ["/", "", "\\", "\\"] - for stack, hatch in zip(stacks, hatches): + stacks = ax.stackplot(percentage, out[[0, 1, 2]], colors=[ + "w"], 
ls=(0, (0, 1)), ec="w") + hatches = ["//", "|", "\\\\"] + for stack, hatch, color in zip(stacks, hatches, ["C1", "C0", "C2"]): stack.set_hatch(hatch) + stack.set_edgecolor(color) ax.set_xlabel("Metallic Phase (%)") ax.set_ylabel("normalized Intensity ") ax.set_ylim([0.4, 1]) ax.set_xlim([0., 1]) - ax.text(0.1, 0.9, "monoclinic", backgroundcolor="w") - ax.text(0.6, 0.5, "rutile", backgroundcolor="w") - ax.text(0.35, 0.75, "diffusive", backgroundcolor="w") + ax.text(0.1, 0.9, "monoclinic", backgroundcolor="w", + bbox=dict(boxstyle='square,pad=0.0', ec="None", fc="w")) + ax.text(0.6, 0.5, "rutile", backgroundcolor="w", + bbox=dict(boxstyle='square,pad=0.0', ec="None", fc="w")) + ax.text(0.35, 0.73, "diffusive", backgroundcolor="w", + bbox=dict(boxstyle='square,pad=0.0', ec="None", fc="w")) + ax.stackplot(percentage, out[[0, 1, 2]], colors=["None"], ec="k") def time_scale(ax, p, o): @@ -86,21 +162,23 @@ def time_scale(ax, p, o): cs_rut = ip.interp1d(p[::-1], rut_perc[::-1]) cs_mono = ip.interp1d(p[::-1], mono_perc[::-1]) - #plt.figure() - #ph = np.linspace(0.01, 0.99, 100) - #plt.plot(ph, cs_rut(ph)) - #plt.plot(ph, cs_mono(ph)) + # plt.figure() + # ph = np.linspace(0.01, 0.99, 100) + # plt.plot(ph, cs_rut(ph)) + # plt.plot(ph, cs_mono(ph)) time = np.linspace(0.01, 3, 1000) - phy_phase = np.exp(-time) + phy_phase = 1-np.exp(-time) rut_phase = cs_rut(phy_phase) mono_phase = cs_mono(phy_phase) ax.plot(time, phy_phase, "k:", label="physical") - ax.plot(time, rut_phase, label="rutile") - ax.plot(time, mono_phase, label="monoclinic") + ax.plot(time, rut_phase, label="rutile", color="C1") + ax.plot(time, mono_phase, label="monoclinic", color="C2") ax.set_xlabel("time (a.u.)") ax.set_ylabel("Metallic Phase (%)") + ax.set_xlim([0, 3]) + ax.set_ylim([0, 1]) ax.legend() @@ -114,25 +192,61 @@ def intens(ax, file, p, o): intens = FFT() intens.load(file) plot(intens, ax) - ax.set_xlim([-.8,0.8]) - ax.set_ylim([-.8,1.6]) - axins = ax.inset_axes([0.0, 0.5, 0.47, 0.5]) - axins.plot(p, o[0], label="rut") - axins.plot(p, o[3], label="diff") - axins.plot(p, o[2], label="mono") - axins.legend(loc='lower left', bbox_to_anchor=(1, 0.5)) - #axins.get_yaxis().set_visible(False) - axins.yaxis.tick_right() - axins.set_yticks([0,0.5] -if __name__ == "__main__": - p, o = merge(sys.argv[2:]) - np.savez("merged.npz", p=p, o=o) - # eval_data_print(f) + ax.set_xlim([-1, 1]) + ax.set_ylim([-.9, .9]) + ax.axis("off") - fig, axs = plt.subplots(1,3) - fig.set_figheight(3) - stacked_plot(axs[1],p, o) - time_scale(axs[2],p, o) - intens(axs[0], sys.argv[1], p ,o) + # rect = plt.Rectangle((-1, -.8), 2, 1.6, facecolor="None", hatch="//") + # ax.add_patch(rect) + lat = VO2_Lattice(20, 20) + reci = lat.get_spots() + print(reci) + size = (intens.freqx[1] - intens.freqx[0]) * 20 + size2 = size/2 + # big_rect = plt.Rectangle((-10, -10), 20, 20, fc="None", ec="k", hatch="//") + # ax.add_patch(big_rect) + for x, y in zip(reci[0][0], reci[0][1]): + if x < 1 and x > -1: + if y < 1 and y > -1: + print(x, y) + rect = plt.Rectangle((-y-size2, x-size2), + size, size, fc="C1", ec="k", alpha=0.5) + # big_rect.set_clip_path(rect) + ax.add_patch(rect) + for x, y in zip(reci[1][0], reci[1][1]): + if x < 1 and x > -1: + if y < 1 and y > -1: + print(x, y) + rect = plt.Rectangle((-y-size2, x-size2), + size, size, fc="C2", ec="k", alpha=0.5) + ax.add_patch(rect) + + axins = ax.inset_axes([0.0, 0.0, 0.5, 0.5]) + axins.plot(p, o[0], label="rut.", color="C1") + axins.plot(p, o[2], label="mono.", color="C2") + axins.plot(p, o[1], label="diff.", color="C0") + 
axins.legend(loc='center left', bbox_to_anchor=(1, 0.5)) + axins.set_xlim([0, 1]) + axins.set_ylim([0, 1]) + axins.set_xlabel("phase (%)") + axins.set_ylabel("signal", labelpad=-5) + + # axins.get_yaxis().set_visible(False) + # axins.yaxis.tick_right() + axins.set_yticks([0, 1]) + + +if __name__ == "__main__": + p, o = new_merge(sys.argv[2:]) + np.savez("merged.npz", p=p, o=o) + + fig, axs = plt.subplots(1, 3) + fig.set_figheight(2) + stacked_plot(axs[1], p, o) + time_scale(axs[2], p, o) + if "intens" in sys.argv[1]: + intens(axs[0], sys.argv[1], p, o) plt.tight_layout() + plt.savefig("analysis.pdf") + plt.savefig("analysis.png") plt.show() diff --git a/clean_python/ditact_pic.py b/clean_python/ditact_pic.py index 92754b2..8425a83 100644 --- a/clean_python/ditact_pic.py +++ b/clean_python/ditact_pic.py @@ -6,7 +6,7 @@ import numpy as np import matplotlib.pyplot as plt if __name__ == "__main__": - plt.style.use(["style", "colors","two_column"]) + plt.style.use(["style", "colors", "two_column"]) def simulate(): @@ -47,18 +47,28 @@ def plot(fft, ax): fft.intens, extent=fft.extents(), norm=matplotlib.colors.LogNorm(vmin=1e-10, vmax=1), - #norm=matplotlib.colors.Normalize(vmax=1, vmin=1e-10), + # norm=matplotlib.colors.Normalize(vmax=1, vmin=1e-10), cmap="magma", origin="lower" ) + def norm(*intenss): max = 1e-10 for intens in intenss: m = np.max(intens.intens) - max = np.maximum(max,m) + max = np.maximum(max, m) return max + +def norm(*intenss): + max = 1e-10 + for intens in intenss: + m = np.max(intens.intens) + max = np.maximum(max, m) + return max + + def plot_all(intens_rutile, intens_mono, intens_mixed): fig, axs = plt.subplots(3, 2) fig.set_figheight(5.2) @@ -75,19 +85,22 @@ def plot_all(intens_rutile, intens_mono, intens_mixed): h_shift = 2*y_shift l_shift = 0.108 big_shift = l_shift + h_shift - c = plt.Circle((-l_shift, y_shift), radius=0.07, - label='patch', fill=False, ec="w", ls=":") - ax.add_patch(c) - c = plt.Circle((l_shift, -y_shift), radius=0.07, - label='patch', fill=False, ec="w", ls=":") - ax.add_patch(c) + # c = plt.Circle((-l_shift, y_shift), radius=0.07, + # label='patch', fill=False, ec="w", ls=":") + # ax.add_patch(c) + # c = plt.Circle((l_shift, -y_shift), radius=0.07, + # label='patch', fill=False, ec="w", ls=":") + # ax.add_patch(c) - c = plt.Circle((-h_shift, -y_shift), radius=0.07, - label='patch', fill=False, ec="w", ls=":") - ax.add_patch(c) - c = plt.Circle((h_shift, y_shift), radius=0.07, - label='patch', fill=False, ec="w", ls=":") - ax.add_patch(c) + # c = plt.Circle((-h_shift, -y_shift), radius=0.07, + # label='patch', fill=False, ec="w", ls=":") + # ax.add_patch(c) + # c = plt.Circle((h_shift, y_shift), radius=0.07, + # label='patch', fill=False, ec="w", ls=":") + # ax.add_patch(c) + + ax.annotate("", (-l_shift, y_shift), (0, 0), + arrowprops=dict(ec='w', facecolor='white', width=.8, headwidth=3, headlength=5)) ax = axs[2] cmap = plot(intens_mixed, ax) @@ -98,11 +111,11 @@ def plot_all(intens_rutile, intens_mono, intens_mixed): ax.set_xlim(-cut_off, cut_off) ax.set_ylim(-cut_off, cut_off) plt.tight_layout() - fig.subplots_adjust(bottom=0.1,right=0.95,left=0.15,wspace=0.) + fig.subplots_adjust(bottom=0.1, right=0.95, left=0.15, wspace=0.) 
cbar_ax = fig.add_axes([0.55, 0.07, 0.4, 0.015]) - cbar = fig.colorbar(cmap, cax=cbar_ax, orientation="horizontal", ticks=[1e-10, 1e-5, 1e0]) - #cbar.ax.set_xticklabels(['Low', 'Medium', 'High']) - + cbar = fig.colorbar(cmap, cax=cbar_ax, + orientation="horizontal", ticks=[1e-10, 1e-5, 1e0]) + # cbar.ax.set_xticklabels(['Low', 'Medium', 'High']) fig.savefig("erklaerbaer.pdf") fig.savefig("erklaerbaer.png") @@ -122,10 +135,10 @@ def load(): if __name__ == "__main__": np.random.seed(1234) - #simulate() + # simulate() # np.savez("intens.npz", r=r, mo=mo, mi=mi) r, mo, mi = load() - max = norm(r,mo,mi) + max = norm(r, mo, mi) r.intens = r.intens/max mo.intens = mo.intens/max mi.intens = mi.intens/max diff --git a/clean_python/ising.py b/clean_python/ising.py new file mode 100644 index 0000000..5b62dbb --- /dev/null +++ b/clean_python/ising.py @@ -0,0 +1,81 @@ +import logging +import scipy.fftpack as sfft +from plotter import Plotter +from scipy import signal +from cache import timeit +from extractors import Rect_Evaluator +import tqdm +from lattices import SCC_Lattice, VO2_Lattice, VO2_New +import sys +from spin_image import SpinImage +import numpy as np +import matplotlib.pyplot as plt +plt.style.use(["style", "colors", "two_column"]) +logger = logging.getLogger('fft') +# logger.setLevel(logging.DEBUG) +ch = logging.StreamHandler() +ch.setLevel(logging.DEBUG) +formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s') +ch.setFormatter(formatter) +logger.addHandler(ch) + + +def ising(file, num): + LEN = 60 + #lat = VO2_New(LEN, LEN) + lat = VO2_New(LEN, LEN) + rect = Rect_Evaluator(lat.get_spots()) + + out_rect = [[] for x in range(4)] + percentage = [] + weighted_percentage = [] + si = SpinImage(lat.get_phases()) + already_inited = False + + spins = np.load(file)["s1"] + spins[spins==-1] = 0 + + for i in tqdm.tqdm(range(1000)): + + (ix,iy) = np.random.randint(2000-LEN-LEN,size=2) + maske = spins[ix:ix+2*LEN, iy:iy+2*LEN] + + si.apply_mask(lat.parse_mask(maske)) + si.gaussian(20) + + intens = si.fft() + if not already_inited: + rect.generate_mask(intens, merge=True) + already_inited = True + + ir, vr = rect.extract(intens) + for lis, val in zip(out_rect, vr): + lis.append(val) + percentage.append(np.sum(maske)) + [p1, p2] = si.get_intens(lat.parse_mask(maske)) + weighted_percentage.append(p1/(p1+p2)) + + percentage = np.array(percentage) + weighted_percentage = np.array(weighted_percentage) + percentage /= np.max(percentage) + + np.savez(f"ising_rect_{num}.npz", + w_percentage=weighted_percentage, percentage=percentage, out_1=out_rect[0], + out_2=out_rect[1], out_3=out_rect[2], out_4=out_rect[3]) + +def runner(file, idx): + np.random.seed(1234) + print(f"runnig: {file}") + ising(file,idx) + + +if __name__ == "__main__": + files = sys.argv[2:] + idx = int(sys.argv[1]) + print(f"{idx}/{len(files)}") + if idx > len(files): + exit() + if idx < 1: + exit() + runner(files[idx-1], idx) From da394e3bda9570f7e3a4b56845f0a72cbe2fc76d Mon Sep 17 00:00:00 2001 From: Jacob Date: Wed, 3 May 2023 09:48:15 +0200 Subject: [PATCH 2/2] cleanup done --- crystal_V.xyz | 24 - crystal_maker.xyz | 42 - fft_1d.py | 202 ---- imgs/erklaerbaer.svg | 1293 ++++++++++++++++++++++ imgs/ref_imgs.svg | 189 ++++ {clean_python => software}/analysis.py | 0 {clean_python => software}/cache.py | 0 {clean_python => software}/ditact_pic.py | 0 {clean_python => software}/extractors.py | 0 {clean_python => software}/ising.py | 0 {clean_python => software}/lattices.py | 0 {clean_python => software}/main.py 
| 0 {clean_python => software}/mypy.conf | 0 {clean_python => software}/plotter.py | 0 {clean_python => software}/spin_image.py | 0 {clean_python => software}/test.py | 0 {clean_python => software}/tools.py | 0 test_fft.py | 137 --- 18 files changed, 1482 insertions(+), 405 deletions(-) delete mode 100644 crystal_V.xyz delete mode 100644 crystal_maker.xyz delete mode 100644 fft_1d.py create mode 100644 imgs/erklaerbaer.svg create mode 100644 imgs/ref_imgs.svg rename {clean_python => software}/analysis.py (100%) rename {clean_python => software}/cache.py (100%) rename {clean_python => software}/ditact_pic.py (100%) rename {clean_python => software}/extractors.py (100%) rename {clean_python => software}/ising.py (100%) rename {clean_python => software}/lattices.py (100%) rename {clean_python => software}/main.py (100%) rename {clean_python => software}/mypy.conf (100%) rename {clean_python => software}/plotter.py (100%) rename {clean_python => software}/spin_image.py (100%) rename {clean_python => software}/test.py (100%) rename {clean_python => software}/tools.py (100%) delete mode 100644 test_fft.py diff --git a/crystal_V.xyz b/crystal_V.xyz deleted file mode 100644 index 682fb25..0000000 --- a/crystal_V.xyz +++ /dev/null @@ -1,24 +0,0 @@ --1.317409 0.112925 -0.113205 --1.317410 4.629925 -0.113205 -9.907514 -0.112924 4.641385 -9.907513 4.404076 4.641385 -1.317409 -0.112925 0.113205 --1.578486 -0.112924 4.641385 -4.425591 0.112925 -0.113205 -4.425590 4.629925 -0.113205 -7.060409 -0.112925 0.113205 -4.164514 -0.112924 4.641385 -10.168591 0.112925 -0.113205 -10.168590 4.629925 -0.113205 -1.317408 4.404075 0.113205 -2.977644 2.145576 2.150886 -1.529697 0.112926 4.414976 --0.130539 2.371426 2.377295 --1.578487 4.404076 4.641385 -1.529696 4.629926 4.414976 -7.060408 4.404075 0.113205 -8.720644 2.145576 2.150886 -7.272697 0.112926 4.414976 -5.612461 2.371426 2.377295 -4.164513 4.404076 4.641385 -7.272696 4.629926 4.414976 diff --git a/crystal_maker.xyz b/crystal_maker.xyz deleted file mode 100644 index 64d1742..0000000 --- a/crystal_maker.xyz +++ /dev/null @@ -1,42 +0,0 @@ -V -1.317409 0.112925 -0.113205 -O -1.443069 3.207070 1.358454 -O -2.891016 3.568431 3.622545 -V -1.317410 4.629925 -0.113205 -V 9.907514 -0.112924 4.641385 -O 11.481121 0.948570 0.905636 -O 10.033174 1.309931 3.169726 -V 9.907513 4.404076 4.641385 -V 1.317409 -0.112925 0.113205 -V -1.578486 -0.112924 4.641385 -V 4.425591 0.112925 -0.113205 -V 4.425590 4.629925 -0.113205 -V 7.060409 -0.112925 0.113205 -V 4.164514 -0.112924 4.641385 -V 10.168591 0.112925 -0.113205 -V 10.168590 4.629925 -0.113205 -O -0.004879 0.948570 0.905636 -O 4.299931 3.207070 1.358454 -O 2.851984 3.568431 3.622545 -O 1.399960 3.116730 1.313172 -O 2.895092 0.858230 0.950918 -O 1.447145 1.400271 3.215008 -O -0.047988 3.658771 3.577263 -V 1.317408 4.404075 0.113205 -V 2.977644 2.145576 2.150886 -V 1.529697 0.112926 4.414976 -V -0.130539 2.371426 2.377295 -V -1.578487 4.404076 4.641385 -V 1.529696 4.629926 4.414976 -O 5.738121 0.948570 0.905636 -O 8.594984 3.568431 3.622545 -O 4.290174 1.309931 3.169726 -O 7.142960 3.116730 1.313172 -O 8.638092 0.858230 0.950918 -O 7.190145 1.400271 3.215008 -O 5.695012 3.658771 3.577263 -V 7.060408 4.404075 0.113205 -V 8.720644 2.145576 2.150886 -V 7.272697 0.112926 4.414976 -V 5.612461 2.371426 2.377295 -V 4.164513 4.404076 4.641385 -V 7.272696 4.629926 4.414976 diff --git a/fft_1d.py b/fft_1d.py deleted file mode 100644 index c5cf5be..0000000 --- a/fft_1d.py +++ /dev/null @@ -1,202 +0,0 @@ -import numpy as np -import 
matplotlib.pyplot as plt -import tqdm - -import multiprocessing as mp - -RESOLUTION = 0.01 -LENGTH = 500 - - -def generate_image_from_mask(mask: np.array): - pos_mono = np.arange(0, mask.size * 2.89 * 2, 2.89) - pos_mono[::2][mask] -= 0.27 - pos_mono[1::2][mask] += 0.27 - return pos_mono - - -def pad_zero(img, length): - pad = np.zeros(length) - img = np.append(img, pad) - img = np.append(pad, img) - return img - - -def image_from_pos(pos): - length = np.max(pos) + RESOLUTION - x = np.arange(0, length, RESOLUTION) # angstrom - y = np.zeros_like(x) - ind = np.searchsorted(x, pos) - y[ind] = 1 - return y - - -def beugung(y, resolution): - fft = np.fft.fft(y) - fft_clean = np.fft.fftshift(fft) - fft_freq = np.fft.fftfreq(y.size, resolution) - fft_freq_clean = np.fft.fftshift(fft_freq) - return fft_freq_clean, np.abs(fft_clean) ** 2 - - -def gaussian_convol(img): - sigma = 100 / RESOLUTION - mu = img.size/2 - x = np.arange(0, img.size) - gauss = 1/(sigma * np.sqrt(2 * np.pi)) * \ - np.exp(- (x - mu)**2 / (2 * sigma**2)) - - return img*gauss - - -def analyisis(mask): - pos_h = generate_image_from_mask(mask) - img = image_from_pos(pos_h) - img = gaussian_convol(img) - padded = pad_zero(img, int(100 / RESOLUTION)) - freq, intens = beugung(padded, RESOLUTION) - return freq, intens - - -def get_peaks(): - orders = np.arange(1, 2, 1) - orders = orders / 5.78 - return np.array(orders) - - -def eval_peaks(freq, fft): - orders = get_peaks() - ind = np.searchsorted(freq, orders) - return fft[ind] - - -def basic_test(): - mask_h = np.zeros(LENGTH).astype(bool) - mask_l = np.ones(LENGTH).astype(bool) - - mask_mixed = np.zeros(LENGTH).astype(bool) - ind = (np.random.rand(30) * (LENGTH - 1)).astype(int) - mask_mixed[ind] = True - - mask_ner = np.zeros(LENGTH).astype(bool) - ind = (np.random.rand(1) * (LENGTH - 31)).astype(int) - ind = np.arange(ind, ind+30).astype(int) - mask_ner[ind] = True - - fig, axs = plt.subplots(4, 1) - for mask, ax in zip([mask_h, mask_l, mask_mixed, mask_ner], axs): - freq, ffty = analyisis(mask) - ax.plot(freq, ffty) - for ax in axs: - ax.plot([1.0 / 5.78, 1.0 / 2.62, 1.0 / 3.16], [0, 0, 0], "kx") - ax.plot([2.0 / 5.78, 2.0 / 2.62, 2.0 / 3.16], [0, 0, 0], "rx") - ax.plot([3.0 / 5.78, 3.0 / 2.62, 3.0 / 3.16], [0, 0, 0], "bx") - ax.set_xlim(0, 3) - plt.show() - - -def norm(arr): - return arr/np.sum(arr) - - -def norm2(arr): - return arr - # return arr/np.max(arr) - - -def next_mask(mask): - prob = np.exp((np.roll(mask, 1)*1.0 + np.roll(mask, -1)) / .1) - prob[mask] = 0.0 - prob = norm(prob) - - ind = np.random.choice(LENGTH, p=prob) - mask[ind] = True - return mask - - -def random_loop(): - mask = np.zeros(LENGTH).astype(bool) - ind = np.arange(0, LENGTH) - np.random.shuffle(ind) - percentage = [] - peaks = [] - - masks = [] - for i in ind: - mask[i] = True - freq, fft = analyisis(mask) - peak = eval_peaks(freq, fft) - percentage.append(np.mean(mask)) - peaks.append(peak) - masks.append(mask.copy()) - masks = np.array(masks) - plt.figure() - plt.imshow(masks) - plt.plot([0, 500], [406, 406]) - print() - percentage = np.array(percentage) - peaks = np.array(peaks) - return percentage, peaks - - -def nearest_loop(): - mask = np.zeros(LENGTH).astype(bool) - percentage = [] - peaks = [] - for i in range(LENGTH): - mask = next_mask(mask) - freq, fft = analyisis(mask) - peak = eval_peaks(freq, fft) - percentage.append(np.mean(mask)) - peaks.append(peak) - percentage = np.array(percentage) - peaks = np.array(peaks) - return percentage, peaks - - -def random_helper(seed): - 
np.random.seed(seed) - #percentage_near, peaks_near = nearest_loop() - percentage_rand, peaks_rand = random_loop() - print("done") - return percentage_rand, peaks_rand - # for i in range(peaks_near.shape[1]): - # axs[2].plot(percentage_near, norm2( - # peaks_near[:, i]), "-", label="near") - - # for i in range(peaks_rand.shape[1]): - # axs[2].plot(percentage_rand, norm2( - # peaks_rand[:, i]), ":", label="rand") - - -def random_increase(): - fig, axs = plt.subplots(3, 1) - - results = [] - for i in np.arange(10): - results.append(random_helper(i)) - - for percentage_rand, peaks_rand in results: - for i in range(peaks_rand.shape[1]): - axs[2].plot(percentage_rand, norm2( - peaks_rand[:, i]), ":", label="rand") - - for ax in [axs[0], axs[1]]: - orders = get_peaks() - ax.plot(orders, np.zeros_like(orders), "kx") - ax.set_xlim(0, 3) - - mask_l = np.ones(LENGTH).astype(bool) - mask_h = np.zeros(LENGTH).astype(bool) - freq, ffty = analyisis(mask_l) - axs[0].plot(freq, ffty) - freq, ffty = analyisis(mask_h) - axs[1].plot(freq, ffty) - plt.xlabel("percentage") - plt.ylabel("peak intensity") - plt.show() - plt.legend() - - -if __name__ == "__main__": - random_increase() diff --git a/imgs/erklaerbaer.svg b/imgs/erklaerbaer.svg new file mode 100644 index 0000000..ff96aec --- /dev/null +++ b/imgs/erklaerbaer.svg @@ -0,0 +1,1293 @@ + + + + diff --git a/imgs/ref_imgs.svg b/imgs/ref_imgs.svg new file mode 100644 index 0000000..36e93a3 --- /dev/null +++ b/imgs/ref_imgs.svg @@ -0,0 +1,189 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/clean_python/analysis.py b/software/analysis.py similarity index 100% rename from clean_python/analysis.py rename to software/analysis.py diff --git a/clean_python/cache.py b/software/cache.py similarity index 100% rename from clean_python/cache.py rename to software/cache.py diff --git a/clean_python/ditact_pic.py b/software/ditact_pic.py similarity index 100% rename from clean_python/ditact_pic.py rename to software/ditact_pic.py diff --git a/clean_python/extractors.py b/software/extractors.py similarity index 100% rename from clean_python/extractors.py rename to software/extractors.py diff --git a/clean_python/ising.py b/software/ising.py similarity index 100% rename from clean_python/ising.py rename to software/ising.py diff --git a/clean_python/lattices.py b/software/lattices.py similarity index 100% rename from clean_python/lattices.py rename to software/lattices.py diff --git a/clean_python/main.py b/software/main.py similarity index 100% rename from clean_python/main.py rename to software/main.py diff --git a/clean_python/mypy.conf b/software/mypy.conf similarity index 100% rename from clean_python/mypy.conf rename to software/mypy.conf diff --git a/clean_python/plotter.py b/software/plotter.py similarity index 100% rename from clean_python/plotter.py rename to software/plotter.py diff --git a/clean_python/spin_image.py b/software/spin_image.py similarity index 100% rename from clean_python/spin_image.py rename to software/spin_image.py diff --git a/clean_python/test.py b/software/test.py similarity index 100% rename from clean_python/test.py rename to software/test.py diff --git a/clean_python/tools.py b/software/tools.py similarity index 100% rename from clean_python/tools.py rename to software/tools.py diff --git a/test_fft.py b/test_fft.py deleted file mode 100644 index 583d6ab..0000000 --- a/test_fft.py +++ /dev/null @@ -1,137 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt - - -def analysis(y, RESOLUTION): - 
fft = np.fft.fft(y) - fft_clean = np.fft.fftshift(fft) - fft_freq = np.fft.fftfreq(y.size, RESOLUTION) - fft_freq_clean = np.fft.fftshift(fft_freq) - - return fft_freq_clean, np.abs(fft_clean) ** 2 - - -def play_1d(): - RESOLUTION = 0.001 - LENGTH = 10000 - - x = np.arange(0, LENGTH, RESOLUTION) # angstrom - y = np.zeros_like(x) - - pos_mono = np.arange(0, x.size, 2890) - pos_mono = ( - pos_mono + np.random.normal(size=pos_mono.shape, loc=0, scale=10) - ).astype(int) - - pos_rut = np.arange(0, x.size, 5780) - - pos_rut = np.append(pos_rut, pos_rut - 3160) - - # pos_rut = (pos_rut + np.random.normal(size=pos_rut.shape, loc=0, scale=10)).astype(int) - - y[pos_rut] = 1 - y[pos_rut + 1] = 1 - y[pos_rut + 2] = 1 - - # y = np.sin(x) - - fig, axs = plt.subplots(3, 1) - ax = axs[0] - ax.plot(x, y) - - ax = axs[1] - fft_x, fft_y = analysis(y, RESOLUTION) - ax.plot(fft_x, fft_y) - ax.plot([1.0 / 5.78, 1.0 / 2.62, 1.0 / 3.16], [0, 0, 0], "kx") - ax.plot([2.0 / 5.78, 2.0 / 2.62, 2.0 / 3.16], [0, 0, 0], "rx") - ax.plot([3.0 / 5.78, 3.0 / 2.62, 3.0 / 3.16], [0, 0, 0], "bx") - ax.set_xlim(0, 3) - - -def from_mask(mask): - pos_mono = np.arange(0, mask.size * 2.89 * 2, 2.89) - - pos_mono[::2][mask] -= 0.27 - pos_mono[1::2][mask] += 0.27 - - return pos_mono - - -def image_from_pos(pos): - RESOLUTION = 0.001 - LENGTH = 1000000 - - x = np.arange(0, LENGTH, RESOLUTION) # angstrom - y = np.zeros_like(x) - ind = np.searchsorted(x, pos) - if np.any(ind > LENGTH): - print("overflow") - ind = ind[ind < LENGTH] - y[ind] = 1 - - sigma = 500 - mu = int(LENGTH / 2) - gaussian = ( - 1 - / (sigma * np.sqrt(2 * np.pi)) - * np.exp(-((x - mu) ** 2) / (2 * sigma * sigma)) - ) - # y = np.multiply(y, gaussian) - - return x, y - - -def plot_img(x, y, ax): - ax.plot(x, y) - - -if __name__ == "__main__": - RESOLUTION = 0.001 - print("Done") - - LENGTH = 1000 - mask_h = np.ones(LENGTH).astype(bool) - pos_h = from_mask(mask_h) - x, img_h = image_from_pos(pos_h) - fftx, ffty_h = analysis(img_h, RESOLUTION) - - mask_l = np.zeros(LENGTH).astype(bool) - pos_l = from_mask(mask_l) - x, img_l = image_from_pos(pos_l) - fftx, ffty_l = analysis(img_l, RESOLUTION) - print("Done") - - mask_mixed = np.zeros(LENGTH).astype(bool) - ind = (np.random.rand(400) * (LENGTH - 1)).astype(int) - mask_mixed[ind] = True - pos_mixed = from_mask(mask_mixed) - x, img_mixed = image_from_pos(pos_mixed) - fftx, ffty_mixed = analysis(img_mixed, RESOLUTION) - - print("Done") - mask_near = np.zeros(LENGTH).astype(bool) - ind = (np.random.rand(50) * (LENGTH - 1)).astype(int) - #for i in range(1, 8): - # ind = np.append(ind, ind+i) - print("Done") - - - mask_near[ind] = True - pos_near = from_mask(mask_near) - x, img_near = image_from_pos(pos_near) - fftx, ffty_near = analysis(img_near, RESOLUTION) - - fig, axs = plt.subplots(4, 1) - plot_img(fftx, ffty_h, axs[0]) - plot_img(fftx, ffty_l, axs[1]) - plot_img(fftx, ffty_mixed, axs[2]) - plot_img(fftx, ffty_near, axs[3]) - for ax in axs: - ax.plot([1.0 / 5.78, 1.0 / 2.62, 1.0 / 3.16], [0, 0, 0], "kx") - ax.plot([2.0 / 5.78, 2.0 / 2.62, 2.0 / 3.16], [0, 0, 0], "rx") - ax.plot([3.0 / 5.78, 3.0 / 2.62, 3.0 / 3.16], [0, 0, 0], "bx") - ax.set_xlim(0, 3) - # play_1d() - plt.show() - print("Done") - pass
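
A minimal driver sketch showing how the two entry points touched by this series are expected to chain together, inferred from their __main__ blocks: ising.py takes a 1-based index followed by the full list of spin-snapshot files (each holding a spin field under the key "s1") and writes ising_rect_<idx>.npz, and analysis.py then merges those outputs, drawing the diffraction panel only when its first argument contains "intens". The file names spins_*.npz and intens.npz below are placeholders assumed for illustration, not names fixed by the patch.

# Driver sketch (assumptions: spins_*.npz snapshots with key "s1" exist,
# and intens.npz is a precomputed FFT file loadable by FFT.load).
import glob
import subprocess

spin_files = sorted(glob.glob("spins_*.npz"))   # Ising snapshots with the "s1" spin field
for idx in range(1, len(spin_files) + 1):
    # ising.py expects: <1-based index> <spin files...>; it processes files[idx-1]
    # and saves ising_rect_<idx>.npz with percentage, w_percentage and out_1..out_4.
    subprocess.run(["python", "ising.py", str(idx), *spin_files], check=True)

rect_files = sorted(glob.glob("ising_rect_*.npz"))
# analysis.py expects: <intensity file> <ising_rect files...>; the stacked plot and
# time-scale panels are always drawn, the diffraction panel only if "intens" is in argv[1].
subprocess.run(["python", "analysis.py", "intens.npz", *rect_files], check=True)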