Commit bd773b48 authored by tomrink

minor...

parent 665dd1e9
@@ -1717,26 +1717,62 @@ def split_data(times):
                    [get_timestamp('2022-01-01_00:00'), get_timestamp('2022-01-07_23:59')],
                    [get_timestamp('2022-03-01_00:00'), get_timestamp('2022-03-07_23:59')]]
-    test_time_idxs = []
+    keep_out = 10800  # 3 hrs
+    vld_time_idxs = []
     for t_rng in time_ranges:
+        t_rng[0] -= keep_out
+        t_rng[1] += keep_out
         tidxs = np.searchsorted(times, t_rng)
+        vld_time_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
+    vld_time_idxs = np.concatenate(vld_time_idxs, axis=None)
+    # train_time_idxs = time_idxs[np.in1d(time_idxs, vld_time_idxs, invert=True)]
+    # Save this just in case.
+    # # Keep out
+    # out_idxs = []
+    # for k, t_rng in enumerate(time_ranges):
+    #     t_a = time_ranges[k][0]
+    #     t_b = time_ranges[k][1]
+    #     tidxs = np.searchsorted(times, [t_a - 10800, t_a])
+    #     out_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
+    #     tidxs = np.searchsorted(times, [t_b, t_b + 10800])
+    #     out_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
+    # out_idxs = np.concatenate(out_idxs, axis=None)
+    # train_time_idxs = train_time_idxs[np.in1d(train_time_idxs, out_idxs, invert=True)]
+    time_ranges = [[get_timestamp('2018-02-01_00:00'), get_timestamp('2018-02-05_23:59')],
+                   [get_timestamp('2018-04-01_00:00'), get_timestamp('2018-04-05_23:59')],
+                   [get_timestamp('2018-06-01_00:00'), get_timestamp('2018-06-05_23:59')],
+                   [get_timestamp('2018-08-01_00:00'), get_timestamp('2018-08-05_23:59')],
+                   [get_timestamp('2018-10-01_00:00'), get_timestamp('2018-10-05_23:59')],
+                   [get_timestamp('2018-12-01_00:00'), get_timestamp('2018-12-05_23:59')],
+                   [get_timestamp('2019-02-01_00:00'), get_timestamp('2019-02-05_23:59')],
+                   [get_timestamp('2019-04-01_00:00'), get_timestamp('2019-04-05_23:59')],
+                   [get_timestamp('2019-06-01_00:00'), get_timestamp('2019-06-05_23:59')],
+                   [get_timestamp('2019-08-01_00:00'), get_timestamp('2019-08-05_23:59')],
+                   [get_timestamp('2019-10-01_00:00'), get_timestamp('2019-10-05_23:59')],
+                   [get_timestamp('2019-12-01_00:00'), get_timestamp('2019-12-05_23:59')],
+                   [get_timestamp('2021-10-05_00:00'), get_timestamp('2021-10-10_23:59')],
+                   [get_timestamp('2021-12-01_00:00'), get_timestamp('2021-12-05_23:59')],
+                   [get_timestamp('2022-02-01_00:00'), get_timestamp('2022-02-05_23:59')],
+                   [get_timestamp('2022-03-25_00:00'), get_timestamp('2022-03-30_23:59')]]
+    tst_time_idxs = []
+    for t_rng in time_ranges:
+        t_rng[0] -= keep_out
+        t_rng[1] += keep_out
+        tidxs = np.searchsorted(times, t_rng)
-        test_time_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
-    test_time_idxs = np.concatenate(test_time_idxs, axis=None)
-    train_time_idxs = time_idxs[np.in1d(time_idxs, test_time_idxs, invert=True)]
-    # Keep out
-    out_idxs = []
-    for k, t_rng in enumerate(time_ranges):
-        t_a = time_ranges[k][0]
-        t_b = time_ranges[k][1]
-        tidxs = np.searchsorted(times, [t_a - 10800, t_a])
-        out_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
-        tidxs = np.searchsorted(times, [t_b, t_b + 10800])
-        out_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
-    out_idxs = np.concatenate(out_idxs, axis=None)
-    train_time_idxs = train_time_idxs[np.in1d(train_time_idxs, out_idxs, invert=True)]
-    return train_time_idxs, test_time_idxs
+        tst_time_idxs.append(np.arange(tidxs[0], tidxs[1], 1))
+    tst_time_idxs = np.concatenate(tst_time_idxs, axis=None)
+    vld_tst_time_idxs = np.concatenate([vld_time_idxs, tst_time_idxs])
+    vld_tst_time_idxs = np.sort(vld_tst_time_idxs)
+    train_time_idxs = time_idxs[np.in1d(time_idxs, vld_tst_time_idxs, invert=True)]
+    return train_time_idxs, vld_time_idxs, tst_time_idxs
def normalize(data, param, mean_std_dict, add_noise=False, noise_scale=1.0, seed=None):
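
The change above reworks split_data to return three index sets (train, validation, test) instead of two, padding every held-out window by keep_out = 10800 seconds (3 hours) on each side so that samples adjacent to a window boundary do not end up in training. The standalone sketch below illustrates the same windowing pattern; it is not code from this repository, and the names (window_indices, split_indices, KEEP_OUT) as well as the assumption that times is a sorted 1-D array of epoch seconds are illustrative only.

import numpy as np

KEEP_OUT = 10800  # 3 hours, in seconds

def window_indices(times, ranges, keep_out=KEEP_OUT):
    # Indices of `times` that fall inside any [start, end] range padded by keep_out.
    chunks = []
    for t_a, t_b in ranges:
        lo, hi = np.searchsorted(times, [t_a - keep_out, t_b + keep_out])
        chunks.append(np.arange(lo, hi))
    return np.concatenate(chunks) if chunks else np.array([], dtype=int)

def split_indices(times, vld_ranges, tst_ranges, keep_out=KEEP_OUT):
    # Three-way split: whatever is not in a validation or test window is training.
    time_idxs = np.arange(times.shape[0])
    vld_idxs = window_indices(times, vld_ranges, keep_out)
    tst_idxs = window_indices(times, tst_ranges, keep_out)
    held_out = np.sort(np.concatenate([vld_idxs, tst_idxs]))
    # np.isin(..., invert=True) plays the role of the np.in1d call in the commit.
    trn_idxs = time_idxs[np.isin(time_idxs, held_out, invert=True)]
    return trn_idxs, vld_idxs, tst_idxs

if __name__ == '__main__':
    # One sample every 10 minutes over two days, with a 30-minute keep-out.
    times = np.arange(0, 2 * 86400, 600)
    vld = [[3600, 2 * 3600]]                   # hour 1-2 of day 1
    tst = [[86400 + 3600, 86400 + 2 * 3600]]   # hour 1-2 of day 2
    trn_idxs, vld_idxs, tst_idxs = split_indices(times, vld, tst, keep_out=1800)
    print(trn_idxs.size, vld_idxs.size, tst_idxs.size)  # 264 12 12

As in the commit, np.searchsorted is only valid here because times is sorted; the keep-out padding widens each window symmetrically before the indices are looked up.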