"""
This module contains the functions required to create the
cell metrics file to your local computer. It will use the DANDI
dataset that you have downloaded and run analyses and will save them to a
.csv file titled "cellmetricsA37.csv"
Creating this file is a prerequisite for downstream analysis.
@Author: Selen Calgin
@Date created: 26/08/2024
@Last updated: 14/09/2024
"""
import numpy as np
import pandas as pd
import os
import pynapple as nap
import scipy
import matplotlib.pyplot as plt
import configparser
from functions import *
import seaborn as sns
from angular_tuning_curves import *
# set up configuration
data_dir, results_dir, cell_metrics_dir, cell_metrics_path = config()
def create_cell_metrics():
    try:
        # Check if the file exists
        cell_metrics = load_cell_metrics(path=cell_metrics_path)

        if cell_metrics.empty:
            cell_metrics_data = []  # will hold data
            global_cell_count = 1
            sessions = get_sessions()

            # Iterate through sessions, load data, and analyze data
            for session in sessions:
                print(session)  # for debugging and tracking

                # Load current session
                data = load_data_DANDI_postsub(session, remove_noise=False, lazy_loading=False)

                ## This block restricts the data to the epoch of interest.
                ## Change it based on your needs; the rest of the analysis is based on this final epoch.

                # Select the desired epoch; the data has 'wake_square' and 'wake_triangle'
                desired_epoch = 'wake_square'
                epoch = data['epochs'][desired_epoch]

                # Restrict angle (head direction) to the epoch
                angle = data['head-direction'].restrict(epoch)

                # We also have to restrict all the time series to the time support of the angles,
                # because the Motive software/OptiTrack was turned on after the start of the
                # electrophysiology recording.
                epoch2 = nap.IntervalSet(start=angle.index[0], end=angle.index[-1])

                # Restrict units and behavioral data to the epoch
                units = data['units'].restrict(epoch2)
                angle = angle.restrict(epoch2)
                position = data['position'].restrict(epoch2)
                speed = calculate_speed(position)

                # Further restrict the epoch to periods of high speed
                desired_speed_threshold = 3
                high_speed_ep = speed.threshold(desired_speed_threshold, 'above').time_support
                epoch3 = epoch2.intersect(high_speed_ep)

                units = units.restrict(epoch3)
                angle = angle.restrict(epoch3)
                position = position.restrict(epoch3)
                speed = speed.restrict(epoch3)

                # Further restrict the epoch by removing the time support where the angle has NaNs
                # (Adrian notes it's normal to have NaN angles, but pynapple does not yet support them)
                epoch4 = remove_na(epoch3, angle)

                units = units.restrict(epoch4)
                angle = angle.restrict(epoch4)
                position = position.restrict(epoch4)
                speed = speed.restrict(epoch4)

                final_desired_epoch = epoch4
                ## End of block

                # Get cell types
                cell_type_labels = get_cell_types_from_DANDI(units)

                # Compute tuning curves
                tuning_curves = compute_angular_tuning_curves(units, angle)

                # Compute control tuning curves from the time-reversed angle
                angle_reversed = time_reverse_feature(angle)
                tuning_curves_control = compute_angular_tuning_curves(units, angle_reversed)

                # Compute tuning curves for the two halves of the session
                tc_half1, tc_half2 = compute_split_angular_tuning_curves(units, angle, final_desired_epoch)
                tc_half1_control, tc_half2_control = compute_split_angular_tuning_curves(units, angle_reversed, final_desired_epoch)

                # Compute HD info
                hd_info = nap.compute_1d_mutual_info(tuning_curves, angle, minmax=(0, 2 * np.pi))
                hd_info_control = nap.compute_1d_mutual_info(tuning_curves_control, angle_reversed, minmax=(0, 2 * np.pi))
                # hd_info = compute_hd_info(data, tc, control=False)
                # hd_info_control = compute_hd_info(data, tc_control, control=True)

                # Compute split tuning curve correlations
                tc_correlations = compute_tuning_curve_correlations(tc_half1, tc_half2)
                tc_correlations_control = compute_tuning_curve_correlations(tc_half1_control, tc_half2_control)

                # Get firing rates
                # TODO: make open field and wake epoch consistent
                average_rate = units['rate']
                wake_explore = units.restrict(get_open_field_ep(data))['rate']

                # Get trough to peak
                trough_to_peak = units['trough_to_peak']

                for unit in units:
                    cellID = unit + 1

                    # Add all information of one neuron to one row of information
                    cell_metrics_data.append({
                        'sessionName': session,
                        'cell': global_cell_count,  # Global count of all cells
                        'cellID': cellID,  # Local count within the session
                        'firingRate': average_rate[unit],
                        'firingRateExplore': wake_explore[unit],
                        'troughtoPeak': trough_to_peak[unit],
                        'ex': cell_type_labels['ex'][unit],
                        'hd': cell_type_labels['hd'][unit],
                        'fs': cell_type_labels['fs'][unit],
                        'nhd': cell_type_labels['nhd'][unit],
                        'other': cell_type_labels['other'][unit],
                        'hdInfo': hd_info.values.flatten()[unit],
                        'hdInfo_rev': hd_info_control.values.flatten()[unit],
                        'pearsonR': tc_correlations['pearsonR'][unit],
                        'pearsonR_rev': tc_correlations_control['pearsonR'][unit]
                    })

                    # Increment global cell counter
                    global_cell_count += 1

            # Create DataFrame from collected cell metrics data
            cell_metrics_df = pd.DataFrame(cell_metrics_data)

            # Save the DataFrame to CSV
            cell_metrics_df.to_csv(cell_metrics_path)
            print(f"Cell metrics file created at {cell_metrics_path}")

        else:
            print(f"Cell metrics file already exists at {cell_metrics_path}")

    except Exception as e:
        # Handle any exceptions during file creation or writing
        print(f"An error occurred while creating the cell metrics file: {str(e)}")