id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
7,700
adamrehn/slidingwindow
slidingwindow/WindowDistance.py
generateDistanceMatrix
def generateDistanceMatrix(width, height): """ Generates a matrix specifying the distance of each point in a window to its centre. """ # Determine the coordinates of the exact centre of the window originX = width / 2 originY = height / 2 # Generate the distance matrix distances = zerosFactory((height,width), dtype=np.float) for index, val in np.ndenumerate(distances): y,x = index distances[(y,x)] = math.sqrt( math.pow(x - originX, 2) + math.pow(y - originY, 2) ) return distances
python
def generateDistanceMatrix(width, height):
    """
    Generates a matrix specifying the distance of each point in a window to its centre.

    width/height are the window dimensions in pixels; the result has shape
    (height, width) and dtype float64.
    """
    # Determine the coordinates of the exact centre of the window
    originX = width / 2
    originY = height / 2

    # Generate the distance matrix. Use the builtin `float` dtype instead of
    # `np.float`, which was deprecated in NumPy 1.20 and removed in NumPy 1.24
    # (they were aliases, so the produced dtype is identical: float64).
    distances = zerosFactory((height, width), dtype=float)
    for (y, x), _ in np.ndenumerate(distances):
        # math.hypot(a, b) == sqrt(a**2 + b**2)
        distances[y, x] = math.hypot(x - originX, y - originY)

    return distances
[ "def", "generateDistanceMatrix", "(", "width", ",", "height", ")", ":", "# Determine the coordinates of the exact centre of the window", "originX", "=", "width", "/", "2", "originY", "=", "height", "/", "2", "# Generate the distance matrix", "distances", "=", "zerosFactor...
Generates a matrix specifying the distance of each point in a window to its centre.
[ "Generates", "a", "matrix", "specifying", "the", "distance", "of", "each", "point", "in", "a", "window", "to", "its", "centre", "." ]
17ea9395b48671e8cb7321b9510c6b25fec5e45f
https://github.com/adamrehn/slidingwindow/blob/17ea9395b48671e8cb7321b9510c6b25fec5e45f/slidingwindow/WindowDistance.py#L5-L20
7,701
adamrehn/slidingwindow
slidingwindow/ArrayUtils.py
_requiredSize
def _requiredSize(shape, dtype): """ Determines the number of bytes required to store a NumPy array with the specified shape and datatype. """ return math.floor(np.prod(np.asarray(shape, dtype=np.uint64)) * np.dtype(dtype).itemsize)
python
def _requiredSize(shape, dtype): """ Determines the number of bytes required to store a NumPy array with the specified shape and datatype. """ return math.floor(np.prod(np.asarray(shape, dtype=np.uint64)) * np.dtype(dtype).itemsize)
[ "def", "_requiredSize", "(", "shape", ",", "dtype", ")", ":", "return", "math", ".", "floor", "(", "np", ".", "prod", "(", "np", ".", "asarray", "(", "shape", ",", "dtype", "=", "np", ".", "uint64", ")", ")", "*", "np", ".", "dtype", "(", "dtype"...
Determines the number of bytes required to store a NumPy array with the specified shape and datatype.
[ "Determines", "the", "number", "of", "bytes", "required", "to", "store", "a", "NumPy", "array", "with", "the", "specified", "shape", "and", "datatype", "." ]
17ea9395b48671e8cb7321b9510c6b25fec5e45f
https://github.com/adamrehn/slidingwindow/blob/17ea9395b48671e8cb7321b9510c6b25fec5e45f/slidingwindow/ArrayUtils.py#L5-L10
7,702
adamrehn/slidingwindow
slidingwindow/ArrayUtils.py
arrayFactory
def arrayFactory(shape, dtype=float): """ Creates a new ndarray of the specified shape and datatype, storing it in memory if there is sufficient available space or else using a memory-mapped temporary file to provide the underlying buffer. """ # Determine the number of bytes required to store the array requiredBytes = _requiredSize(shape, dtype) # Determine if there is sufficient available memory vmem = psutil.virtual_memory() if vmem.available > requiredBytes: return np.ndarray(shape=shape, dtype=dtype) else: return TempfileBackedArray(shape=shape, dtype=dtype)
python
def arrayFactory(shape, dtype=float):
    """
    Creates a new ndarray of the specified shape and datatype, storing it in memory
    if there is sufficient available space or else using a memory-mapped temporary
    file to provide the underlying buffer.
    """
    # Number of bytes the new array will occupy
    needed = _requiredSize(shape, dtype)

    # Prefer an in-memory array; fall back to a temporary-file-backed
    # buffer when available RAM is insufficient.
    if psutil.virtual_memory().available > needed:
        return np.ndarray(shape=shape, dtype=dtype)
    return TempfileBackedArray(shape=shape, dtype=dtype)
[ "def", "arrayFactory", "(", "shape", ",", "dtype", "=", "float", ")", ":", "# Determine the number of bytes required to store the array", "requiredBytes", "=", "_requiredSize", "(", "shape", ",", "dtype", ")", "# Determine if there is sufficient available memory", "vmem", "...
Creates a new ndarray of the specified shape and datatype, storing it in memory if there is sufficient available space or else using a memory-mapped temporary file to provide the underlying buffer.
[ "Creates", "a", "new", "ndarray", "of", "the", "specified", "shape", "and", "datatype", "storing", "it", "in", "memory", "if", "there", "is", "sufficient", "available", "space", "or", "else", "using", "a", "memory", "-", "mapped", "temporary", "file", "to", ...
17ea9395b48671e8cb7321b9510c6b25fec5e45f
https://github.com/adamrehn/slidingwindow/blob/17ea9395b48671e8cb7321b9510c6b25fec5e45f/slidingwindow/ArrayUtils.py#L40-L55
7,703
adamrehn/slidingwindow
slidingwindow/ArrayUtils.py
arrayCast
def arrayCast(source, dtype): """ Casts a NumPy array to the specified datatype, storing the copy in memory if there is sufficient available space or else using a memory-mapped temporary file to provide the underlying buffer. """ # Determine the number of bytes required to store the array requiredBytes = _requiredSize(source.shape, dtype) # Determine if there is sufficient available memory vmem = psutil.virtual_memory() if vmem.available > requiredBytes: return source.astype(dtype, subok=False) else: dest = arrayFactory(source.shape, dtype) np.copyto(dest, source, casting='unsafe') return dest
python
def arrayCast(source, dtype):
    """
    Casts a NumPy array to the specified datatype, storing the copy in memory if
    there is sufficient available space or else using a memory-mapped temporary
    file to provide the underlying buffer.
    """
    # Number of bytes the cast copy will occupy
    needed = _requiredSize(source.shape, dtype)

    # Cast directly in memory when it fits; otherwise copy into a
    # (possibly file-backed) buffer obtained from arrayFactory().
    if psutil.virtual_memory().available > needed:
        return source.astype(dtype, subok=False)
    dest = arrayFactory(source.shape, dtype)
    np.copyto(dest, source, casting='unsafe')
    return dest
[ "def", "arrayCast", "(", "source", ",", "dtype", ")", ":", "# Determine the number of bytes required to store the array", "requiredBytes", "=", "_requiredSize", "(", "source", ".", "shape", ",", "dtype", ")", "# Determine if there is sufficient available memory", "vmem", "=...
Casts a NumPy array to the specified datatype, storing the copy in memory if there is sufficient available space or else using a memory-mapped temporary file to provide the underlying buffer.
[ "Casts", "a", "NumPy", "array", "to", "the", "specified", "datatype", "storing", "the", "copy", "in", "memory", "if", "there", "is", "sufficient", "available", "space", "or", "else", "using", "a", "memory", "-", "mapped", "temporary", "file", "to", "provide"...
17ea9395b48671e8cb7321b9510c6b25fec5e45f
https://github.com/adamrehn/slidingwindow/blob/17ea9395b48671e8cb7321b9510c6b25fec5e45f/slidingwindow/ArrayUtils.py#L67-L84
7,704
adamrehn/slidingwindow
slidingwindow/ArrayUtils.py
determineMaxWindowSize
def determineMaxWindowSize(dtype, limit=None): """ Determines the largest square window size that can be used, based on the specified datatype and amount of currently available system memory. If `limit` is specified, then this value will be returned in the event that it is smaller than the maximum computed size. """ vmem = psutil.virtual_memory() maxSize = math.floor(math.sqrt(vmem.available / np.dtype(dtype).itemsize)) if limit is None or limit >= maxSize: return maxSize else: return limit
python
def determineMaxWindowSize(dtype, limit=None):
    """
    Determines the largest square window size that can be used, based on the
    specified datatype and amount of currently available system memory. If
    `limit` is specified, then this value will be returned in the event that
    it is smaller than the maximum computed size.
    """
    available = psutil.virtual_memory().available
    # Largest n such that an n*n array of this dtype fits in available memory
    maxSize = math.floor(math.sqrt(available / np.dtype(dtype).itemsize))
    return maxSize if limit is None or limit >= maxSize else limit
[ "def", "determineMaxWindowSize", "(", "dtype", ",", "limit", "=", "None", ")", ":", "vmem", "=", "psutil", ".", "virtual_memory", "(", ")", "maxSize", "=", "math", ".", "floor", "(", "math", ".", "sqrt", "(", "vmem", ".", "available", "/", "np", ".", ...
Determines the largest square window size that can be used, based on the specified datatype and amount of currently available system memory. If `limit` is specified, then this value will be returned in the event that it is smaller than the maximum computed size.
[ "Determines", "the", "largest", "square", "window", "size", "that", "can", "be", "used", "based", "on", "the", "specified", "datatype", "and", "amount", "of", "currently", "available", "system", "memory", ".", "If", "limit", "is", "specified", "then", "this", ...
17ea9395b48671e8cb7321b9510c6b25fec5e45f
https://github.com/adamrehn/slidingwindow/blob/17ea9395b48671e8cb7321b9510c6b25fec5e45f/slidingwindow/ArrayUtils.py#L87-L100
7,705
kwikteam/phy
phy/cluster/views/base.py
ManualClusteringView.set_state
def set_state(self, state): """Set the view state. The passed object is the persisted `self.state` bunch. May be overriden. """ for k, v in state.items(): setattr(self, k, v)
python
def set_state(self, state):
    """Set the view state.

    The passed object is the persisted `self.state` bunch.

    May be overriden.

    """
    # Copy every persisted key onto the view as an attribute.
    for key, value in state.items():
        setattr(self, key, value)
[ "def", "set_state", "(", "self", ",", "state", ")", ":", "for", "k", ",", "v", "in", "state", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "k", ",", "v", ")" ]
Set the view state. The passed object is the persisted `self.state` bunch. May be overriden.
[ "Set", "the", "view", "state", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/base.py#L128-L137
7,706
kwikteam/phy
phy/utils/_misc.py
_fullname
def _fullname(o): """Return the fully-qualified name of a function.""" return o.__module__ + "." + o.__name__ if o.__module__ else o.__name__
python
def _fullname(o): """Return the fully-qualified name of a function.""" return o.__module__ + "." + o.__name__ if o.__module__ else o.__name__
[ "def", "_fullname", "(", "o", ")", ":", "return", "o", ".", "__module__", "+", "\".\"", "+", "o", ".", "__name__", "if", "o", ".", "__module__", "else", "o", ".", "__name__" ]
Return the fully-qualified name of a function.
[ "Return", "the", "fully", "-", "qualified", "name", "of", "a", "function", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/utils/_misc.py#L123-L125
7,707
kwikteam/phy
phy/cluster/views/feature.py
FeatureView._get_axis_label
def _get_axis_label(self, dim): """Return the channel id from a dimension, if applicable.""" if u(dim[:-1]).isdecimal(): n = len(self.channel_ids) return str(self.channel_ids[int(dim[:-1]) % n]) + dim[-1] else: return dim
python
def _get_axis_label(self, dim):
    """Return the channel id from a dimension, if applicable."""
    prefix = dim[:-1]
    # Non-numeric prefixes (e.g. attribute dimensions like 'time') pass
    # through unchanged.
    if not u(prefix).isdecimal():
        return dim
    # Map the relative channel index onto the selected channels, wrapping
    # around, and keep the trailing PC letter.
    n = len(self.channel_ids)
    channel = self.channel_ids[int(prefix) % n]
    return str(channel) + dim[-1]
[ "def", "_get_axis_label", "(", "self", ",", "dim", ")", ":", "if", "u", "(", "dim", "[", ":", "-", "1", "]", ")", ".", "isdecimal", "(", ")", ":", "n", "=", "len", "(", "self", ".", "channel_ids", ")", "return", "str", "(", "self", ".", "channe...
Return the channel id from a dimension, if applicable.
[ "Return", "the", "channel", "id", "from", "a", "dimension", "if", "applicable", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/feature.py#L132-L138
7,708
kwikteam/phy
phy/cluster/views/feature.py
FeatureView._get_axis_data
def _get_axis_data(self, bunch, dim, cluster_id=None, load_all=None): """Extract the points from the data on a given dimension. bunch is returned by the features() function. dim is the string specifying the dimensions to extract for the data. """ if dim in self.attributes: return self.attributes[dim](cluster_id, load_all=load_all) masks = bunch.get('masks', None) assert dim not in self.attributes # This is called only on PC data. s = 'ABCDEFGHIJ' # Channel relative index. c_rel = int(dim[:-1]) # Get the channel_id from the currently-selected channels. channel_id = self.channel_ids[c_rel % len(self.channel_ids)] # Skup the plot if the channel id is not displayed. if channel_id not in bunch.channel_ids: # pragma: no cover return None # Get the column index of the current channel in data. c = list(bunch.channel_ids).index(channel_id) # Principal component: A=0, B=1, etc. d = s.index(dim[-1]) if masks is not None: masks = masks[:, c] return Bunch(data=bunch.data[:, c, d], masks=masks, )
python
def _get_axis_data(self, bunch, dim, cluster_id=None, load_all=None):
    """Extract the points from the data on a given dimension.

    bunch is returned by the features() function.
    dim is the string specifying the dimensions to extract for the data.

    """
    # Attribute dimensions (e.g. time) are delegated to their accessor.
    if dim in self.attributes:
        return self.attributes[dim](cluster_id, load_all=load_all)
    masks = bunch.get('masks', None)
    assert dim not in self.attributes  # This is called only on PC data.
    pc_letters = 'ABCDEFGHIJ'
    # Relative channel index encoded in the dimension string (e.g. '0A').
    rel_index = int(dim[:-1])
    # Map the relative index onto the currently-selected channels.
    channel_id = self.channel_ids[rel_index % len(self.channel_ids)]
    # Skip the plot if the channel id is not displayed.
    if channel_id not in bunch.channel_ids:  # pragma: no cover
        return None
    # Column index of the current channel in the data array.
    col = list(bunch.channel_ids).index(channel_id)
    # Principal component: A=0, B=1, etc.
    pc = pc_letters.index(dim[-1])
    if masks is not None:
        masks = masks[:, col]
    return Bunch(data=bunch.data[:, col, pc], masks=masks)
[ "def", "_get_axis_data", "(", "self", ",", "bunch", ",", "dim", ",", "cluster_id", "=", "None", ",", "load_all", "=", "None", ")", ":", "if", "dim", "in", "self", ".", "attributes", ":", "return", "self", ".", "attributes", "[", "dim", "]", "(", "clu...
Extract the points from the data on a given dimension. bunch is returned by the features() function. dim is the string specifying the dimensions to extract for the data.
[ "Extract", "the", "points", "from", "the", "data", "on", "a", "given", "dimension", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/feature.py#L140-L167
7,709
kwikteam/phy
phy/cluster/views/feature.py
FeatureView._plot_labels
def _plot_labels(self): """Plot feature labels along left and bottom edge of subplots""" # iterate simultaneously over kth row in left column and # kth column in bottom row: br = self.n_cols - 1 # bottom row for k in range(0, self.n_cols): dim_x, _ = self.grid_dim[0][k].split(',') _, dim_y = self.grid_dim[k][br].split(',') # Get the channel ids corresponding to the relative channel indices # specified in the dimensions. Channel 0 corresponds to the first # best channel for the selected cluster, and so on. dim_x = self._get_axis_label(dim_x) dim_y = self._get_axis_label(dim_y) # Left edge of left column of subplots. self[k, 0].text(pos=[-1., 0.], text=dim_y, anchor=[-1.03, 0.], data_bounds=None, ) # Bottom edge of bottom row of subplots. self[br, k].text(pos=[0., -1.], text=dim_x, anchor=[0., -1.04], data_bounds=None, )
python
def _plot_labels(self):
    """Plot feature labels along left and bottom edge of subplots"""
    # Walk the kth row of the left column and the kth column of the
    # bottom row simultaneously.
    bottom = self.n_cols - 1
    for k in range(self.n_cols):
        dim_x, _ = self.grid_dim[0][k].split(',')
        _, dim_y = self.grid_dim[k][bottom].split(',')
        # Resolve relative channel indices to actual channel ids: channel 0
        # corresponds to the first best channel for the selected cluster,
        # and so on.
        label_x = self._get_axis_label(dim_x)
        label_y = self._get_axis_label(dim_y)
        # Left edge of left column of subplots.
        self[k, 0].text(pos=[-1., 0.],
                        text=label_y,
                        anchor=[-1.03, 0.],
                        data_bounds=None,
                        )
        # Bottom edge of bottom row of subplots.
        self[bottom, k].text(pos=[0., -1.],
                             text=label_x,
                             anchor=[0., -1.04],
                             data_bounds=None,
                             )
[ "def", "_plot_labels", "(", "self", ")", ":", "# iterate simultaneously over kth row in left column and", "# kth column in bottom row:", "br", "=", "self", ".", "n_cols", "-", "1", "# bottom row", "for", "k", "in", "range", "(", "0", ",", "self", ".", "n_cols", ")...
Plot feature labels along left and bottom edge of subplots
[ "Plot", "feature", "labels", "along", "left", "and", "bottom", "edge", "of", "subplots" ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/feature.py#L199-L223
7,710
kwikteam/phy
phy/cluster/views/feature.py
FeatureView.on_channel_click
def on_channel_click(self, channel_id=None, key=None, button=None): """Respond to the click on a channel.""" channels = self.channel_ids if channels is None: return if len(channels) == 1: self.on_select() return assert len(channels) >= 2 # Get the axis from the pressed button (1, 2, etc.) # axis = 'x' if button == 1 else 'y' d = 0 if button == 1 else 1 # Change the first or second best channel. old = channels[d] # Avoid updating the view if the channel doesn't change. if channel_id == old: return channels[d] = channel_id # Ensure that the first two channels are different. if channels[1 - d] == channel_id: channels[1 - d] = old assert channels[0] != channels[1] # Remove duplicate channels. self.channel_ids = _uniq(channels) logger.debug("Choose channels %d and %d in feature view.", *channels[:2]) # Fix the channels temporarily. self.on_select(fixed_channels=True)
python
def on_channel_click(self, channel_id=None, key=None, button=None):
    """Respond to the click on a channel."""
    channels = self.channel_ids
    if channels is None:
        return
    # With a single channel there is nothing to swap; just refresh.
    if len(channels) == 1:
        self.on_select()
        return
    assert len(channels) >= 2
    # Button 1 (left) targets the x axis (slot 0), any other the y axis.
    d = 0 if button == 1 else 1
    old = channels[d]
    # Avoid updating the view if the channel doesn't change.
    if channel_id == old:
        return
    channels[d] = channel_id
    # Keep the first two channels distinct: if the new channel collides
    # with the other slot, move the old channel there.
    if channels[1 - d] == channel_id:
        channels[1 - d] = old
    assert channels[0] != channels[1]
    # Remove duplicate channels.
    self.channel_ids = _uniq(channels)
    logger.debug("Choose channels %d and %d in feature view.",
                 *channels[:2])
    # Fix the channels temporarily.
    self.on_select(fixed_channels=True)
[ "def", "on_channel_click", "(", "self", ",", "channel_id", "=", "None", ",", "key", "=", "None", ",", "button", "=", "None", ")", ":", "channels", "=", "self", ".", "channel_ids", "if", "channels", "is", "None", ":", "return", "if", "len", "(", "channe...
Respond to the click on a channel.
[ "Respond", "to", "the", "click", "on", "a", "channel", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/feature.py#L317-L344
7,711
kwikteam/phy
phy/cluster/views/feature.py
FeatureView.on_request_split
def on_request_split(self): """Return the spikes enclosed by the lasso.""" if (self.lasso.count < 3 or not len(self.cluster_ids)): # pragma: no cover return np.array([], dtype=np.int64) assert len(self.channel_ids) # Get the dimensions of the lassoed subplot. i, j = self.lasso.box dim = self.grid_dim[i][j] dim_x, dim_y = dim.split(',') # Get all points from all clusters. pos = [] spike_ids = [] for cluster_id in self.cluster_ids: # Load all spikes. bunch = self.features(cluster_id, channel_ids=self.channel_ids, load_all=True) px = self._get_axis_data(bunch, dim_x, cluster_id=cluster_id, load_all=True) py = self._get_axis_data(bunch, dim_y, cluster_id=cluster_id, load_all=True) points = np.c_[px.data, py.data] # Normalize the points. xmin, xmax = self._get_axis_bounds(dim_x, px) ymin, ymax = self._get_axis_bounds(dim_y, py) r = Range((xmin, ymin, xmax, ymax)) points = r.apply(points) pos.append(points) spike_ids.append(bunch.spike_ids) pos = np.vstack(pos) spike_ids = np.concatenate(spike_ids) # Find lassoed spikes. ind = self.lasso.in_polygon(pos) self.lasso.clear() return np.unique(spike_ids[ind])
python
def on_request_split(self):
    """Return the spikes enclosed by the lasso."""
    # Need at least a triangle and a selection to split anything.
    if (self.lasso.count < 3 or not len(self.cluster_ids)):  # pragma: no cover
        return np.array([], dtype=np.int64)
    assert len(self.channel_ids)
    # Dimensions of the lassoed subplot.
    i, j = self.lasso.box
    dim_x, dim_y = self.grid_dim[i][j].split(',')
    # Gather the points of every selected cluster.
    all_points = []
    all_spike_ids = []
    for cluster_id in self.cluster_ids:
        # Load all spikes of this cluster.
        bunch = self.features(cluster_id,
                              channel_ids=self.channel_ids,
                              load_all=True)
        px = self._get_axis_data(bunch, dim_x,
                                 cluster_id=cluster_id, load_all=True)
        py = self._get_axis_data(bunch, dim_y,
                                 cluster_id=cluster_id, load_all=True)
        points = np.c_[px.data, py.data]
        # Normalize the points into the subplot's data bounds.
        xmin, xmax = self._get_axis_bounds(dim_x, px)
        ymin, ymax = self._get_axis_bounds(dim_y, py)
        points = Range((xmin, ymin, xmax, ymax)).apply(points)
        all_points.append(points)
        all_spike_ids.append(bunch.spike_ids)
    pos = np.vstack(all_points)
    spike_ids = np.concatenate(all_spike_ids)
    # Keep the spikes whose points fall inside the lasso polygon.
    ind = self.lasso.in_polygon(pos)
    self.lasso.clear()
    return np.unique(spike_ids[ind])
[ "def", "on_request_split", "(", "self", ")", ":", "if", "(", "self", ".", "lasso", ".", "count", "<", "3", "or", "not", "len", "(", "self", ".", "cluster_ids", ")", ")", ":", "# pragma: no cover", "return", "np", ".", "array", "(", "[", "]", ",", "...
Return the spikes enclosed by the lasso.
[ "Return", "the", "spikes", "enclosed", "by", "the", "lasso", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/feature.py#L346-L387
7,712
kwikteam/phy
phy/plot/interact.py
Boxed.get_closest_box
def get_closest_box(self, pos): """Get the box closest to some position.""" pos = np.atleast_2d(pos) d = np.sum((np.array(self.box_pos) - pos) ** 2, axis=1) idx = np.argmin(d) return idx
python
def get_closest_box(self, pos):
    """Get the box closest to some position."""
    pos = np.atleast_2d(pos)
    # Squared Euclidean distance from each box centre to the position;
    # the square root is unnecessary for an argmin.
    sq_dist = ((np.array(self.box_pos) - pos) ** 2).sum(axis=1)
    return np.argmin(sq_dist)
[ "def", "get_closest_box", "(", "self", ",", "pos", ")", ":", "pos", "=", "np", ".", "atleast_2d", "(", "pos", ")", "d", "=", "np", ".", "sum", "(", "(", "np", ".", "array", "(", "self", ".", "box_pos", ")", "-", "pos", ")", "**", "2", ",", "a...
Get the box closest to some position.
[ "Get", "the", "box", "closest", "to", "some", "position", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/interact.py#L259-L264
7,713
kwikteam/phy
phy/plot/interact.py
Boxed.update_boxes
def update_boxes(self, box_pos, box_size): """Set the box bounds from specified box positions and sizes.""" assert box_pos.shape == (self.n_boxes, 2) assert len(box_size) == 2 self.box_bounds = _get_boxes(box_pos, size=box_size, keep_aspect_ratio=self.keep_aspect_ratio, )
python
def update_boxes(self, box_pos, box_size):
    """Set the box bounds from specified box positions and sizes."""
    # One (x, y) centre per box, and a single (w, h) size for all boxes.
    assert box_pos.shape == (self.n_boxes, 2)
    assert len(box_size) == 2
    self.box_bounds = _get_boxes(
        box_pos,
        size=box_size,
        keep_aspect_ratio=self.keep_aspect_ratio,
    )
[ "def", "update_boxes", "(", "self", ",", "box_pos", ",", "box_size", ")", ":", "assert", "box_pos", ".", "shape", "==", "(", "self", ".", "n_boxes", ",", "2", ")", "assert", "len", "(", "box_size", ")", "==", "2", "self", ".", "box_bounds", "=", "_ge...
Set the box bounds from specified box positions and sizes.
[ "Set", "the", "box", "bounds", "from", "specified", "box", "positions", "and", "sizes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/interact.py#L266-L273
7,714
kwikteam/phy
phy/gui/qt.py
require_qt
def require_qt(func): """Specify that a function requires a Qt application. Use this decorator to specify that a function needs a running Qt application before it can run. An error is raised if that is not the case. """ @wraps(func) def wrapped(*args, **kwargs): if not QApplication.instance(): # pragma: no cover raise RuntimeError("A Qt application must be created.") return func(*args, **kwargs) return wrapped
python
def require_qt(func):
    """Specify that a function requires a Qt application.

    Use this decorator to specify that a function needs a running
    Qt application before it can run. An error is raised if that is not
    the case.

    """
    @wraps(func)
    def checked_call(*args, **kwargs):
        # Refuse to run without a live QApplication instance.
        app = QApplication.instance()
        if not app:  # pragma: no cover
            raise RuntimeError("A Qt application must be created.")
        return func(*args, **kwargs)
    return checked_call
[ "def", "require_qt", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "QApplication", ".", "instance", "(", ")", ":", "# pragma: no cover", "raise", "RuntimeError...
Specify that a function requires a Qt application. Use this decorator to specify that a function needs a running Qt application before it can run. An error is raised if that is not the case.
[ "Specify", "that", "a", "function", "requires", "a", "Qt", "application", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/qt.py#L103-L116
7,715
kwikteam/phy
phy/gui/qt.py
create_app
def create_app(): """Create a Qt application.""" global QT_APP QT_APP = QApplication.instance() if QT_APP is None: # pragma: no cover QT_APP = QApplication(sys.argv) return QT_APP
python
def create_app():
    """Create a Qt application."""
    global QT_APP
    # Reuse the running application if there is one, otherwise start one.
    existing = QApplication.instance()
    if existing is None:  # pragma: no cover
        existing = QApplication(sys.argv)
    QT_APP = existing
    return QT_APP
[ "def", "create_app", "(", ")", ":", "global", "QT_APP", "QT_APP", "=", "QApplication", ".", "instance", "(", ")", "if", "QT_APP", "is", "None", ":", "# pragma: no cover", "QT_APP", "=", "QApplication", "(", "sys", ".", "argv", ")", "return", "QT_APP" ]
Create a Qt application.
[ "Create", "a", "Qt", "application", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/qt.py#L123-L129
7,716
kwikteam/phy
phy/gui/qt.py
AsyncCaller.set
def set(self, f): """Call a function after a delay, unless another function is set in the meantime.""" self.stop() self._create_timer(f) self.start()
python
def set(self, f):
    """Call a function after a delay, unless another function is set in the meantime."""
    # Cancel any pending call before scheduling the new one, so only the
    # most recently set function ever fires.
    self.stop()
    self._create_timer(f)
    self.start()
[ "def", "set", "(", "self", ",", "f", ")", ":", "self", ".", "stop", "(", ")", "self", ".", "_create_timer", "(", "f", ")", "self", ".", "start", "(", ")" ]
Call a function after a delay, unless another function is set in the meantime.
[ "Call", "a", "function", "after", "a", "delay", "unless", "another", "function", "is", "set", "in", "the", "meantime", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/qt.py#L150-L155
7,717
kwikteam/phy
phy/gui/qt.py
AsyncCaller.stop
def stop(self): """Stop the current timer if there is one and cancel the async call.""" if self._timer: self._timer.stop() self._timer.deleteLater()
python
def stop(self):
    """Stop the current timer if there is one and cancel the async call."""
    if self._timer:
        self._timer.stop()
        # Hand the Qt timer object over for deletion once control returns
        # to the event loop.
        self._timer.deleteLater()
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "_timer", ":", "self", ".", "_timer", ".", "stop", "(", ")", "self", ".", "_timer", ".", "deleteLater", "(", ")" ]
Stop the current timer if there is one and cancel the async call.
[ "Stop", "the", "current", "timer", "if", "there", "is", "one", "and", "cancel", "the", "async", "call", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/qt.py#L162-L166
7,718
kwikteam/phy
phy/gui/actions.py
_wrap_callback_args
def _wrap_callback_args(f, docstring=None): # pragma: no cover """Display a Qt dialog when a function has arguments. The user can write function arguments as if it was a snippet. """ def wrapped(checked, *args): if args: return f(*args) if isinstance(f, partial): argspec = inspect.getargspec(f.func) else: argspec = inspect.getargspec(f) f_args = argspec.args if 'self' in f_args: f_args.remove('self') # Remove arguments with defaults from the list. if len(argspec.defaults or ()): f_args = f_args[:-len(argspec.defaults)] # Remove arguments supplied in a partial. if isinstance(f, partial): f_args = f_args[len(f.args):] f_args = [arg for arg in f_args if arg not in f.keywords] # If there are no remaining args, we can just run the fu nction. if not f_args: return f() # There are args, need to display the dialog. # Extract Example: `...` in the docstring to put a predefined text # in the input dialog. r = re.search('Example: `([^`]+)`', docstring) docstring_ = docstring[:r.start()].strip() if r else docstring text = r.group(1) if r else None s, ok = _input_dialog(getattr(f, '__name__', 'action'), docstring_, text) if not ok or not s: return # Parse user-supplied arguments and call the function. args = _parse_snippet(s) return f(*args) return wrapped
python
def _wrap_callback_args(f, docstring=None): # pragma: no cover """Display a Qt dialog when a function has arguments. The user can write function arguments as if it was a snippet. """ def wrapped(checked, *args): if args: return f(*args) if isinstance(f, partial): argspec = inspect.getargspec(f.func) else: argspec = inspect.getargspec(f) f_args = argspec.args if 'self' in f_args: f_args.remove('self') # Remove arguments with defaults from the list. if len(argspec.defaults or ()): f_args = f_args[:-len(argspec.defaults)] # Remove arguments supplied in a partial. if isinstance(f, partial): f_args = f_args[len(f.args):] f_args = [arg for arg in f_args if arg not in f.keywords] # If there are no remaining args, we can just run the fu nction. if not f_args: return f() # There are args, need to display the dialog. # Extract Example: `...` in the docstring to put a predefined text # in the input dialog. r = re.search('Example: `([^`]+)`', docstring) docstring_ = docstring[:r.start()].strip() if r else docstring text = r.group(1) if r else None s, ok = _input_dialog(getattr(f, '__name__', 'action'), docstring_, text) if not ok or not s: return # Parse user-supplied arguments and call the function. args = _parse_snippet(s) return f(*args) return wrapped
[ "def", "_wrap_callback_args", "(", "f", ",", "docstring", "=", "None", ")", ":", "# pragma: no cover", "def", "wrapped", "(", "checked", ",", "*", "args", ")", ":", "if", "args", ":", "return", "f", "(", "*", "args", ")", "if", "isinstance", "(", "f", ...
Display a Qt dialog when a function has arguments. The user can write function arguments as if it was a snippet.
[ "Display", "a", "Qt", "dialog", "when", "a", "function", "has", "arguments", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L60-L99
7,719
kwikteam/phy
phy/gui/actions.py
_get_shortcut_string
def _get_shortcut_string(shortcut): """Return a string representation of a shortcut.""" if shortcut is None: return '' if isinstance(shortcut, (tuple, list)): return ', '.join([_get_shortcut_string(s) for s in shortcut]) if isinstance(shortcut, string_types): if hasattr(QKeySequence, shortcut): shortcut = QKeySequence(getattr(QKeySequence, shortcut)) else: return shortcut.lower() assert isinstance(shortcut, QKeySequence) s = shortcut.toString() or '' return str(s).lower()
python
def _get_shortcut_string(shortcut): """Return a string representation of a shortcut.""" if shortcut is None: return '' if isinstance(shortcut, (tuple, list)): return ', '.join([_get_shortcut_string(s) for s in shortcut]) if isinstance(shortcut, string_types): if hasattr(QKeySequence, shortcut): shortcut = QKeySequence(getattr(QKeySequence, shortcut)) else: return shortcut.lower() assert isinstance(shortcut, QKeySequence) s = shortcut.toString() or '' return str(s).lower()
[ "def", "_get_shortcut_string", "(", "shortcut", ")", ":", "if", "shortcut", "is", "None", ":", "return", "''", "if", "isinstance", "(", "shortcut", ",", "(", "tuple", ",", "list", ")", ")", ":", "return", "', '", ".", "join", "(", "[", "_get_shortcut_str...
Return a string representation of a shortcut.
[ "Return", "a", "string", "representation", "of", "a", "shortcut", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L106-L119
7,720
kwikteam/phy
phy/gui/actions.py
_get_qkeysequence
def _get_qkeysequence(shortcut): """Return a QKeySequence or list of QKeySequence from a shortcut string.""" if shortcut is None: return [] if isinstance(shortcut, (tuple, list)): return [_get_qkeysequence(s) for s in shortcut] assert isinstance(shortcut, string_types) if hasattr(QKeySequence, shortcut): return QKeySequence(getattr(QKeySequence, shortcut)) sequence = QKeySequence.fromString(shortcut) assert not sequence.isEmpty() return sequence
python
def _get_qkeysequence(shortcut): """Return a QKeySequence or list of QKeySequence from a shortcut string.""" if shortcut is None: return [] if isinstance(shortcut, (tuple, list)): return [_get_qkeysequence(s) for s in shortcut] assert isinstance(shortcut, string_types) if hasattr(QKeySequence, shortcut): return QKeySequence(getattr(QKeySequence, shortcut)) sequence = QKeySequence.fromString(shortcut) assert not sequence.isEmpty() return sequence
[ "def", "_get_qkeysequence", "(", "shortcut", ")", ":", "if", "shortcut", "is", "None", ":", "return", "[", "]", "if", "isinstance", "(", "shortcut", ",", "(", "tuple", ",", "list", ")", ")", ":", "return", "[", "_get_qkeysequence", "(", "s", ")", "for"...
Return a QKeySequence or list of QKeySequence from a shortcut string.
[ "Return", "a", "QKeySequence", "or", "list", "of", "QKeySequence", "from", "a", "shortcut", "string", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L122-L133
7,721
kwikteam/phy
phy/gui/actions.py
_show_shortcuts
def _show_shortcuts(shortcuts, name=None): """Display shortcuts.""" name = name or '' print('') if name: name = ' for ' + name print('Keyboard shortcuts' + name) for name in sorted(shortcuts): shortcut = _get_shortcut_string(shortcuts[name]) if not name.startswith('_'): print('- {0:<40}: {1:s}'.format(name, shortcut))
python
def _show_shortcuts(shortcuts, name=None): """Display shortcuts.""" name = name or '' print('') if name: name = ' for ' + name print('Keyboard shortcuts' + name) for name in sorted(shortcuts): shortcut = _get_shortcut_string(shortcuts[name]) if not name.startswith('_'): print('- {0:<40}: {1:s}'.format(name, shortcut))
[ "def", "_show_shortcuts", "(", "shortcuts", ",", "name", "=", "None", ")", ":", "name", "=", "name", "or", "''", "print", "(", "''", ")", "if", "name", ":", "name", "=", "' for '", "+", "name", "print", "(", "'Keyboard shortcuts'", "+", "name", ")", ...
Display shortcuts.
[ "Display", "shortcuts", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L136-L146
7,722
kwikteam/phy
phy/gui/actions.py
Actions.add
def add(self, callback=None, name=None, shortcut=None, alias=None, docstring=None, menu=None, verbose=True): """Add an action with a keyboard shortcut.""" if callback is None: # Allow to use either add(func) or @add or @add(...). return partial(self.add, name=name, shortcut=shortcut, alias=alias, menu=menu) assert callback # Get the name from the callback function if needed. name = name or callback.__name__ alias = alias or _alias(name) name = name.replace('&', '') shortcut = shortcut or self._default_shortcuts.get(name, None) # Skip existing action. if name in self._actions_dict: return # Set the status tip from the function's docstring. docstring = docstring or callback.__doc__ or name docstring = re.sub(r'[ \t\r\f\v]{2,}', ' ', docstring.strip()) # Create and register the action. action = _create_qaction(self.gui, name, callback, shortcut, docstring=docstring, alias=alias, ) action_obj = Bunch(qaction=action, name=name, alias=alias, shortcut=shortcut, callback=callback, menu=menu) if verbose and not name.startswith('_'): logger.log(5, "Add action `%s` (%s).", name, _get_shortcut_string(action.shortcut())) self.gui.addAction(action) # Add the action to the menu. menu = menu or self.menu # Do not show private actions in the menu. if menu and not name.startswith('_'): self.gui.get_menu(menu).addAction(action) self._actions_dict[name] = action_obj # Register the alias -> name mapping. self._aliases[alias] = name # Set the callback method. if callback: setattr(self, name, callback)
python
def add(self, callback=None, name=None, shortcut=None, alias=None, docstring=None, menu=None, verbose=True): """Add an action with a keyboard shortcut.""" if callback is None: # Allow to use either add(func) or @add or @add(...). return partial(self.add, name=name, shortcut=shortcut, alias=alias, menu=menu) assert callback # Get the name from the callback function if needed. name = name or callback.__name__ alias = alias or _alias(name) name = name.replace('&', '') shortcut = shortcut or self._default_shortcuts.get(name, None) # Skip existing action. if name in self._actions_dict: return # Set the status tip from the function's docstring. docstring = docstring or callback.__doc__ or name docstring = re.sub(r'[ \t\r\f\v]{2,}', ' ', docstring.strip()) # Create and register the action. action = _create_qaction(self.gui, name, callback, shortcut, docstring=docstring, alias=alias, ) action_obj = Bunch(qaction=action, name=name, alias=alias, shortcut=shortcut, callback=callback, menu=menu) if verbose and not name.startswith('_'): logger.log(5, "Add action `%s` (%s).", name, _get_shortcut_string(action.shortcut())) self.gui.addAction(action) # Add the action to the menu. menu = menu or self.menu # Do not show private actions in the menu. if menu and not name.startswith('_'): self.gui.get_menu(menu).addAction(action) self._actions_dict[name] = action_obj # Register the alias -> name mapping. self._aliases[alias] = name # Set the callback method. if callback: setattr(self, name, callback)
[ "def", "add", "(", "self", ",", "callback", "=", "None", ",", "name", "=", "None", ",", "shortcut", "=", "None", ",", "alias", "=", "None", ",", "docstring", "=", "None", ",", "menu", "=", "None", ",", "verbose", "=", "True", ")", ":", "if", "cal...
Add an action with a keyboard shortcut.
[ "Add", "an", "action", "with", "a", "keyboard", "shortcut", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L200-L246
7,723
kwikteam/phy
phy/gui/actions.py
Actions.separator
def separator(self, menu=None): """Add a separator""" self.gui.get_menu(menu or self.menu).addSeparator()
python
def separator(self, menu=None): """Add a separator""" self.gui.get_menu(menu or self.menu).addSeparator()
[ "def", "separator", "(", "self", ",", "menu", "=", "None", ")", ":", "self", ".", "gui", ".", "get_menu", "(", "menu", "or", "self", ".", "menu", ")", ".", "addSeparator", "(", ")" ]
Add a separator
[ "Add", "a", "separator" ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L248-L250
7,724
kwikteam/phy
phy/gui/actions.py
Actions.disable
def disable(self, name=None): """Disable one or all actions.""" if name is None: for name in self._actions_dict: self.disable(name) return self._actions_dict[name].qaction.setEnabled(False)
python
def disable(self, name=None): """Disable one or all actions.""" if name is None: for name in self._actions_dict: self.disable(name) return self._actions_dict[name].qaction.setEnabled(False)
[ "def", "disable", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "for", "name", "in", "self", ".", "_actions_dict", ":", "self", ".", "disable", "(", "name", ")", "return", "self", ".", "_actions_dict", "[", "name",...
Disable one or all actions.
[ "Disable", "one", "or", "all", "actions", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L252-L258
7,725
kwikteam/phy
phy/gui/actions.py
Actions.enable
def enable(self, name=None): """Enable one or all actions.""" if name is None: for name in self._actions_dict: self.enable(name) return self._actions_dict[name].qaction.setEnabled(True)
python
def enable(self, name=None): """Enable one or all actions.""" if name is None: for name in self._actions_dict: self.enable(name) return self._actions_dict[name].qaction.setEnabled(True)
[ "def", "enable", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "for", "name", "in", "self", ".", "_actions_dict", ":", "self", ".", "enable", "(", "name", ")", "return", "self", ".", "_actions_dict", "[", "name", ...
Enable one or all actions.
[ "Enable", "one", "or", "all", "actions", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L260-L266
7,726
kwikteam/phy
phy/gui/actions.py
Actions.run
def run(self, name, *args): """Run an action as specified by its name.""" assert isinstance(name, string_types) # Resolve the alias if it is an alias. name = self._aliases.get(name, name) # Get the action. action = self._actions_dict.get(name, None) if not action: raise ValueError("Action `{}` doesn't exist.".format(name)) if not name.startswith('_'): logger.debug("Execute action `%s`.", name) return action.callback(*args)
python
def run(self, name, *args): """Run an action as specified by its name.""" assert isinstance(name, string_types) # Resolve the alias if it is an alias. name = self._aliases.get(name, name) # Get the action. action = self._actions_dict.get(name, None) if not action: raise ValueError("Action `{}` doesn't exist.".format(name)) if not name.startswith('_'): logger.debug("Execute action `%s`.", name) return action.callback(*args)
[ "def", "run", "(", "self", ",", "name", ",", "*", "args", ")", ":", "assert", "isinstance", "(", "name", ",", "string_types", ")", "# Resolve the alias if it is an alias.", "name", "=", "self", ".", "_aliases", ".", "get", "(", "name", ",", "name", ")", ...
Run an action as specified by its name.
[ "Run", "an", "action", "as", "specified", "by", "its", "name", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L272-L283
7,727
kwikteam/phy
phy/gui/actions.py
Actions.remove
def remove(self, name): """Remove an action.""" self.gui.removeAction(self._actions_dict[name].qaction) del self._actions_dict[name] delattr(self, name)
python
def remove(self, name): """Remove an action.""" self.gui.removeAction(self._actions_dict[name].qaction) del self._actions_dict[name] delattr(self, name)
[ "def", "remove", "(", "self", ",", "name", ")", ":", "self", ".", "gui", ".", "removeAction", "(", "self", ".", "_actions_dict", "[", "name", "]", ".", "qaction", ")", "del", "self", ".", "_actions_dict", "[", "name", "]", "delattr", "(", "self", ","...
Remove an action.
[ "Remove", "an", "action", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L285-L289
7,728
kwikteam/phy
phy/gui/actions.py
Actions.remove_all
def remove_all(self): """Remove all actions.""" names = sorted(self._actions_dict.keys()) for name in names: self.remove(name)
python
def remove_all(self): """Remove all actions.""" names = sorted(self._actions_dict.keys()) for name in names: self.remove(name)
[ "def", "remove_all", "(", "self", ")", ":", "names", "=", "sorted", "(", "self", ".", "_actions_dict", ".", "keys", "(", ")", ")", "for", "name", "in", "names", ":", "self", ".", "remove", "(", "name", ")" ]
Remove all actions.
[ "Remove", "all", "actions", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L291-L295
7,729
kwikteam/phy
phy/gui/actions.py
Actions.shortcuts
def shortcuts(self): """A dictionary of action shortcuts.""" return {name: action.shortcut for name, action in self._actions_dict.items()}
python
def shortcuts(self): """A dictionary of action shortcuts.""" return {name: action.shortcut for name, action in self._actions_dict.items()}
[ "def", "shortcuts", "(", "self", ")", ":", "return", "{", "name", ":", "action", ".", "shortcut", "for", "name", ",", "action", "in", "self", ".", "_actions_dict", ".", "items", "(", ")", "}" ]
A dictionary of action shortcuts.
[ "A", "dictionary", "of", "action", "shortcuts", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L298-L301
7,730
kwikteam/phy
phy/gui/actions.py
Actions.show_shortcuts
def show_shortcuts(self): """Print all shortcuts.""" gui_name = self.gui.name actions_name = self.name name = ('{} - {}'.format(gui_name, actions_name) if actions_name else gui_name) _show_shortcuts(self.shortcuts, name)
python
def show_shortcuts(self): """Print all shortcuts.""" gui_name = self.gui.name actions_name = self.name name = ('{} - {}'.format(gui_name, actions_name) if actions_name else gui_name) _show_shortcuts(self.shortcuts, name)
[ "def", "show_shortcuts", "(", "self", ")", ":", "gui_name", "=", "self", ".", "gui", ".", "name", "actions_name", "=", "self", ".", "name", "name", "=", "(", "'{} - {}'", ".", "format", "(", "gui_name", ",", "actions_name", ")", "if", "actions_name", "el...
Print all shortcuts.
[ "Print", "all", "shortcuts", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L303-L309
7,731
kwikteam/phy
phy/gui/actions.py
Snippets.command
def command(self): """This is used to write a snippet message in the status bar. A cursor is appended at the end. """ msg = self.gui.status_message n = len(msg) n_cur = len(self.cursor) return msg[:n - n_cur]
python
def command(self): """This is used to write a snippet message in the status bar. A cursor is appended at the end. """ msg = self.gui.status_message n = len(msg) n_cur = len(self.cursor) return msg[:n - n_cur]
[ "def", "command", "(", "self", ")", ":", "msg", "=", "self", ".", "gui", ".", "status_message", "n", "=", "len", "(", "msg", ")", "n_cur", "=", "len", "(", "self", ".", "cursor", ")", "return", "msg", "[", ":", "n", "-", "n_cur", "]" ]
This is used to write a snippet message in the status bar. A cursor is appended at the end.
[ "This", "is", "used", "to", "write", "a", "snippet", "message", "in", "the", "status", "bar", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L372-L381
7,732
kwikteam/phy
phy/gui/actions.py
Snippets._backspace
def _backspace(self): """Erase the last character in the snippet command.""" if self.command == ':': return logger.log(5, "Snippet keystroke `Backspace`.") self.command = self.command[:-1]
python
def _backspace(self): """Erase the last character in the snippet command.""" if self.command == ':': return logger.log(5, "Snippet keystroke `Backspace`.") self.command = self.command[:-1]
[ "def", "_backspace", "(", "self", ")", ":", "if", "self", ".", "command", "==", "':'", ":", "return", "logger", ".", "log", "(", "5", ",", "\"Snippet keystroke `Backspace`.\"", ")", "self", ".", "command", "=", "self", ".", "command", "[", ":", "-", "1...
Erase the last character in the snippet command.
[ "Erase", "the", "last", "character", "in", "the", "snippet", "command", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L390-L395
7,733
kwikteam/phy
phy/gui/actions.py
Snippets._enter
def _enter(self): """Disable the snippet mode and execute the command.""" command = self.command logger.log(5, "Snippet keystroke `Enter`.") # NOTE: we need to set back the actions (mode_off) before running # the command. self.mode_off() self.run(command)
python
def _enter(self): """Disable the snippet mode and execute the command.""" command = self.command logger.log(5, "Snippet keystroke `Enter`.") # NOTE: we need to set back the actions (mode_off) before running # the command. self.mode_off() self.run(command)
[ "def", "_enter", "(", "self", ")", ":", "command", "=", "self", ".", "command", "logger", ".", "log", "(", "5", ",", "\"Snippet keystroke `Enter`.\"", ")", "# NOTE: we need to set back the actions (mode_off) before running", "# the command.", "self", ".", "mode_off", ...
Disable the snippet mode and execute the command.
[ "Disable", "the", "snippet", "mode", "and", "execute", "the", "command", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L397-L404
7,734
kwikteam/phy
phy/gui/actions.py
Snippets._create_snippet_actions
def _create_snippet_actions(self): """Add mock Qt actions for snippet keystrokes. Used to enable snippet mode. """ # One action per allowed character. for i, char in enumerate(self._snippet_chars): def _make_func(char): def callback(): logger.log(5, "Snippet keystroke `%s`.", char) self.command += char return callback self.actions.add(name='_snippet_{}'.format(i), shortcut=char, callback=_make_func(char)) self.actions.add(name='_snippet_backspace', shortcut='backspace', callback=self._backspace) self.actions.add(name='_snippet_activate', shortcut=('enter', 'return'), callback=self._enter) self.actions.add(name='_snippet_disable', shortcut='escape', callback=self.mode_off)
python
def _create_snippet_actions(self): """Add mock Qt actions for snippet keystrokes. Used to enable snippet mode. """ # One action per allowed character. for i, char in enumerate(self._snippet_chars): def _make_func(char): def callback(): logger.log(5, "Snippet keystroke `%s`.", char) self.command += char return callback self.actions.add(name='_snippet_{}'.format(i), shortcut=char, callback=_make_func(char)) self.actions.add(name='_snippet_backspace', shortcut='backspace', callback=self._backspace) self.actions.add(name='_snippet_activate', shortcut=('enter', 'return'), callback=self._enter) self.actions.add(name='_snippet_disable', shortcut='escape', callback=self.mode_off)
[ "def", "_create_snippet_actions", "(", "self", ")", ":", "# One action per allowed character.", "for", "i", ",", "char", "in", "enumerate", "(", "self", ".", "_snippet_chars", ")", ":", "def", "_make_func", "(", "char", ")", ":", "def", "callback", "(", ")", ...
Add mock Qt actions for snippet keystrokes. Used to enable snippet mode.
[ "Add", "mock", "Qt", "actions", "for", "snippet", "keystrokes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L406-L433
7,735
kwikteam/phy
phy/gui/actions.py
Snippets.run
def run(self, snippet): """Executes a snippet command. May be overridden. """ assert snippet[0] == ':' snippet = snippet[1:] snippet_args = _parse_snippet(snippet) name = snippet_args[0] logger.info("Processing snippet `%s`.", snippet) try: # Try to run the snippet on all attached Actions instances. for actions in self.gui.actions: try: actions.run(name, *snippet_args[1:]) return except ValueError: # This Actions instance doesn't contain the requested # snippet, trying the next attached Actions instance. pass logger.warn("Couldn't find action `%s`.", name) except Exception as e: logger.warn("Error when executing snippet: \"%s\".", str(e)) logger.debug(''.join(traceback.format_exception(*sys.exc_info())))
python
def run(self, snippet): """Executes a snippet command. May be overridden. """ assert snippet[0] == ':' snippet = snippet[1:] snippet_args = _parse_snippet(snippet) name = snippet_args[0] logger.info("Processing snippet `%s`.", snippet) try: # Try to run the snippet on all attached Actions instances. for actions in self.gui.actions: try: actions.run(name, *snippet_args[1:]) return except ValueError: # This Actions instance doesn't contain the requested # snippet, trying the next attached Actions instance. pass logger.warn("Couldn't find action `%s`.", name) except Exception as e: logger.warn("Error when executing snippet: \"%s\".", str(e)) logger.debug(''.join(traceback.format_exception(*sys.exc_info())))
[ "def", "run", "(", "self", ",", "snippet", ")", ":", "assert", "snippet", "[", "0", "]", "==", "':'", "snippet", "=", "snippet", "[", "1", ":", "]", "snippet_args", "=", "_parse_snippet", "(", "snippet", ")", "name", "=", "snippet_args", "[", "0", "]...
Executes a snippet command. May be overridden.
[ "Executes", "a", "snippet", "command", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/gui/actions.py#L435-L460
7,736
kwikteam/phy
phy/traces/waveform.py
_before_after
def _before_after(n_samples): """Get the number of samples before and after.""" if not isinstance(n_samples, (tuple, list)): before = n_samples // 2 after = n_samples - before else: assert len(n_samples) == 2 before, after = n_samples n_samples = before + after assert before >= 0 assert after >= 0 assert before + after == n_samples return before, after
python
def _before_after(n_samples): """Get the number of samples before and after.""" if not isinstance(n_samples, (tuple, list)): before = n_samples // 2 after = n_samples - before else: assert len(n_samples) == 2 before, after = n_samples n_samples = before + after assert before >= 0 assert after >= 0 assert before + after == n_samples return before, after
[ "def", "_before_after", "(", "n_samples", ")", ":", "if", "not", "isinstance", "(", "n_samples", ",", "(", "tuple", ",", "list", ")", ")", ":", "before", "=", "n_samples", "//", "2", "after", "=", "n_samples", "-", "before", "else", ":", "assert", "len...
Get the number of samples before and after.
[ "Get", "the", "number", "of", "samples", "before", "and", "after", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/waveform.py#L149-L161
7,737
kwikteam/phy
phy/traces/waveform.py
_slice
def _slice(index, n_samples, margin=None): """Return a waveform slice.""" if margin is None: margin = (0, 0) assert isinstance(n_samples, (tuple, list)) assert len(n_samples) == 2 before, after = n_samples assert isinstance(margin, (tuple, list)) assert len(margin) == 2 margin_before, margin_after = margin before += margin_before after += margin_after index = int(index) before = int(before) after = int(after) return slice(max(0, index - before), index + after, None)
python
def _slice(index, n_samples, margin=None): """Return a waveform slice.""" if margin is None: margin = (0, 0) assert isinstance(n_samples, (tuple, list)) assert len(n_samples) == 2 before, after = n_samples assert isinstance(margin, (tuple, list)) assert len(margin) == 2 margin_before, margin_after = margin before += margin_before after += margin_after index = int(index) before = int(before) after = int(after) return slice(max(0, index - before), index + after, None)
[ "def", "_slice", "(", "index", ",", "n_samples", ",", "margin", "=", "None", ")", ":", "if", "margin", "is", "None", ":", "margin", "=", "(", "0", ",", "0", ")", "assert", "isinstance", "(", "n_samples", ",", "(", "tuple", ",", "list", ")", ")", ...
Return a waveform slice.
[ "Return", "a", "waveform", "slice", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/waveform.py#L164-L179
7,738
kwikteam/phy
phy/traces/waveform.py
WaveformLoader._load_at
def _load_at(self, time, channels=None): """Load a waveform at a given time.""" if channels is None: channels = slice(None, None, None) time = int(time) time_o = time ns = self.n_samples_trace if not (0 <= time_o < ns): raise ValueError("Invalid time {0:d}/{1:d}.".format(time_o, ns)) slice_extract = _slice(time_o, self.n_samples_before_after, self._filter_margin) extract = self._traces[slice_extract][:, channels].astype(np.float32) # Pad the extracted chunk if needed. if slice_extract.start <= 0: extract = _pad(extract, self._n_samples_extract, 'left') elif slice_extract.stop >= ns - 1: extract = _pad(extract, self._n_samples_extract, 'right') assert extract.shape[0] == self._n_samples_extract return extract
python
def _load_at(self, time, channels=None): """Load a waveform at a given time.""" if channels is None: channels = slice(None, None, None) time = int(time) time_o = time ns = self.n_samples_trace if not (0 <= time_o < ns): raise ValueError("Invalid time {0:d}/{1:d}.".format(time_o, ns)) slice_extract = _slice(time_o, self.n_samples_before_after, self._filter_margin) extract = self._traces[slice_extract][:, channels].astype(np.float32) # Pad the extracted chunk if needed. if slice_extract.start <= 0: extract = _pad(extract, self._n_samples_extract, 'left') elif slice_extract.stop >= ns - 1: extract = _pad(extract, self._n_samples_extract, 'right') assert extract.shape[0] == self._n_samples_extract return extract
[ "def", "_load_at", "(", "self", ",", "time", ",", "channels", "=", "None", ")", ":", "if", "channels", "is", "None", ":", "channels", "=", "slice", "(", "None", ",", "None", ",", "None", ")", "time", "=", "int", "(", "time", ")", "time_o", "=", "...
Load a waveform at a given time.
[ "Load", "a", "waveform", "at", "a", "given", "time", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/waveform.py#L248-L269
7,739
kwikteam/phy
phy/traces/waveform.py
WaveformLoader.get
def get(self, spike_ids, channels=None): """Load the waveforms of the specified spikes.""" if isinstance(spike_ids, slice): spike_ids = _range_from_slice(spike_ids, start=0, stop=self.n_spikes, ) if not hasattr(spike_ids, '__len__'): spike_ids = [spike_ids] if channels is None: channels = slice(None, None, None) nc = self.n_channels else: channels = np.asarray(channels, dtype=np.int32) assert np.all(channels < self.n_channels) nc = len(channels) # Ensure a list of time samples are being requested. spike_ids = _as_array(spike_ids) n_spikes = len(spike_ids) # Initialize the array. # NOTE: last dimension is time to simplify things. shape = (n_spikes, nc, self._n_samples_extract) waveforms = np.zeros(shape, dtype=np.float32) # No traces: return null arrays. if self.n_samples_trace == 0: return np.transpose(waveforms, (0, 2, 1)) # Load all spikes. for i, spike_id in enumerate(spike_ids): assert 0 <= spike_id < self.n_spikes time = self._spike_samples[spike_id] # Extract the waveforms on the unmasked channels. try: w = self._load_at(time, channels) except ValueError as e: # pragma: no cover logger.warn("Error while loading waveform: %s", str(e)) continue assert w.shape == (self._n_samples_extract, nc) waveforms[i, :, :] = w.T # Filter the waveforms. waveforms_f = waveforms.reshape((-1, self._n_samples_extract)) # Only filter the non-zero waveforms. unmasked = waveforms_f.max(axis=1) != 0 waveforms_f[unmasked] = self._filter(waveforms_f[unmasked], axis=1) waveforms_f = waveforms_f.reshape((n_spikes, nc, self._n_samples_extract)) # Remove the margin. margin_before, margin_after = self._filter_margin if margin_after > 0: assert margin_before >= 0 waveforms_f = waveforms_f[:, :, margin_before:-margin_after] assert waveforms_f.shape == (n_spikes, nc, self.n_samples_waveforms, ) # NOTE: we transpose before returning the array. return np.transpose(waveforms_f, (0, 2, 1))
python
def get(self, spike_ids, channels=None): """Load the waveforms of the specified spikes.""" if isinstance(spike_ids, slice): spike_ids = _range_from_slice(spike_ids, start=0, stop=self.n_spikes, ) if not hasattr(spike_ids, '__len__'): spike_ids = [spike_ids] if channels is None: channels = slice(None, None, None) nc = self.n_channels else: channels = np.asarray(channels, dtype=np.int32) assert np.all(channels < self.n_channels) nc = len(channels) # Ensure a list of time samples are being requested. spike_ids = _as_array(spike_ids) n_spikes = len(spike_ids) # Initialize the array. # NOTE: last dimension is time to simplify things. shape = (n_spikes, nc, self._n_samples_extract) waveforms = np.zeros(shape, dtype=np.float32) # No traces: return null arrays. if self.n_samples_trace == 0: return np.transpose(waveforms, (0, 2, 1)) # Load all spikes. for i, spike_id in enumerate(spike_ids): assert 0 <= spike_id < self.n_spikes time = self._spike_samples[spike_id] # Extract the waveforms on the unmasked channels. try: w = self._load_at(time, channels) except ValueError as e: # pragma: no cover logger.warn("Error while loading waveform: %s", str(e)) continue assert w.shape == (self._n_samples_extract, nc) waveforms[i, :, :] = w.T # Filter the waveforms. waveforms_f = waveforms.reshape((-1, self._n_samples_extract)) # Only filter the non-zero waveforms. unmasked = waveforms_f.max(axis=1) != 0 waveforms_f[unmasked] = self._filter(waveforms_f[unmasked], axis=1) waveforms_f = waveforms_f.reshape((n_spikes, nc, self._n_samples_extract)) # Remove the margin. margin_before, margin_after = self._filter_margin if margin_after > 0: assert margin_before >= 0 waveforms_f = waveforms_f[:, :, margin_before:-margin_after] assert waveforms_f.shape == (n_spikes, nc, self.n_samples_waveforms, ) # NOTE: we transpose before returning the array. return np.transpose(waveforms_f, (0, 2, 1))
[ "def", "get", "(", "self", ",", "spike_ids", ",", "channels", "=", "None", ")", ":", "if", "isinstance", "(", "spike_ids", ",", "slice", ")", ":", "spike_ids", "=", "_range_from_slice", "(", "spike_ids", ",", "start", "=", "0", ",", "stop", "=", "self"...
Load the waveforms of the specified spikes.
[ "Load", "the", "waveforms", "of", "the", "specified", "spikes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/waveform.py#L271-L337
7,740
kwikteam/phy
phy/stats/clusters.py
get_waveform_amplitude
def get_waveform_amplitude(mean_masks, mean_waveforms): """Return the amplitude of the waveforms on all channels.""" assert mean_waveforms.ndim == 2 n_samples, n_channels = mean_waveforms.shape assert mean_masks.ndim == 1 assert mean_masks.shape == (n_channels,) mean_waveforms = mean_waveforms * mean_masks assert mean_waveforms.shape == (n_samples, n_channels) # Amplitudes. m, M = mean_waveforms.min(axis=0), mean_waveforms.max(axis=0) return M - m
python
def get_waveform_amplitude(mean_masks, mean_waveforms): """Return the amplitude of the waveforms on all channels.""" assert mean_waveforms.ndim == 2 n_samples, n_channels = mean_waveforms.shape assert mean_masks.ndim == 1 assert mean_masks.shape == (n_channels,) mean_waveforms = mean_waveforms * mean_masks assert mean_waveforms.shape == (n_samples, n_channels) # Amplitudes. m, M = mean_waveforms.min(axis=0), mean_waveforms.max(axis=0) return M - m
[ "def", "get_waveform_amplitude", "(", "mean_masks", ",", "mean_waveforms", ")", ":", "assert", "mean_waveforms", ".", "ndim", "==", "2", "n_samples", ",", "n_channels", "=", "mean_waveforms", ".", "shape", "assert", "mean_masks", ".", "ndim", "==", "1", "assert"...
Return the amplitude of the waveforms on all channels.
[ "Return", "the", "amplitude", "of", "the", "waveforms", "on", "all", "channels", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/stats/clusters.py#L41-L55
7,741
kwikteam/phy
phy/stats/clusters.py
get_mean_masked_features_distance
def get_mean_masked_features_distance(mean_features_0, mean_features_1, mean_masks_0, mean_masks_1, n_features_per_channel=None, ): """Compute the distance between the mean masked features.""" assert n_features_per_channel > 0 mu_0 = mean_features_0.ravel() mu_1 = mean_features_1.ravel() omeg_0 = mean_masks_0 omeg_1 = mean_masks_1 omeg_0 = np.repeat(omeg_0, n_features_per_channel) omeg_1 = np.repeat(omeg_1, n_features_per_channel) d_0 = mu_0 * omeg_0 d_1 = mu_1 * omeg_1 return np.linalg.norm(d_0 - d_1)
python
def get_mean_masked_features_distance(mean_features_0, mean_features_1, mean_masks_0, mean_masks_1, n_features_per_channel=None, ): """Compute the distance between the mean masked features.""" assert n_features_per_channel > 0 mu_0 = mean_features_0.ravel() mu_1 = mean_features_1.ravel() omeg_0 = mean_masks_0 omeg_1 = mean_masks_1 omeg_0 = np.repeat(omeg_0, n_features_per_channel) omeg_1 = np.repeat(omeg_1, n_features_per_channel) d_0 = mu_0 * omeg_0 d_1 = mu_1 * omeg_1 return np.linalg.norm(d_0 - d_1)
[ "def", "get_mean_masked_features_distance", "(", "mean_features_0", ",", "mean_features_1", ",", "mean_masks_0", ",", "mean_masks_1", ",", "n_features_per_channel", "=", "None", ",", ")", ":", "assert", "n_features_per_channel", ">", "0", "mu_0", "=", "mean_features_0",...
Compute the distance between the mean masked features.
[ "Compute", "the", "distance", "between", "the", "mean", "masked", "features", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/stats/clusters.py#L58-L80
7,742
kwikteam/phy
phy/cluster/clustering.py
_extend_spikes
def _extend_spikes(spike_ids, spike_clusters): """Return all spikes belonging to the clusters containing the specified spikes.""" # We find the spikes belonging to modified clusters. # What are the old clusters that are modified by the assignment? old_spike_clusters = spike_clusters[spike_ids] unique_clusters = _unique(old_spike_clusters) # Now we take all spikes from these clusters. changed_spike_ids = _spikes_in_clusters(spike_clusters, unique_clusters) # These are the new spikes that need to be reassigned. extended_spike_ids = np.setdiff1d(changed_spike_ids, spike_ids, assume_unique=True) return extended_spike_ids
python
def _extend_spikes(spike_ids, spike_clusters): """Return all spikes belonging to the clusters containing the specified spikes.""" # We find the spikes belonging to modified clusters. # What are the old clusters that are modified by the assignment? old_spike_clusters = spike_clusters[spike_ids] unique_clusters = _unique(old_spike_clusters) # Now we take all spikes from these clusters. changed_spike_ids = _spikes_in_clusters(spike_clusters, unique_clusters) # These are the new spikes that need to be reassigned. extended_spike_ids = np.setdiff1d(changed_spike_ids, spike_ids, assume_unique=True) return extended_spike_ids
[ "def", "_extend_spikes", "(", "spike_ids", ",", "spike_clusters", ")", ":", "# We find the spikes belonging to modified clusters.", "# What are the old clusters that are modified by the assignment?", "old_spike_clusters", "=", "spike_clusters", "[", "spike_ids", "]", "unique_clusters...
Return all spikes belonging to the clusters containing the specified spikes.
[ "Return", "all", "spikes", "belonging", "to", "the", "clusters", "containing", "the", "specified", "spikes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L29-L41
7,743
kwikteam/phy
phy/cluster/clustering.py
Clustering.reset
def reset(self): """Reset the clustering to the original clustering. All changes are lost. """ self._undo_stack.clear() self._spike_clusters = self._spike_clusters_base self._new_cluster_id = self._new_cluster_id_0
python
def reset(self): """Reset the clustering to the original clustering. All changes are lost. """ self._undo_stack.clear() self._spike_clusters = self._spike_clusters_base self._new_cluster_id = self._new_cluster_id_0
[ "def", "reset", "(", "self", ")", ":", "self", ".", "_undo_stack", ".", "clear", "(", ")", "self", ".", "_spike_clusters", "=", "self", ".", "_spike_clusters_base", "self", ".", "_new_cluster_id", "=", "self", ".", "_new_cluster_id_0" ]
Reset the clustering to the original clustering. All changes are lost.
[ "Reset", "the", "clustering", "to", "the", "original", "clustering", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L178-L186
7,744
kwikteam/phy
phy/cluster/clustering.py
Clustering._do_assign
def _do_assign(self, spike_ids, new_spike_clusters): """Make spike-cluster assignments after the spike selection has been extended to full clusters.""" # Ensure spike_clusters has the right shape. spike_ids = _as_array(spike_ids) if len(new_spike_clusters) == 1 and len(spike_ids) > 1: new_spike_clusters = (np.ones(len(spike_ids), dtype=np.int64) * new_spike_clusters[0]) old_spike_clusters = self._spike_clusters[spike_ids] assert len(spike_ids) == len(old_spike_clusters) assert len(new_spike_clusters) == len(spike_ids) # Update the spikes per cluster structure. old_clusters = _unique(old_spike_clusters) # NOTE: shortcut to a merge if this assignment is effectively a merge # i.e. if all spikes are assigned to a single cluster. # The fact that spike selection has been previously extended to # whole clusters is critical here. new_clusters = _unique(new_spike_clusters) if len(new_clusters) == 1: return self._do_merge(spike_ids, old_clusters, new_clusters[0]) # We return the UpdateInfo structure. up = _assign_update_info(spike_ids, old_spike_clusters, new_spike_clusters) # We update the new cluster id (strictly increasing during a session). self._new_cluster_id = max(self._new_cluster_id, max(up.added) + 1) # We make the assignments. self._spike_clusters[spike_ids] = new_spike_clusters # OPTIM: we update spikes_per_cluster manually. new_spc = _spikes_per_cluster(new_spike_clusters, spike_ids) self._update_cluster_ids(to_remove=old_clusters, to_add=new_spc) return up
python
def _do_assign(self, spike_ids, new_spike_clusters): """Make spike-cluster assignments after the spike selection has been extended to full clusters.""" # Ensure spike_clusters has the right shape. spike_ids = _as_array(spike_ids) if len(new_spike_clusters) == 1 and len(spike_ids) > 1: new_spike_clusters = (np.ones(len(spike_ids), dtype=np.int64) * new_spike_clusters[0]) old_spike_clusters = self._spike_clusters[spike_ids] assert len(spike_ids) == len(old_spike_clusters) assert len(new_spike_clusters) == len(spike_ids) # Update the spikes per cluster structure. old_clusters = _unique(old_spike_clusters) # NOTE: shortcut to a merge if this assignment is effectively a merge # i.e. if all spikes are assigned to a single cluster. # The fact that spike selection has been previously extended to # whole clusters is critical here. new_clusters = _unique(new_spike_clusters) if len(new_clusters) == 1: return self._do_merge(spike_ids, old_clusters, new_clusters[0]) # We return the UpdateInfo structure. up = _assign_update_info(spike_ids, old_spike_clusters, new_spike_clusters) # We update the new cluster id (strictly increasing during a session). self._new_cluster_id = max(self._new_cluster_id, max(up.added) + 1) # We make the assignments. self._spike_clusters[spike_ids] = new_spike_clusters # OPTIM: we update spikes_per_cluster manually. new_spc = _spikes_per_cluster(new_spike_clusters, spike_ids) self._update_cluster_ids(to_remove=old_clusters, to_add=new_spc) return up
[ "def", "_do_assign", "(", "self", ",", "spike_ids", ",", "new_spike_clusters", ")", ":", "# Ensure spike_clusters has the right shape.", "spike_ids", "=", "_as_array", "(", "spike_ids", ")", "if", "len", "(", "new_spike_clusters", ")", "==", "1", "and", "len", "("...
Make spike-cluster assignments after the spike selection has been extended to full clusters.
[ "Make", "spike", "-", "cluster", "assignments", "after", "the", "spike", "selection", "has", "been", "extended", "to", "full", "clusters", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L258-L296
7,745
kwikteam/phy
phy/cluster/clustering.py
Clustering.merge
def merge(self, cluster_ids, to=None): """Merge several clusters to a new cluster. Parameters ---------- cluster_ids : array-like List of clusters to merge. to : integer or None The id of the new cluster. By default, this is `new_cluster_id()`. Returns ------- up : UpdateInfo instance """ if not _is_array_like(cluster_ids): raise ValueError("The first argument should be a list or " "an array.") cluster_ids = sorted(cluster_ids) if not set(cluster_ids) <= set(self.cluster_ids): raise ValueError("Some clusters do not exist.") # Find the new cluster number. if to is None: to = self.new_cluster_id() if to < self.new_cluster_id(): raise ValueError("The new cluster numbers should be higher than " "{0}.".format(self.new_cluster_id())) # NOTE: we could have called self.assign() here, but we don't. # We circumvent self.assign() for performance reasons. # assign() is a relatively costly operation, whereas merging is a much # cheaper operation. # Find all spikes in the specified clusters. spike_ids = _spikes_in_clusters(self.spike_clusters, cluster_ids) up = self._do_merge(spike_ids, cluster_ids, to) undo_state = self.emit('request_undo_state', up) # Add to stack. self._undo_stack.add((spike_ids, [to], undo_state)) self.emit('cluster', up) return up
python
def merge(self, cluster_ids, to=None): """Merge several clusters to a new cluster. Parameters ---------- cluster_ids : array-like List of clusters to merge. to : integer or None The id of the new cluster. By default, this is `new_cluster_id()`. Returns ------- up : UpdateInfo instance """ if not _is_array_like(cluster_ids): raise ValueError("The first argument should be a list or " "an array.") cluster_ids = sorted(cluster_ids) if not set(cluster_ids) <= set(self.cluster_ids): raise ValueError("Some clusters do not exist.") # Find the new cluster number. if to is None: to = self.new_cluster_id() if to < self.new_cluster_id(): raise ValueError("The new cluster numbers should be higher than " "{0}.".format(self.new_cluster_id())) # NOTE: we could have called self.assign() here, but we don't. # We circumvent self.assign() for performance reasons. # assign() is a relatively costly operation, whereas merging is a much # cheaper operation. # Find all spikes in the specified clusters. spike_ids = _spikes_in_clusters(self.spike_clusters, cluster_ids) up = self._do_merge(spike_ids, cluster_ids, to) undo_state = self.emit('request_undo_state', up) # Add to stack. self._undo_stack.add((spike_ids, [to], undo_state)) self.emit('cluster', up) return up
[ "def", "merge", "(", "self", ",", "cluster_ids", ",", "to", "=", "None", ")", ":", "if", "not", "_is_array_like", "(", "cluster_ids", ")", ":", "raise", "ValueError", "(", "\"The first argument should be a list or \"", "\"an array.\"", ")", "cluster_ids", "=", "...
Merge several clusters to a new cluster. Parameters ---------- cluster_ids : array-like List of clusters to merge. to : integer or None The id of the new cluster. By default, this is `new_cluster_id()`. Returns ------- up : UpdateInfo instance
[ "Merge", "several", "clusters", "to", "a", "new", "cluster", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L320-L368
7,746
kwikteam/phy
phy/cluster/clustering.py
Clustering.assign
def assign(self, spike_ids, spike_clusters_rel=0): """Make new spike cluster assignments. Parameters ---------- spike_ids : array-like List of spike ids. spike_clusters_rel : array-like Relative cluster ids of the spikes in `spike_ids`. This must have the same size as `spike_ids`. Returns ------- up : UpdateInfo instance Note ---- `spike_clusters_rel` contain *relative* cluster indices. Their values don't matter: what matters is whether two give spikes should end up in the same cluster or not. Adding a constant number to all elements in `spike_clusters_rel` results in exactly the same operation. The final cluster ids are automatically generated by the `Clustering` class. This is because we must ensure that all modified clusters get brand new ids. The whole library is based on the assumption that cluster ids are unique and "disposable". Changing a cluster always results in a new cluster id being assigned. If a spike is assigned to a new cluster, then all other spikes belonging to the same cluster are assigned to a brand new cluster, even if they were not changed explicitely by the `assign()` method. In other words, the list of spikes affected by an `assign()` is almost always a strict superset of the `spike_ids` parameter. The only case where this is not true is when whole clusters change: this is called a merge. It is implemented in a separate `merge()` method because it is logically much simpler, and faster to execute. """ assert not isinstance(spike_ids, slice) # Ensure `spike_clusters_rel` is an array-like. if not hasattr(spike_clusters_rel, '__len__'): spike_clusters_rel = spike_clusters_rel * np.ones(len(spike_ids), dtype=np.int64) spike_ids = _as_array(spike_ids) if len(spike_ids) == 0: return UpdateInfo() assert len(spike_ids) == len(spike_clusters_rel) assert spike_ids.min() >= 0 assert spike_ids.max() < self._n_spikes, "Some spikes don't exist." # Normalize the spike-cluster assignment such that # there are only new or dead clusters, not modified clusters. 
# This implies that spikes not explicitly selected, but that # belong to clusters affected by the operation, will be assigned # to brand new clusters. spike_ids, cluster_ids = _extend_assignment(spike_ids, self._spike_clusters, spike_clusters_rel, self.new_cluster_id(), ) up = self._do_assign(spike_ids, cluster_ids) undo_state = self.emit('request_undo_state', up) # Add the assignment to the undo stack. self._undo_stack.add((spike_ids, cluster_ids, undo_state)) self.emit('cluster', up) return up
python
def assign(self, spike_ids, spike_clusters_rel=0): """Make new spike cluster assignments. Parameters ---------- spike_ids : array-like List of spike ids. spike_clusters_rel : array-like Relative cluster ids of the spikes in `spike_ids`. This must have the same size as `spike_ids`. Returns ------- up : UpdateInfo instance Note ---- `spike_clusters_rel` contain *relative* cluster indices. Their values don't matter: what matters is whether two give spikes should end up in the same cluster or not. Adding a constant number to all elements in `spike_clusters_rel` results in exactly the same operation. The final cluster ids are automatically generated by the `Clustering` class. This is because we must ensure that all modified clusters get brand new ids. The whole library is based on the assumption that cluster ids are unique and "disposable". Changing a cluster always results in a new cluster id being assigned. If a spike is assigned to a new cluster, then all other spikes belonging to the same cluster are assigned to a brand new cluster, even if they were not changed explicitely by the `assign()` method. In other words, the list of spikes affected by an `assign()` is almost always a strict superset of the `spike_ids` parameter. The only case where this is not true is when whole clusters change: this is called a merge. It is implemented in a separate `merge()` method because it is logically much simpler, and faster to execute. """ assert not isinstance(spike_ids, slice) # Ensure `spike_clusters_rel` is an array-like. if not hasattr(spike_clusters_rel, '__len__'): spike_clusters_rel = spike_clusters_rel * np.ones(len(spike_ids), dtype=np.int64) spike_ids = _as_array(spike_ids) if len(spike_ids) == 0: return UpdateInfo() assert len(spike_ids) == len(spike_clusters_rel) assert spike_ids.min() >= 0 assert spike_ids.max() < self._n_spikes, "Some spikes don't exist." # Normalize the spike-cluster assignment such that # there are only new or dead clusters, not modified clusters. 
# This implies that spikes not explicitly selected, but that # belong to clusters affected by the operation, will be assigned # to brand new clusters. spike_ids, cluster_ids = _extend_assignment(spike_ids, self._spike_clusters, spike_clusters_rel, self.new_cluster_id(), ) up = self._do_assign(spike_ids, cluster_ids) undo_state = self.emit('request_undo_state', up) # Add the assignment to the undo stack. self._undo_stack.add((spike_ids, cluster_ids, undo_state)) self.emit('cluster', up) return up
[ "def", "assign", "(", "self", ",", "spike_ids", ",", "spike_clusters_rel", "=", "0", ")", ":", "assert", "not", "isinstance", "(", "spike_ids", ",", "slice", ")", "# Ensure `spike_clusters_rel` is an array-like.", "if", "not", "hasattr", "(", "spike_clusters_rel", ...
Make new spike cluster assignments. Parameters ---------- spike_ids : array-like List of spike ids. spike_clusters_rel : array-like Relative cluster ids of the spikes in `spike_ids`. This must have the same size as `spike_ids`. Returns ------- up : UpdateInfo instance Note ---- `spike_clusters_rel` contain *relative* cluster indices. Their values don't matter: what matters is whether two give spikes should end up in the same cluster or not. Adding a constant number to all elements in `spike_clusters_rel` results in exactly the same operation. The final cluster ids are automatically generated by the `Clustering` class. This is because we must ensure that all modified clusters get brand new ids. The whole library is based on the assumption that cluster ids are unique and "disposable". Changing a cluster always results in a new cluster id being assigned. If a spike is assigned to a new cluster, then all other spikes belonging to the same cluster are assigned to a brand new cluster, even if they were not changed explicitely by the `assign()` method. In other words, the list of spikes affected by an `assign()` is almost always a strict superset of the `spike_ids` parameter. The only case where this is not true is when whole clusters change: this is called a merge. It is implemented in a separate `merge()` method because it is logically much simpler, and faster to execute.
[ "Make", "new", "spike", "cluster", "assignments", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L370-L446
7,747
kwikteam/phy
phy/cluster/clustering.py
Clustering.undo
def undo(self): """Undo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation. """ _, _, undo_state = self._undo_stack.back() # Retrieve the initial spike_cluster structure. spike_clusters_new = self._spike_clusters_base.copy() # Loop over the history (except the last item because we undo). for spike_ids, cluster_ids, _ in self._undo_stack: # We update the spike clusters accordingly. if spike_ids is not None: spike_clusters_new[spike_ids] = cluster_ids # What are the spikes affected by the last changes? changed = np.nonzero(self._spike_clusters != spike_clusters_new)[0] clusters_changed = spike_clusters_new[changed] up = self._do_assign(changed, clusters_changed) up.history = 'undo' # Add the undo_state object from the undone object. up.undo_state = undo_state self.emit('cluster', up) return up
python
def undo(self): """Undo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation. """ _, _, undo_state = self._undo_stack.back() # Retrieve the initial spike_cluster structure. spike_clusters_new = self._spike_clusters_base.copy() # Loop over the history (except the last item because we undo). for spike_ids, cluster_ids, _ in self._undo_stack: # We update the spike clusters accordingly. if spike_ids is not None: spike_clusters_new[spike_ids] = cluster_ids # What are the spikes affected by the last changes? changed = np.nonzero(self._spike_clusters != spike_clusters_new)[0] clusters_changed = spike_clusters_new[changed] up = self._do_assign(changed, clusters_changed) up.history = 'undo' # Add the undo_state object from the undone object. up.undo_state = undo_state self.emit('cluster', up) return up
[ "def", "undo", "(", "self", ")", ":", "_", ",", "_", ",", "undo_state", "=", "self", ".", "_undo_stack", ".", "back", "(", ")", "# Retrieve the initial spike_cluster structure.", "spike_clusters_new", "=", "self", ".", "_spike_clusters_base", ".", "copy", "(", ...
Undo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation.
[ "Undo", "the", "last", "cluster", "assignment", "operation", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L477-L508
7,748
kwikteam/phy
phy/cluster/clustering.py
Clustering.redo
def redo(self): """Redo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation. """ # Go forward in the stack, and retrieve the new assignment. item = self._undo_stack.forward() if item is None: # No redo has been performed: abort. return # NOTE: the undo_state object is only returned when undoing. # It represents data associated to the state # *before* the action. What might be more useful would be the # undo_state object of the next item in the list (if it exists). spike_ids, cluster_ids, undo_state = item assert spike_ids is not None # We apply the new assignment. up = self._do_assign(spike_ids, cluster_ids) up.history = 'redo' self.emit('cluster', up) return up
python
def redo(self): """Redo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation. """ # Go forward in the stack, and retrieve the new assignment. item = self._undo_stack.forward() if item is None: # No redo has been performed: abort. return # NOTE: the undo_state object is only returned when undoing. # It represents data associated to the state # *before* the action. What might be more useful would be the # undo_state object of the next item in the list (if it exists). spike_ids, cluster_ids, undo_state = item assert spike_ids is not None # We apply the new assignment. up = self._do_assign(spike_ids, cluster_ids) up.history = 'redo' self.emit('cluster', up) return up
[ "def", "redo", "(", "self", ")", ":", "# Go forward in the stack, and retrieve the new assignment.", "item", "=", "self", ".", "_undo_stack", ".", "forward", "(", ")", "if", "item", "is", "None", ":", "# No redo has been performed: abort.", "return", "# NOTE: the undo_s...
Redo the last cluster assignment operation. Returns ------- up : UpdateInfo instance of the changes done by this operation.
[ "Redo", "the", "last", "cluster", "assignment", "operation", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/clustering.py#L510-L537
7,749
kwikteam/phy
phy/stats/ccg.py
_increment
def _increment(arr, indices): """Increment some indices in a 1D vector of non-negative integers. Repeated indices are taken into account.""" arr = _as_array(arr) indices = _as_array(indices) bbins = np.bincount(indices) arr[:len(bbins)] += bbins return arr
python
def _increment(arr, indices): """Increment some indices in a 1D vector of non-negative integers. Repeated indices are taken into account.""" arr = _as_array(arr) indices = _as_array(indices) bbins = np.bincount(indices) arr[:len(bbins)] += bbins return arr
[ "def", "_increment", "(", "arr", ",", "indices", ")", ":", "arr", "=", "_as_array", "(", "arr", ")", "indices", "=", "_as_array", "(", "indices", ")", "bbins", "=", "np", ".", "bincount", "(", "indices", ")", "arr", "[", ":", "len", "(", "bbins", "...
Increment some indices in a 1D vector of non-negative integers. Repeated indices are taken into account.
[ "Increment", "some", "indices", "in", "a", "1D", "vector", "of", "non", "-", "negative", "integers", ".", "Repeated", "indices", "are", "taken", "into", "account", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/stats/ccg.py#L19-L26
7,750
kwikteam/phy
phy/stats/ccg.py
_symmetrize_correlograms
def _symmetrize_correlograms(correlograms): """Return the symmetrized version of the CCG arrays.""" n_clusters, _, n_bins = correlograms.shape assert n_clusters == _ # We symmetrize c[i, j, 0]. # This is necessary because the algorithm in correlograms() # is sensitive to the order of identical spikes. correlograms[..., 0] = np.maximum(correlograms[..., 0], correlograms[..., 0].T) sym = correlograms[..., 1:][..., ::-1] sym = np.transpose(sym, (1, 0, 2)) return np.dstack((sym, correlograms))
python
def _symmetrize_correlograms(correlograms): """Return the symmetrized version of the CCG arrays.""" n_clusters, _, n_bins = correlograms.shape assert n_clusters == _ # We symmetrize c[i, j, 0]. # This is necessary because the algorithm in correlograms() # is sensitive to the order of identical spikes. correlograms[..., 0] = np.maximum(correlograms[..., 0], correlograms[..., 0].T) sym = correlograms[..., 1:][..., ::-1] sym = np.transpose(sym, (1, 0, 2)) return np.dstack((sym, correlograms))
[ "def", "_symmetrize_correlograms", "(", "correlograms", ")", ":", "n_clusters", ",", "_", ",", "n_bins", "=", "correlograms", ".", "shape", "assert", "n_clusters", "==", "_", "# We symmetrize c[i, j, 0].", "# This is necessary because the algorithm in correlograms()", "# is...
Return the symmetrized version of the CCG arrays.
[ "Return", "the", "symmetrized", "version", "of", "the", "CCG", "arrays", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/stats/ccg.py#L39-L54
7,751
kwikteam/phy
phy/stats/ccg.py
correlograms
def correlograms(spike_times, spike_clusters, cluster_ids=None, sample_rate=1., bin_size=None, window_size=None, symmetrize=True, ): """Compute all pairwise cross-correlograms among the clusters appearing in `spike_clusters`. Parameters ---------- spike_times : array-like Spike times in seconds. spike_clusters : array-like Spike-cluster mapping. cluster_ids : array-like The list of unique clusters, in any order. That order will be used in the output array. bin_size : float Size of the bin, in seconds. window_size : float Size of the window, in seconds. Returns ------- correlograms : array A `(n_clusters, n_clusters, winsize_samples)` array with all pairwise CCGs. """ assert sample_rate > 0. assert np.all(np.diff(spike_times) >= 0), ("The spike times must be " "increasing.") # Get the spike samples. spike_times = np.asarray(spike_times, dtype=np.float64) spike_samples = (spike_times * sample_rate).astype(np.int64) spike_clusters = _as_array(spike_clusters) assert spike_samples.ndim == 1 assert spike_samples.shape == spike_clusters.shape # Find `binsize`. bin_size = np.clip(bin_size, 1e-5, 1e5) # in seconds binsize = int(sample_rate * bin_size) # in samples assert binsize >= 1 # Find `winsize_bins`. window_size = np.clip(window_size, 1e-5, 1e5) # in seconds winsize_bins = 2 * int(.5 * window_size / bin_size) + 1 assert winsize_bins >= 1 assert winsize_bins % 2 == 1 # Take the cluster oder into account. if cluster_ids is None: clusters = _unique(spike_clusters) else: clusters = _as_array(cluster_ids) n_clusters = len(clusters) # Like spike_clusters, but with 0..n_clusters-1 indices. spike_clusters_i = _index_of(spike_clusters, clusters) # Shift between the two copies of the spike trains. shift = 1 # At a given shift, the mask precises which spikes have matching spikes # within the correlogram time window. 
mask = np.ones_like(spike_samples, dtype=np.bool) correlograms = _create_correlograms_array(n_clusters, winsize_bins) # The loop continues as long as there is at least one spike with # a matching spike. while mask[:-shift].any(): # Number of time samples between spike i and spike i+shift. spike_diff = _diff_shifted(spike_samples, shift) # Binarize the delays between spike i and spike i+shift. spike_diff_b = spike_diff // binsize # Spikes with no matching spikes are masked. mask[:-shift][spike_diff_b > (winsize_bins // 2)] = False # Cache the masked spike delays. m = mask[:-shift].copy() d = spike_diff_b[m] # # Update the masks given the clusters to update. # m0 = np.in1d(spike_clusters[:-shift], clusters) # m = m & m0 # d = spike_diff_b[m] d = spike_diff_b[m] # Find the indices in the raveled correlograms array that need # to be incremented, taking into account the spike clusters. indices = np.ravel_multi_index((spike_clusters_i[:-shift][m], spike_clusters_i[+shift:][m], d), correlograms.shape) # Increment the matching spikes in the correlograms array. _increment(correlograms.ravel(), indices) shift += 1 # Remove ACG peaks. correlograms[np.arange(n_clusters), np.arange(n_clusters), 0] = 0 if symmetrize: return _symmetrize_correlograms(correlograms) else: return correlograms
python
def correlograms(spike_times, spike_clusters, cluster_ids=None, sample_rate=1., bin_size=None, window_size=None, symmetrize=True, ): """Compute all pairwise cross-correlograms among the clusters appearing in `spike_clusters`. Parameters ---------- spike_times : array-like Spike times in seconds. spike_clusters : array-like Spike-cluster mapping. cluster_ids : array-like The list of unique clusters, in any order. That order will be used in the output array. bin_size : float Size of the bin, in seconds. window_size : float Size of the window, in seconds. Returns ------- correlograms : array A `(n_clusters, n_clusters, winsize_samples)` array with all pairwise CCGs. """ assert sample_rate > 0. assert np.all(np.diff(spike_times) >= 0), ("The spike times must be " "increasing.") # Get the spike samples. spike_times = np.asarray(spike_times, dtype=np.float64) spike_samples = (spike_times * sample_rate).astype(np.int64) spike_clusters = _as_array(spike_clusters) assert spike_samples.ndim == 1 assert spike_samples.shape == spike_clusters.shape # Find `binsize`. bin_size = np.clip(bin_size, 1e-5, 1e5) # in seconds binsize = int(sample_rate * bin_size) # in samples assert binsize >= 1 # Find `winsize_bins`. window_size = np.clip(window_size, 1e-5, 1e5) # in seconds winsize_bins = 2 * int(.5 * window_size / bin_size) + 1 assert winsize_bins >= 1 assert winsize_bins % 2 == 1 # Take the cluster oder into account. if cluster_ids is None: clusters = _unique(spike_clusters) else: clusters = _as_array(cluster_ids) n_clusters = len(clusters) # Like spike_clusters, but with 0..n_clusters-1 indices. spike_clusters_i = _index_of(spike_clusters, clusters) # Shift between the two copies of the spike trains. shift = 1 # At a given shift, the mask precises which spikes have matching spikes # within the correlogram time window. 
mask = np.ones_like(spike_samples, dtype=np.bool) correlograms = _create_correlograms_array(n_clusters, winsize_bins) # The loop continues as long as there is at least one spike with # a matching spike. while mask[:-shift].any(): # Number of time samples between spike i and spike i+shift. spike_diff = _diff_shifted(spike_samples, shift) # Binarize the delays between spike i and spike i+shift. spike_diff_b = spike_diff // binsize # Spikes with no matching spikes are masked. mask[:-shift][spike_diff_b > (winsize_bins // 2)] = False # Cache the masked spike delays. m = mask[:-shift].copy() d = spike_diff_b[m] # # Update the masks given the clusters to update. # m0 = np.in1d(spike_clusters[:-shift], clusters) # m = m & m0 # d = spike_diff_b[m] d = spike_diff_b[m] # Find the indices in the raveled correlograms array that need # to be incremented, taking into account the spike clusters. indices = np.ravel_multi_index((spike_clusters_i[:-shift][m], spike_clusters_i[+shift:][m], d), correlograms.shape) # Increment the matching spikes in the correlograms array. _increment(correlograms.ravel(), indices) shift += 1 # Remove ACG peaks. correlograms[np.arange(n_clusters), np.arange(n_clusters), 0] = 0 if symmetrize: return _symmetrize_correlograms(correlograms) else: return correlograms
[ "def", "correlograms", "(", "spike_times", ",", "spike_clusters", ",", "cluster_ids", "=", "None", ",", "sample_rate", "=", "1.", ",", "bin_size", "=", "None", ",", "window_size", "=", "None", ",", "symmetrize", "=", "True", ",", ")", ":", "assert", "sampl...
Compute all pairwise cross-correlograms among the clusters appearing in `spike_clusters`. Parameters ---------- spike_times : array-like Spike times in seconds. spike_clusters : array-like Spike-cluster mapping. cluster_ids : array-like The list of unique clusters, in any order. That order will be used in the output array. bin_size : float Size of the bin, in seconds. window_size : float Size of the window, in seconds. Returns ------- correlograms : array A `(n_clusters, n_clusters, winsize_samples)` array with all pairwise CCGs.
[ "Compute", "all", "pairwise", "cross", "-", "correlograms", "among", "the", "clusters", "appearing", "in", "spike_clusters", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/stats/ccg.py#L57-L177
7,752
kwikteam/phy
phy/cluster/views/correlogram.py
CorrelogramView.set_bin_window
def set_bin_window(self, bin_size=None, window_size=None): """Set the bin and window sizes.""" bin_size = bin_size or self.bin_size window_size = window_size or self.window_size assert 1e-6 < bin_size < 1e3 assert 1e-6 < window_size < 1e3 assert bin_size < window_size self.bin_size = bin_size self.window_size = window_size # Set the status message. b, w = self.bin_size * 1000, self.window_size * 1000 self.set_status('Bin: {:.1f} ms. Window: {:.1f} ms.'.format(b, w))
python
def set_bin_window(self, bin_size=None, window_size=None): """Set the bin and window sizes.""" bin_size = bin_size or self.bin_size window_size = window_size or self.window_size assert 1e-6 < bin_size < 1e3 assert 1e-6 < window_size < 1e3 assert bin_size < window_size self.bin_size = bin_size self.window_size = window_size # Set the status message. b, w = self.bin_size * 1000, self.window_size * 1000 self.set_status('Bin: {:.1f} ms. Window: {:.1f} ms.'.format(b, w))
[ "def", "set_bin_window", "(", "self", ",", "bin_size", "=", "None", ",", "window_size", "=", "None", ")", ":", "bin_size", "=", "bin_size", "or", "self", ".", "bin_size", "window_size", "=", "window_size", "or", "self", ".", "window_size", "assert", "1e-6", ...
Set the bin and window sizes.
[ "Set", "the", "bin", "and", "window", "sizes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/views/correlogram.py#L56-L67
7,753
kwikteam/phy
phy/io/datasets.py
_md5
def _md5(path, blocksize=2 ** 20): """Compute the checksum of a file.""" m = hashlib.md5() with open(path, 'rb') as f: while True: buf = f.read(blocksize) if not buf: break m.update(buf) return m.hexdigest()
python
def _md5(path, blocksize=2 ** 20): """Compute the checksum of a file.""" m = hashlib.md5() with open(path, 'rb') as f: while True: buf = f.read(blocksize) if not buf: break m.update(buf) return m.hexdigest()
[ "def", "_md5", "(", "path", ",", "blocksize", "=", "2", "**", "20", ")", ":", "m", "=", "hashlib", ".", "md5", "(", ")", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "while", "True", ":", "buf", "=", "f", ".", "read", "(", ...
Compute the checksum of a file.
[ "Compute", "the", "checksum", "of", "a", "file", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/datasets.py#L66-L75
7,754
kwikteam/phy
phy/io/datasets.py
download_file
def download_file(url, output_path): """Download a binary file from an URL. The checksum will be downloaded from `URL + .md5`. If this download succeeds, the file's MD5 will be compared to the expected checksum. Parameters ---------- url : str The file's URL. output_path : str The path where the file is to be saved. """ output_path = op.realpath(output_path) assert output_path is not None if op.exists(output_path): checked = _check_md5_of_url(output_path, url) if checked is False: logger.debug("The file `%s` already exists " "but is invalid: redownloading.", output_path) elif checked is True: logger.debug("The file `%s` already exists: skipping.", output_path) return output_path r = _download(url, stream=True) _save_stream(r, output_path) if _check_md5_of_url(output_path, url) is False: logger.debug("The checksum doesn't match: retrying the download.") r = _download(url, stream=True) _save_stream(r, output_path) if _check_md5_of_url(output_path, url) is False: raise RuntimeError("The checksum of the downloaded file " "doesn't match the provided checksum.") return
python
def download_file(url, output_path): """Download a binary file from an URL. The checksum will be downloaded from `URL + .md5`. If this download succeeds, the file's MD5 will be compared to the expected checksum. Parameters ---------- url : str The file's URL. output_path : str The path where the file is to be saved. """ output_path = op.realpath(output_path) assert output_path is not None if op.exists(output_path): checked = _check_md5_of_url(output_path, url) if checked is False: logger.debug("The file `%s` already exists " "but is invalid: redownloading.", output_path) elif checked is True: logger.debug("The file `%s` already exists: skipping.", output_path) return output_path r = _download(url, stream=True) _save_stream(r, output_path) if _check_md5_of_url(output_path, url) is False: logger.debug("The checksum doesn't match: retrying the download.") r = _download(url, stream=True) _save_stream(r, output_path) if _check_md5_of_url(output_path, url) is False: raise RuntimeError("The checksum of the downloaded file " "doesn't match the provided checksum.") return
[ "def", "download_file", "(", "url", ",", "output_path", ")", ":", "output_path", "=", "op", ".", "realpath", "(", "output_path", ")", "assert", "output_path", "is", "not", "None", "if", "op", ".", "exists", "(", "output_path", ")", ":", "checked", "=", "...
Download a binary file from an URL. The checksum will be downloaded from `URL + .md5`. If this download succeeds, the file's MD5 will be compared to the expected checksum. Parameters ---------- url : str The file's URL. output_path : str The path where the file is to be saved.
[ "Download", "a", "binary", "file", "from", "an", "URL", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/datasets.py#L103-L138
7,755
kwikteam/phy
phy/plot/plot.py
_make_class
def _make_class(cls, **kwargs): """Return a custom Visual class with given parameters.""" kwargs = {k: (v if v is not None else getattr(cls, k, None)) for k, v in kwargs.items()} # The class name contains a hash of the custom parameters. name = cls.__name__ + '_' + _hash(kwargs) if name not in _CLASSES: logger.log(5, "Create class %s %s.", name, kwargs) cls = type(name, (cls,), kwargs) _CLASSES[name] = cls return _CLASSES[name]
python
def _make_class(cls, **kwargs): """Return a custom Visual class with given parameters.""" kwargs = {k: (v if v is not None else getattr(cls, k, None)) for k, v in kwargs.items()} # The class name contains a hash of the custom parameters. name = cls.__name__ + '_' + _hash(kwargs) if name not in _CLASSES: logger.log(5, "Create class %s %s.", name, kwargs) cls = type(name, (cls,), kwargs) _CLASSES[name] = cls return _CLASSES[name]
[ "def", "_make_class", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "kwargs", "=", "{", "k", ":", "(", "v", "if", "v", "is", "not", "None", "else", "getattr", "(", "cls", ",", "k", ",", "None", ")", ")", "for", "k", ",", "v", "in", "kwargs",...
Return a custom Visual class with given parameters.
[ "Return", "a", "custom", "Visual", "class", "with", "given", "parameters", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/plot.py#L46-L56
7,756
kwikteam/phy
phy/plot/plot.py
View._add_item
def _add_item(self, cls, *args, **kwargs): """Add a plot item.""" box_index = kwargs.pop('box_index', self._default_box_index) data = cls.validate(*args, **kwargs) n = cls.vertex_count(**data) if not isinstance(box_index, np.ndarray): k = len(self._default_box_index) box_index = _get_array(box_index, (n, k)) data['box_index'] = box_index if cls not in self._items: self._items[cls] = [] self._items[cls].append(data) return data
python
def _add_item(self, cls, *args, **kwargs): """Add a plot item.""" box_index = kwargs.pop('box_index', self._default_box_index) data = cls.validate(*args, **kwargs) n = cls.vertex_count(**data) if not isinstance(box_index, np.ndarray): k = len(self._default_box_index) box_index = _get_array(box_index, (n, k)) data['box_index'] = box_index if cls not in self._items: self._items[cls] = [] self._items[cls].append(data) return data
[ "def", "_add_item", "(", "self", ",", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "box_index", "=", "kwargs", ".", "pop", "(", "'box_index'", ",", "self", ".", "_default_box_index", ")", "data", "=", "cls", ".", "validate", "(", "*", ...
Add a plot item.
[ "Add", "a", "plot", "item", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/plot.py#L118-L133
7,757
kwikteam/phy
phy/plot/plot.py
View.scatter
def scatter(self, *args, **kwargs): """Add a scatter plot.""" cls = _make_class(ScatterVisual, _default_marker=kwargs.pop('marker', None), ) return self._add_item(cls, *args, **kwargs)
python
def scatter(self, *args, **kwargs): """Add a scatter plot.""" cls = _make_class(ScatterVisual, _default_marker=kwargs.pop('marker', None), ) return self._add_item(cls, *args, **kwargs)
[ "def", "scatter", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "cls", "=", "_make_class", "(", "ScatterVisual", ",", "_default_marker", "=", "kwargs", ".", "pop", "(", "'marker'", ",", "None", ")", ",", ")", "return", "self", "."...
Add a scatter plot.
[ "Add", "a", "scatter", "plot", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/plot.py#L153-L158
7,758
kwikteam/phy
phy/plot/plot.py
View.build
def build(self): """Build all added items. Visuals are created, added, and built. The `set_data()` methods can be called afterwards. """ for cls, data_list in self._items.items(): # Some variables are not concatenated. They are specified # in `allow_list`. data = _accumulate(data_list, cls.allow_list) box_index = data.pop('box_index') visual = cls() self.add_visual(visual) visual.set_data(**data) # NOTE: visual.program.__contains__ is implemented in vispy master # so we can replace this with `if 'a_box_index' in visual.program` # after the next VisPy release. if 'a_box_index' in visual.program._code_variables: visual.program['a_box_index'] = box_index.astype(np.float32) # TODO: refactor this when there is the possibility to update existing # visuals without recreating the whole scene. if self.lasso: self.lasso.create_visual() self.update()
python
def build(self): """Build all added items. Visuals are created, added, and built. The `set_data()` methods can be called afterwards. """ for cls, data_list in self._items.items(): # Some variables are not concatenated. They are specified # in `allow_list`. data = _accumulate(data_list, cls.allow_list) box_index = data.pop('box_index') visual = cls() self.add_visual(visual) visual.set_data(**data) # NOTE: visual.program.__contains__ is implemented in vispy master # so we can replace this with `if 'a_box_index' in visual.program` # after the next VisPy release. if 'a_box_index' in visual.program._code_variables: visual.program['a_box_index'] = box_index.astype(np.float32) # TODO: refactor this when there is the possibility to update existing # visuals without recreating the whole scene. if self.lasso: self.lasso.create_visual() self.update()
[ "def", "build", "(", "self", ")", ":", "for", "cls", ",", "data_list", "in", "self", ".", "_items", ".", "items", "(", ")", ":", "# Some variables are not concatenated. They are specified", "# in `allow_list`.", "data", "=", "_accumulate", "(", "data_list", ",", ...
Build all added items. Visuals are created, added, and built. The `set_data()` methods can be called afterwards.
[ "Build", "all", "added", "items", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/plot.py#L176-L200
7,759
kwikteam/phy
phy/io/array.py
_range_from_slice
def _range_from_slice(myslice, start=None, stop=None, step=None, length=None): """Convert a slice to an array of integers.""" assert isinstance(myslice, slice) # Find 'step'. step = myslice.step if myslice.step is not None else step if step is None: step = 1 # Find 'start'. start = myslice.start if myslice.start is not None else start if start is None: start = 0 # Find 'stop' as a function of length if 'stop' is unspecified. stop = myslice.stop if myslice.stop is not None else stop if length is not None: stop_inferred = floor(start + step * length) if stop is not None and stop < stop_inferred: raise ValueError("'stop' ({stop}) and ".format(stop=stop) + "'length' ({length}) ".format(length=length) + "are not compatible.") stop = stop_inferred if stop is None and length is None: raise ValueError("'stop' and 'length' cannot be both unspecified.") myrange = np.arange(start, stop, step) # Check the length if it was specified. if length is not None: assert len(myrange) == length return myrange
python
def _range_from_slice(myslice, start=None, stop=None, step=None, length=None): """Convert a slice to an array of integers.""" assert isinstance(myslice, slice) # Find 'step'. step = myslice.step if myslice.step is not None else step if step is None: step = 1 # Find 'start'. start = myslice.start if myslice.start is not None else start if start is None: start = 0 # Find 'stop' as a function of length if 'stop' is unspecified. stop = myslice.stop if myslice.stop is not None else stop if length is not None: stop_inferred = floor(start + step * length) if stop is not None and stop < stop_inferred: raise ValueError("'stop' ({stop}) and ".format(stop=stop) + "'length' ({length}) ".format(length=length) + "are not compatible.") stop = stop_inferred if stop is None and length is None: raise ValueError("'stop' and 'length' cannot be both unspecified.") myrange = np.arange(start, stop, step) # Check the length if it was specified. if length is not None: assert len(myrange) == length return myrange
[ "def", "_range_from_slice", "(", "myslice", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "step", "=", "None", ",", "length", "=", "None", ")", ":", "assert", "isinstance", "(", "myslice", ",", "slice", ")", "# Find 'step'.", "step", "=", "...
Convert a slice to an array of integers.
[ "Convert", "a", "slice", "to", "an", "array", "of", "integers", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L28-L54
7,760
kwikteam/phy
phy/io/array.py
_index_of
def _index_of(arr, lookup): """Replace scalars in an array by their indices in a lookup table. Implicitely assume that: * All elements of arr and lookup are non-negative integers. * All elements or arr belong to lookup. This is not checked for performance reasons. """ # Equivalent of np.digitize(arr, lookup) - 1, but much faster. # TODO: assertions to disable in production for performance reasons. # TODO: np.searchsorted(lookup, arr) is faster on small arrays with large # values lookup = np.asarray(lookup, dtype=np.int32) m = (lookup.max() if len(lookup) else 0) + 1 tmp = np.zeros(m + 1, dtype=np.int) # Ensure that -1 values are kept. tmp[-1] = -1 if len(lookup): tmp[lookup] = np.arange(len(lookup)) return tmp[arr]
python
def _index_of(arr, lookup): """Replace scalars in an array by their indices in a lookup table. Implicitely assume that: * All elements of arr and lookup are non-negative integers. * All elements or arr belong to lookup. This is not checked for performance reasons. """ # Equivalent of np.digitize(arr, lookup) - 1, but much faster. # TODO: assertions to disable in production for performance reasons. # TODO: np.searchsorted(lookup, arr) is faster on small arrays with large # values lookup = np.asarray(lookup, dtype=np.int32) m = (lookup.max() if len(lookup) else 0) + 1 tmp = np.zeros(m + 1, dtype=np.int) # Ensure that -1 values are kept. tmp[-1] = -1 if len(lookup): tmp[lookup] = np.arange(len(lookup)) return tmp[arr]
[ "def", "_index_of", "(", "arr", ",", "lookup", ")", ":", "# Equivalent of np.digitize(arr, lookup) - 1, but much faster.", "# TODO: assertions to disable in production for performance reasons.", "# TODO: np.searchsorted(lookup, arr) is faster on small arrays with large", "# values", "lookup"...
Replace scalars in an array by their indices in a lookup table. Implicitely assume that: * All elements of arr and lookup are non-negative integers. * All elements or arr belong to lookup. This is not checked for performance reasons.
[ "Replace", "scalars", "in", "an", "array", "by", "their", "indices", "in", "a", "lookup", "table", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L99-L121
7,761
kwikteam/phy
phy/io/array.py
_pad
def _pad(arr, n, dir='right'): """Pad an array with zeros along the first axis. Parameters ---------- n : int Size of the returned array in the first axis. dir : str Direction of the padding. Must be one 'left' or 'right'. """ assert dir in ('left', 'right') if n < 0: raise ValueError("'n' must be positive: {0}.".format(n)) elif n == 0: return np.zeros((0,) + arr.shape[1:], dtype=arr.dtype) n_arr = arr.shape[0] shape = (n,) + arr.shape[1:] if n_arr == n: assert arr.shape == shape return arr elif n_arr < n: out = np.zeros(shape, dtype=arr.dtype) if dir == 'left': out[-n_arr:, ...] = arr elif dir == 'right': out[:n_arr, ...] = arr assert out.shape == shape return out else: if dir == 'left': out = arr[-n:, ...] elif dir == 'right': out = arr[:n, ...] assert out.shape == shape return out
python
def _pad(arr, n, dir='right'): """Pad an array with zeros along the first axis. Parameters ---------- n : int Size of the returned array in the first axis. dir : str Direction of the padding. Must be one 'left' or 'right'. """ assert dir in ('left', 'right') if n < 0: raise ValueError("'n' must be positive: {0}.".format(n)) elif n == 0: return np.zeros((0,) + arr.shape[1:], dtype=arr.dtype) n_arr = arr.shape[0] shape = (n,) + arr.shape[1:] if n_arr == n: assert arr.shape == shape return arr elif n_arr < n: out = np.zeros(shape, dtype=arr.dtype) if dir == 'left': out[-n_arr:, ...] = arr elif dir == 'right': out[:n_arr, ...] = arr assert out.shape == shape return out else: if dir == 'left': out = arr[-n:, ...] elif dir == 'right': out = arr[:n, ...] assert out.shape == shape return out
[ "def", "_pad", "(", "arr", ",", "n", ",", "dir", "=", "'right'", ")", ":", "assert", "dir", "in", "(", "'left'", ",", "'right'", ")", "if", "n", "<", "0", ":", "raise", "ValueError", "(", "\"'n' must be positive: {0}.\"", ".", "format", "(", "n", ")"...
Pad an array with zeros along the first axis. Parameters ---------- n : int Size of the returned array in the first axis. dir : str Direction of the padding. Must be one 'left' or 'right'.
[ "Pad", "an", "array", "with", "zeros", "along", "the", "first", "axis", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L124-L160
7,762
kwikteam/phy
phy/io/array.py
_in_polygon
def _in_polygon(points, polygon): """Return the points that are inside a polygon.""" from matplotlib.path import Path points = _as_array(points) polygon = _as_array(polygon) assert points.ndim == 2 assert polygon.ndim == 2 if len(polygon): polygon = np.vstack((polygon, polygon[0])) path = Path(polygon, closed=True) return path.contains_points(points)
python
def _in_polygon(points, polygon): """Return the points that are inside a polygon.""" from matplotlib.path import Path points = _as_array(points) polygon = _as_array(polygon) assert points.ndim == 2 assert polygon.ndim == 2 if len(polygon): polygon = np.vstack((polygon, polygon[0])) path = Path(polygon, closed=True) return path.contains_points(points)
[ "def", "_in_polygon", "(", "points", ",", "polygon", ")", ":", "from", "matplotlib", ".", "path", "import", "Path", "points", "=", "_as_array", "(", "points", ")", "polygon", "=", "_as_array", "(", "polygon", ")", "assert", "points", ".", "ndim", "==", "...
Return the points that are inside a polygon.
[ "Return", "the", "points", "that", "are", "inside", "a", "polygon", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L183-L193
7,763
kwikteam/phy
phy/io/array.py
read_array
def read_array(path, mmap_mode=None): """Read a .npy array.""" file_ext = op.splitext(path)[1] if file_ext == '.npy': return np.load(path, mmap_mode=mmap_mode) raise NotImplementedError("The file extension `{}` ".format(file_ext) + "is not currently supported.")
python
def read_array(path, mmap_mode=None): """Read a .npy array.""" file_ext = op.splitext(path)[1] if file_ext == '.npy': return np.load(path, mmap_mode=mmap_mode) raise NotImplementedError("The file extension `{}` ".format(file_ext) + "is not currently supported.")
[ "def", "read_array", "(", "path", ",", "mmap_mode", "=", "None", ")", ":", "file_ext", "=", "op", ".", "splitext", "(", "path", ")", "[", "1", "]", "if", "file_ext", "==", "'.npy'", ":", "return", "np", ".", "load", "(", "path", ",", "mmap_mode", "...
Read a .npy array.
[ "Read", "a", ".", "npy", "array", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L219-L225
7,764
kwikteam/phy
phy/io/array.py
write_array
def write_array(path, arr): """Write an array to a .npy file.""" file_ext = op.splitext(path)[1] if file_ext == '.npy': return np.save(path, arr) raise NotImplementedError("The file extension `{}` ".format(file_ext) + "is not currently supported.")
python
def write_array(path, arr): """Write an array to a .npy file.""" file_ext = op.splitext(path)[1] if file_ext == '.npy': return np.save(path, arr) raise NotImplementedError("The file extension `{}` ".format(file_ext) + "is not currently supported.")
[ "def", "write_array", "(", "path", ",", "arr", ")", ":", "file_ext", "=", "op", ".", "splitext", "(", "path", ")", "[", "1", "]", "if", "file_ext", "==", "'.npy'", ":", "return", "np", ".", "save", "(", "path", ",", "arr", ")", "raise", "NotImpleme...
Write an array to a .npy file.
[ "Write", "an", "array", "to", "a", ".", "npy", "file", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L228-L234
7,765
kwikteam/phy
phy/io/array.py
_concatenate_virtual_arrays
def _concatenate_virtual_arrays(arrs, cols=None, scaling=None): """Return a virtual concatenate of several NumPy arrays.""" return None if not len(arrs) else ConcatenatedArrays(arrs, cols, scaling=scaling)
python
def _concatenate_virtual_arrays(arrs, cols=None, scaling=None): """Return a virtual concatenate of several NumPy arrays.""" return None if not len(arrs) else ConcatenatedArrays(arrs, cols, scaling=scaling)
[ "def", "_concatenate_virtual_arrays", "(", "arrs", ",", "cols", "=", "None", ",", "scaling", "=", "None", ")", ":", "return", "None", "if", "not", "len", "(", "arrs", ")", "else", "ConcatenatedArrays", "(", "arrs", ",", "cols", ",", "scaling", "=", "scal...
Return a virtual concatenate of several NumPy arrays.
[ "Return", "a", "virtual", "concatenate", "of", "several", "NumPy", "arrays", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L359-L362
7,766
kwikteam/phy
phy/io/array.py
_excerpt_step
def _excerpt_step(n_samples, n_excerpts=None, excerpt_size=None): """Compute the step of an excerpt set as a function of the number of excerpts or their sizes.""" assert n_excerpts >= 2 step = max((n_samples - excerpt_size) // (n_excerpts - 1), excerpt_size) return step
python
def _excerpt_step(n_samples, n_excerpts=None, excerpt_size=None): """Compute the step of an excerpt set as a function of the number of excerpts or their sizes.""" assert n_excerpts >= 2 step = max((n_samples - excerpt_size) // (n_excerpts - 1), excerpt_size) return step
[ "def", "_excerpt_step", "(", "n_samples", ",", "n_excerpts", "=", "None", ",", "excerpt_size", "=", "None", ")", ":", "assert", "n_excerpts", ">=", "2", "step", "=", "max", "(", "(", "n_samples", "-", "excerpt_size", ")", "//", "(", "n_excerpts", "-", "1...
Compute the step of an excerpt set as a function of the number of excerpts or their sizes.
[ "Compute", "the", "step", "of", "an", "excerpt", "set", "as", "a", "function", "of", "the", "number", "of", "excerpts", "or", "their", "sizes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L369-L375
7,767
kwikteam/phy
phy/io/array.py
chunk_bounds
def chunk_bounds(n_samples, chunk_size, overlap=0): """Return chunk bounds. Chunks have the form: [ overlap/2 | chunk_size-overlap | overlap/2 ] s_start keep_start keep_end s_end Except for the first and last chunks which do not have a left/right overlap. This generator yields (s_start, s_end, keep_start, keep_end). """ s_start = 0 s_end = chunk_size keep_start = s_start keep_end = s_end - overlap // 2 yield s_start, s_end, keep_start, keep_end while s_end - overlap + chunk_size < n_samples: s_start = s_end - overlap s_end = s_start + chunk_size keep_start = keep_end keep_end = s_end - overlap // 2 if s_start < s_end: yield s_start, s_end, keep_start, keep_end s_start = s_end - overlap s_end = n_samples keep_start = keep_end keep_end = s_end if s_start < s_end: yield s_start, s_end, keep_start, keep_end
python
def chunk_bounds(n_samples, chunk_size, overlap=0): """Return chunk bounds. Chunks have the form: [ overlap/2 | chunk_size-overlap | overlap/2 ] s_start keep_start keep_end s_end Except for the first and last chunks which do not have a left/right overlap. This generator yields (s_start, s_end, keep_start, keep_end). """ s_start = 0 s_end = chunk_size keep_start = s_start keep_end = s_end - overlap // 2 yield s_start, s_end, keep_start, keep_end while s_end - overlap + chunk_size < n_samples: s_start = s_end - overlap s_end = s_start + chunk_size keep_start = keep_end keep_end = s_end - overlap // 2 if s_start < s_end: yield s_start, s_end, keep_start, keep_end s_start = s_end - overlap s_end = n_samples keep_start = keep_end keep_end = s_end if s_start < s_end: yield s_start, s_end, keep_start, keep_end
[ "def", "chunk_bounds", "(", "n_samples", ",", "chunk_size", ",", "overlap", "=", "0", ")", ":", "s_start", "=", "0", "s_end", "=", "chunk_size", "keep_start", "=", "s_start", "keep_end", "=", "s_end", "-", "overlap", "//", "2", "yield", "s_start", ",", "...
Return chunk bounds. Chunks have the form: [ overlap/2 | chunk_size-overlap | overlap/2 ] s_start keep_start keep_end s_end Except for the first and last chunks which do not have a left/right overlap. This generator yields (s_start, s_end, keep_start, keep_end).
[ "Return", "chunk", "bounds", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L378-L411
7,768
kwikteam/phy
phy/io/array.py
data_chunk
def data_chunk(data, chunk, with_overlap=False): """Get a data chunk.""" assert isinstance(chunk, tuple) if len(chunk) == 2: i, j = chunk elif len(chunk) == 4: if with_overlap: i, j = chunk[:2] else: i, j = chunk[2:] else: raise ValueError("'chunk' should have 2 or 4 elements, " "not {0:d}".format(len(chunk))) return data[i:j, ...]
python
def data_chunk(data, chunk, with_overlap=False): """Get a data chunk.""" assert isinstance(chunk, tuple) if len(chunk) == 2: i, j = chunk elif len(chunk) == 4: if with_overlap: i, j = chunk[:2] else: i, j = chunk[2:] else: raise ValueError("'chunk' should have 2 or 4 elements, " "not {0:d}".format(len(chunk))) return data[i:j, ...]
[ "def", "data_chunk", "(", "data", ",", "chunk", ",", "with_overlap", "=", "False", ")", ":", "assert", "isinstance", "(", "chunk", ",", "tuple", ")", "if", "len", "(", "chunk", ")", "==", "2", ":", "i", ",", "j", "=", "chunk", "elif", "len", "(", ...
Get a data chunk.
[ "Get", "a", "data", "chunk", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L428-L441
7,769
kwikteam/phy
phy/io/array.py
_spikes_in_clusters
def _spikes_in_clusters(spike_clusters, clusters): """Return the ids of all spikes belonging to the specified clusters.""" if len(spike_clusters) == 0 or len(clusters) == 0: return np.array([], dtype=np.int) return np.nonzero(np.in1d(spike_clusters, clusters))[0]
python
def _spikes_in_clusters(spike_clusters, clusters): """Return the ids of all spikes belonging to the specified clusters.""" if len(spike_clusters) == 0 or len(clusters) == 0: return np.array([], dtype=np.int) return np.nonzero(np.in1d(spike_clusters, clusters))[0]
[ "def", "_spikes_in_clusters", "(", "spike_clusters", ",", "clusters", ")", ":", "if", "len", "(", "spike_clusters", ")", "==", "0", "or", "len", "(", "clusters", ")", "==", "0", ":", "return", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np...
Return the ids of all spikes belonging to the specified clusters.
[ "Return", "the", "ids", "of", "all", "spikes", "belonging", "to", "the", "specified", "clusters", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L465-L469
7,770
kwikteam/phy
phy/io/array.py
grouped_mean
def grouped_mean(arr, spike_clusters): """Compute the mean of a spike-dependent quantity for every cluster. The two arguments should be 1D array with `n_spikes` elements. The output is a 1D array with `n_clusters` elements. The clusters are sorted in increasing order. """ arr = np.asarray(arr) spike_clusters = np.asarray(spike_clusters) assert arr.ndim == 1 assert arr.shape[0] == len(spike_clusters) cluster_ids = _unique(spike_clusters) spike_clusters_rel = _index_of(spike_clusters, cluster_ids) spike_counts = np.bincount(spike_clusters_rel) assert len(spike_counts) == len(cluster_ids) t = np.zeros(len(cluster_ids)) # Compute the sum with possible repetitions. np.add.at(t, spike_clusters_rel, arr) return t / spike_counts
python
def grouped_mean(arr, spike_clusters): """Compute the mean of a spike-dependent quantity for every cluster. The two arguments should be 1D array with `n_spikes` elements. The output is a 1D array with `n_clusters` elements. The clusters are sorted in increasing order. """ arr = np.asarray(arr) spike_clusters = np.asarray(spike_clusters) assert arr.ndim == 1 assert arr.shape[0] == len(spike_clusters) cluster_ids = _unique(spike_clusters) spike_clusters_rel = _index_of(spike_clusters, cluster_ids) spike_counts = np.bincount(spike_clusters_rel) assert len(spike_counts) == len(cluster_ids) t = np.zeros(len(cluster_ids)) # Compute the sum with possible repetitions. np.add.at(t, spike_clusters_rel, arr) return t / spike_counts
[ "def", "grouped_mean", "(", "arr", ",", "spike_clusters", ")", ":", "arr", "=", "np", ".", "asarray", "(", "arr", ")", "spike_clusters", "=", "np", ".", "asarray", "(", "spike_clusters", ")", "assert", "arr", ".", "ndim", "==", "1", "assert", "arr", "....
Compute the mean of a spike-dependent quantity for every cluster. The two arguments should be 1D array with `n_spikes` elements. The output is a 1D array with `n_clusters` elements. The clusters are sorted in increasing order.
[ "Compute", "the", "mean", "of", "a", "spike", "-", "dependent", "quantity", "for", "every", "cluster", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L506-L526
7,771
kwikteam/phy
phy/io/array.py
regular_subset
def regular_subset(spikes, n_spikes_max=None, offset=0): """Prune the current selection to get at most n_spikes_max spikes.""" assert spikes is not None # Nothing to do if the selection already satisfies n_spikes_max. if n_spikes_max is None or len(spikes) <= n_spikes_max: # pragma: no cover return spikes step = math.ceil(np.clip(1. / n_spikes_max * len(spikes), 1, len(spikes))) step = int(step) # Note: randomly-changing selections are confusing... my_spikes = spikes[offset::step][:n_spikes_max] assert len(my_spikes) <= len(spikes) assert len(my_spikes) <= n_spikes_max return my_spikes
python
def regular_subset(spikes, n_spikes_max=None, offset=0): """Prune the current selection to get at most n_spikes_max spikes.""" assert spikes is not None # Nothing to do if the selection already satisfies n_spikes_max. if n_spikes_max is None or len(spikes) <= n_spikes_max: # pragma: no cover return spikes step = math.ceil(np.clip(1. / n_spikes_max * len(spikes), 1, len(spikes))) step = int(step) # Note: randomly-changing selections are confusing... my_spikes = spikes[offset::step][:n_spikes_max] assert len(my_spikes) <= len(spikes) assert len(my_spikes) <= n_spikes_max return my_spikes
[ "def", "regular_subset", "(", "spikes", ",", "n_spikes_max", "=", "None", ",", "offset", "=", "0", ")", ":", "assert", "spikes", "is", "not", "None", "# Nothing to do if the selection already satisfies n_spikes_max.", "if", "n_spikes_max", "is", "None", "or", "len",...
Prune the current selection to get at most n_spikes_max spikes.
[ "Prune", "the", "current", "selection", "to", "get", "at", "most", "n_spikes_max", "spikes", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L529-L542
7,772
kwikteam/phy
phy/io/array.py
select_spikes
def select_spikes(cluster_ids=None, max_n_spikes_per_cluster=None, spikes_per_cluster=None, batch_size=None, subset=None, ): """Return a selection of spikes belonging to the specified clusters.""" subset = subset or 'regular' assert _is_array_like(cluster_ids) if not len(cluster_ids): return np.array([], dtype=np.int64) if max_n_spikes_per_cluster in (None, 0): selection = {c: spikes_per_cluster(c) for c in cluster_ids} else: assert max_n_spikes_per_cluster > 0 selection = {} n_clusters = len(cluster_ids) for cluster in cluster_ids: # Decrease the number of spikes per cluster when there # are more clusters. n = int(max_n_spikes_per_cluster * exp(-.1 * (n_clusters - 1))) n = max(1, n) spike_ids = spikes_per_cluster(cluster) if subset == 'regular': # Regular subselection. if batch_size is None or len(spike_ids) <= max(batch_size, n): spike_ids = regular_subset(spike_ids, n_spikes_max=n) else: # Batch selections of spikes. spike_ids = get_excerpts(spike_ids, n // batch_size, batch_size) elif subset == 'random' and len(spike_ids) > n: # Random subselection. spike_ids = np.random.choice(spike_ids, n, replace=False) spike_ids = np.unique(spike_ids) selection[cluster] = spike_ids return _flatten_per_cluster(selection)
python
def select_spikes(cluster_ids=None, max_n_spikes_per_cluster=None, spikes_per_cluster=None, batch_size=None, subset=None, ): """Return a selection of spikes belonging to the specified clusters.""" subset = subset or 'regular' assert _is_array_like(cluster_ids) if not len(cluster_ids): return np.array([], dtype=np.int64) if max_n_spikes_per_cluster in (None, 0): selection = {c: spikes_per_cluster(c) for c in cluster_ids} else: assert max_n_spikes_per_cluster > 0 selection = {} n_clusters = len(cluster_ids) for cluster in cluster_ids: # Decrease the number of spikes per cluster when there # are more clusters. n = int(max_n_spikes_per_cluster * exp(-.1 * (n_clusters - 1))) n = max(1, n) spike_ids = spikes_per_cluster(cluster) if subset == 'regular': # Regular subselection. if batch_size is None or len(spike_ids) <= max(batch_size, n): spike_ids = regular_subset(spike_ids, n_spikes_max=n) else: # Batch selections of spikes. spike_ids = get_excerpts(spike_ids, n // batch_size, batch_size) elif subset == 'random' and len(spike_ids) > n: # Random subselection. spike_ids = np.random.choice(spike_ids, n, replace=False) spike_ids = np.unique(spike_ids) selection[cluster] = spike_ids return _flatten_per_cluster(selection)
[ "def", "select_spikes", "(", "cluster_ids", "=", "None", ",", "max_n_spikes_per_cluster", "=", "None", ",", "spikes_per_cluster", "=", "None", ",", "batch_size", "=", "None", ",", "subset", "=", "None", ",", ")", ":", "subset", "=", "subset", "or", "'regular...
Return a selection of spikes belonging to the specified clusters.
[ "Return", "a", "selection", "of", "spikes", "belonging", "to", "the", "specified", "clusters", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L545-L582
7,773
kwikteam/phy
phy/io/array.py
ConcatenatedArrays._get_recording
def _get_recording(self, index): """Return the recording that contains a given index.""" assert index >= 0 recs = np.nonzero((index - self.offsets[:-1]) >= 0)[0] if len(recs) == 0: # pragma: no cover # If the index is greater than the total size, # return the last recording. return len(self.arrs) - 1 # Return the last recording such that the index is greater than # its offset. return recs[-1]
python
def _get_recording(self, index): """Return the recording that contains a given index.""" assert index >= 0 recs = np.nonzero((index - self.offsets[:-1]) >= 0)[0] if len(recs) == 0: # pragma: no cover # If the index is greater than the total size, # return the last recording. return len(self.arrs) - 1 # Return the last recording such that the index is greater than # its offset. return recs[-1]
[ "def", "_get_recording", "(", "self", ",", "index", ")", ":", "assert", "index", ">=", "0", "recs", "=", "np", ".", "nonzero", "(", "(", "index", "-", "self", ".", "offsets", "[", ":", "-", "1", "]", ")", ">=", "0", ")", "[", "0", "]", "if", ...
Return the recording that contains a given index.
[ "Return", "the", "recording", "that", "contains", "a", "given", "index", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/io/array.py#L297-L307
7,774
kwikteam/phy
phy/traces/filter.py
bandpass_filter
def bandpass_filter(rate=None, low=None, high=None, order=None): """Butterworth bandpass filter.""" assert low < high assert order >= 1 return signal.butter(order, (low / (rate / 2.), high / (rate / 2.)), 'pass')
python
def bandpass_filter(rate=None, low=None, high=None, order=None): """Butterworth bandpass filter.""" assert low < high assert order >= 1 return signal.butter(order, (low / (rate / 2.), high / (rate / 2.)), 'pass')
[ "def", "bandpass_filter", "(", "rate", "=", "None", ",", "low", "=", "None", ",", "high", "=", "None", ",", "order", "=", "None", ")", ":", "assert", "low", "<", "high", "assert", "order", ">=", "1", "return", "signal", ".", "butter", "(", "order", ...
Butterworth bandpass filter.
[ "Butterworth", "bandpass", "filter", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/filter.py#L19-L25
7,775
kwikteam/phy
phy/traces/filter.py
apply_filter
def apply_filter(x, filter=None, axis=0): """Apply a filter to an array.""" x = _as_array(x) if x.shape[axis] == 0: return x b, a = filter return signal.filtfilt(b, a, x, axis=axis)
python
def apply_filter(x, filter=None, axis=0): """Apply a filter to an array.""" x = _as_array(x) if x.shape[axis] == 0: return x b, a = filter return signal.filtfilt(b, a, x, axis=axis)
[ "def", "apply_filter", "(", "x", ",", "filter", "=", "None", ",", "axis", "=", "0", ")", ":", "x", "=", "_as_array", "(", "x", ")", "if", "x", ".", "shape", "[", "axis", "]", "==", "0", ":", "return", "x", "b", ",", "a", "=", "filter", "retur...
Apply a filter to an array.
[ "Apply", "a", "filter", "to", "an", "array", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/filter.py#L28-L34
7,776
kwikteam/phy
phy/traces/filter.py
Whitening.fit
def fit(self, x, fudge=1e-18): """Compute the whitening matrix. Parameters ---------- x : array An `(n_samples, n_channels)` array. """ assert x.ndim == 2 ns, nc = x.shape x_cov = np.cov(x, rowvar=0) assert x_cov.shape == (nc, nc) d, v = np.linalg.eigh(x_cov) d = np.diag(1. / np.sqrt(d + fudge)) # This is equivalent, but seems much slower... # w = np.einsum('il,lk,jk->ij', v, d, v) w = np.dot(np.dot(v, d), v.T) self._matrix = w return w
python
def fit(self, x, fudge=1e-18): """Compute the whitening matrix. Parameters ---------- x : array An `(n_samples, n_channels)` array. """ assert x.ndim == 2 ns, nc = x.shape x_cov = np.cov(x, rowvar=0) assert x_cov.shape == (nc, nc) d, v = np.linalg.eigh(x_cov) d = np.diag(1. / np.sqrt(d + fudge)) # This is equivalent, but seems much slower... # w = np.einsum('il,lk,jk->ij', v, d, v) w = np.dot(np.dot(v, d), v.T) self._matrix = w return w
[ "def", "fit", "(", "self", ",", "x", ",", "fudge", "=", "1e-18", ")", ":", "assert", "x", ".", "ndim", "==", "2", "ns", ",", "nc", "=", "x", ".", "shape", "x_cov", "=", "np", ".", "cov", "(", "x", ",", "rowvar", "=", "0", ")", "assert", "x_...
Compute the whitening matrix. Parameters ---------- x : array An `(n_samples, n_channels)` array.
[ "Compute", "the", "whitening", "matrix", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/traces/filter.py#L72-L92
7,777
kwikteam/phy
phy/cluster/_history.py
History.current_item
def current_item(self): """Return the current element.""" if self._history and self._index >= 0: self._check_index() return self._history[self._index]
python
def current_item(self): """Return the current element.""" if self._history and self._index >= 0: self._check_index() return self._history[self._index]
[ "def", "current_item", "(", "self", ")", ":", "if", "self", ".", "_history", "and", "self", ".", "_index", ">=", "0", ":", "self", ".", "_check_index", "(", ")", "return", "self", ".", "_history", "[", "self", ".", "_index", "]" ]
Return the current element.
[ "Return", "the", "current", "element", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L28-L32
7,778
kwikteam/phy
phy/cluster/_history.py
History._check_index
def _check_index(self): """Check that the index is without the bounds of _history.""" assert 0 <= self._index <= len(self._history) - 1 # There should always be the base item at least. assert len(self._history) >= 1
python
def _check_index(self): """Check that the index is without the bounds of _history.""" assert 0 <= self._index <= len(self._history) - 1 # There should always be the base item at least. assert len(self._history) >= 1
[ "def", "_check_index", "(", "self", ")", ":", "assert", "0", "<=", "self", ".", "_index", "<=", "len", "(", "self", ".", "_history", ")", "-", "1", "# There should always be the base item at least.", "assert", "len", "(", "self", ".", "_history", ")", ">=", ...
Check that the index is without the bounds of _history.
[ "Check", "that", "the", "index", "is", "without", "the", "bounds", "of", "_history", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L39-L43
7,779
kwikteam/phy
phy/cluster/_history.py
History.iter
def iter(self, start=0, end=None): """Iterate through successive history items. Parameters ---------- end : int Index of the last item to loop through + 1. start : int Initial index for the loop (0 by default). """ if end is None: end = self._index + 1 elif end == 0: raise StopIteration() if start >= end: raise StopIteration() # Check arguments. assert 0 <= end <= len(self._history) assert 0 <= start <= end - 1 for i in range(start, end): yield self._history[i]
python
def iter(self, start=0, end=None): """Iterate through successive history items. Parameters ---------- end : int Index of the last item to loop through + 1. start : int Initial index for the loop (0 by default). """ if end is None: end = self._index + 1 elif end == 0: raise StopIteration() if start >= end: raise StopIteration() # Check arguments. assert 0 <= end <= len(self._history) assert 0 <= start <= end - 1 for i in range(start, end): yield self._history[i]
[ "def", "iter", "(", "self", ",", "start", "=", "0", ",", "end", "=", "None", ")", ":", "if", "end", "is", "None", ":", "end", "=", "self", ".", "_index", "+", "1", "elif", "end", "==", "0", ":", "raise", "StopIteration", "(", ")", "if", "start"...
Iterate through successive history items. Parameters ---------- end : int Index of the last item to loop through + 1. start : int Initial index for the loop (0 by default).
[ "Iterate", "through", "successive", "history", "items", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L51-L73
7,780
kwikteam/phy
phy/cluster/_history.py
History.add
def add(self, item): """Add an item in the history.""" self._check_index() # Possibly truncate the history up to the current point. self._history = self._history[:self._index + 1] # Append the item self._history.append(item) # Increment the index. self._index += 1 self._check_index() # Check that the current element is what was provided to the function. assert id(self.current_item) == id(item)
python
def add(self, item): """Add an item in the history.""" self._check_index() # Possibly truncate the history up to the current point. self._history = self._history[:self._index + 1] # Append the item self._history.append(item) # Increment the index. self._index += 1 self._check_index() # Check that the current element is what was provided to the function. assert id(self.current_item) == id(item)
[ "def", "add", "(", "self", ",", "item", ")", ":", "self", ".", "_check_index", "(", ")", "# Possibly truncate the history up to the current point.", "self", ".", "_history", "=", "self", ".", "_history", "[", ":", "self", ".", "_index", "+", "1", "]", "# App...
Add an item in the history.
[ "Add", "an", "item", "in", "the", "history", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L81-L92
7,781
kwikteam/phy
phy/cluster/_history.py
History.back
def back(self): """Go back in history if possible. Return the undone item. """ if self._index <= 0: return None undone = self.current_item self._index -= 1 self._check_index() return undone
python
def back(self): """Go back in history if possible. Return the undone item. """ if self._index <= 0: return None undone = self.current_item self._index -= 1 self._check_index() return undone
[ "def", "back", "(", "self", ")", ":", "if", "self", ".", "_index", "<=", "0", ":", "return", "None", "undone", "=", "self", ".", "current_item", "self", ".", "_index", "-=", "1", "self", ".", "_check_index", "(", ")", "return", "undone" ]
Go back in history if possible. Return the undone item.
[ "Go", "back", "in", "history", "if", "possible", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L94-L105
7,782
kwikteam/phy
phy/cluster/_history.py
History.forward
def forward(self): """Go forward in history if possible. Return the current item after going forward. """ if self._index >= len(self._history) - 1: return None self._index += 1 self._check_index() return self.current_item
python
def forward(self): """Go forward in history if possible. Return the current item after going forward. """ if self._index >= len(self._history) - 1: return None self._index += 1 self._check_index() return self.current_item
[ "def", "forward", "(", "self", ")", ":", "if", "self", ".", "_index", ">=", "len", "(", "self", ".", "_history", ")", "-", "1", ":", "return", "None", "self", ".", "_index", "+=", "1", "self", ".", "_check_index", "(", ")", "return", "self", ".", ...
Go forward in history if possible. Return the current item after going forward.
[ "Go", "forward", "in", "history", "if", "possible", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L110-L120
7,783
kwikteam/phy
phy/cluster/_history.py
GlobalHistory.add_to_current_action
def add_to_current_action(self, controller): """Add a controller to the current action.""" item = self.current_item self._history[self._index] = item + (controller,)
python
def add_to_current_action(self, controller): """Add a controller to the current action.""" item = self.current_item self._history[self._index] = item + (controller,)
[ "def", "add_to_current_action", "(", "self", ",", "controller", ")", ":", "item", "=", "self", ".", "current_item", "self", ".", "_history", "[", "self", ".", "_index", "]", "=", "item", "+", "(", "controller", ",", ")" ]
Add a controller to the current action.
[ "Add", "a", "controller", "to", "the", "current", "action", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L137-L140
7,784
kwikteam/phy
phy/cluster/_history.py
GlobalHistory.redo
def redo(self): """Redo the last action. This will call `redo()` on all controllers involved in this action. """ controllers = self.forward() if controllers is None: ups = () else: ups = tuple([controller.redo() for controller in controllers]) if self.process_ups is not None: return self.process_ups(ups) else: return ups
python
def redo(self): """Redo the last action. This will call `redo()` on all controllers involved in this action. """ controllers = self.forward() if controllers is None: ups = () else: ups = tuple([controller.redo() for controller in controllers]) if self.process_ups is not None: return self.process_ups(ups) else: return ups
[ "def", "redo", "(", "self", ")", ":", "controllers", "=", "self", ".", "forward", "(", ")", "if", "controllers", "is", "None", ":", "ups", "=", "(", ")", "else", ":", "ups", "=", "tuple", "(", "[", "controller", ".", "redo", "(", ")", "for", "con...
Redo the last action. This will call `redo()` on all controllers involved in this action.
[ "Redo", "the", "last", "action", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/cluster/_history.py#L159-L174
7,785
kwikteam/phy
phy/plot/base.py
_insert_glsl
def _insert_glsl(vertex, fragment, to_insert): """Insert snippets in a shader. to_insert is a dict `{(shader_type, location): snippet}`. Snippets can contain `{{ var }}` placeholders for the transformed variable name. """ # Find the place where to insert the GLSL snippet. # This is "gl_Position = transform(data_var_name);" where # data_var_name is typically an attribute. vs_regex = re.compile(r'gl_Position = transform\(([\S]+)\);') r = vs_regex.search(vertex) if not r: logger.debug("The vertex shader doesn't contain the transform " "placeholder: skipping the transform chain " "GLSL insertion.") return vertex, fragment assert r logger.log(5, "Found transform placeholder in vertex code: `%s`", r.group(0)) # Find the GLSL variable with the data (should be a `vec2`). var = r.group(1) assert var and var in vertex # Headers. vertex = to_insert['vert', 'header'] + '\n\n' + vertex fragment = to_insert['frag', 'header'] + '\n\n' + fragment # Get the pre and post transforms. vs_insert = to_insert['vert', 'before_transforms'] vs_insert += to_insert['vert', 'transforms'] vs_insert += to_insert['vert', 'after_transforms'] # Insert the GLSL snippet in the vertex shader. vertex = vs_regex.sub(indent(vs_insert), vertex) # Now, we make the replacements in the fragment shader. fs_regex = re.compile(r'(void main\(\)\s*\{)') # NOTE: we add the `void main(){` that was removed by the regex. fs_insert = '\\1\n' + to_insert['frag', 'before_transforms'] fragment = fs_regex.sub(indent(fs_insert), fragment) # Replace the transformed variable placeholder by its name. vertex = vertex.replace('{{ var }}', var) return vertex, fragment
python
def _insert_glsl(vertex, fragment, to_insert): """Insert snippets in a shader. to_insert is a dict `{(shader_type, location): snippet}`. Snippets can contain `{{ var }}` placeholders for the transformed variable name. """ # Find the place where to insert the GLSL snippet. # This is "gl_Position = transform(data_var_name);" where # data_var_name is typically an attribute. vs_regex = re.compile(r'gl_Position = transform\(([\S]+)\);') r = vs_regex.search(vertex) if not r: logger.debug("The vertex shader doesn't contain the transform " "placeholder: skipping the transform chain " "GLSL insertion.") return vertex, fragment assert r logger.log(5, "Found transform placeholder in vertex code: `%s`", r.group(0)) # Find the GLSL variable with the data (should be a `vec2`). var = r.group(1) assert var and var in vertex # Headers. vertex = to_insert['vert', 'header'] + '\n\n' + vertex fragment = to_insert['frag', 'header'] + '\n\n' + fragment # Get the pre and post transforms. vs_insert = to_insert['vert', 'before_transforms'] vs_insert += to_insert['vert', 'transforms'] vs_insert += to_insert['vert', 'after_transforms'] # Insert the GLSL snippet in the vertex shader. vertex = vs_regex.sub(indent(vs_insert), vertex) # Now, we make the replacements in the fragment shader. fs_regex = re.compile(r'(void main\(\)\s*\{)') # NOTE: we add the `void main(){` that was removed by the regex. fs_insert = '\\1\n' + to_insert['frag', 'before_transforms'] fragment = fs_regex.sub(indent(fs_insert), fragment) # Replace the transformed variable placeholder by its name. vertex = vertex.replace('{{ var }}', var) return vertex, fragment
[ "def", "_insert_glsl", "(", "vertex", ",", "fragment", ",", "to_insert", ")", ":", "# Find the place where to insert the GLSL snippet.", "# This is \"gl_Position = transform(data_var_name);\" where", "# data_var_name is typically an attribute.", "vs_regex", "=", "re", ".", "compile...
Insert snippets in a shader. to_insert is a dict `{(shader_type, location): snippet}`. Snippets can contain `{{ var }}` placeholders for the transformed variable name.
[ "Insert", "snippets", "in", "a", "shader", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L117-L165
7,786
kwikteam/phy
phy/plot/base.py
BaseVisual.on_draw
def on_draw(self): """Draw the visual.""" # Skip the drawing if the program hasn't been built yet. # The program is built by the interact. if self.program: # Draw the program. self.program.draw(self.gl_primitive_type) else: # pragma: no cover logger.debug("Skipping drawing visual `%s` because the program " "has not been built yet.", self)
python
def on_draw(self): """Draw the visual.""" # Skip the drawing if the program hasn't been built yet. # The program is built by the interact. if self.program: # Draw the program. self.program.draw(self.gl_primitive_type) else: # pragma: no cover logger.debug("Skipping drawing visual `%s` because the program " "has not been built yet.", self)
[ "def", "on_draw", "(", "self", ")", ":", "# Skip the drawing if the program hasn't been built yet.", "# The program is built by the interact.", "if", "self", ".", "program", ":", "# Draw the program.", "self", ".", "program", ".", "draw", "(", "self", ".", "gl_primitive_t...
Draw the visual.
[ "Draw", "the", "visual", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L67-L76
7,787
kwikteam/phy
phy/plot/base.py
GLSLInserter.add_transform_chain
def add_transform_chain(self, tc): """Insert the GLSL snippets of a transform chain.""" # Generate the transforms snippet. for t in tc.gpu_transforms: if isinstance(t, Clip): # Set the varying value in the vertex shader. self.insert_vert('v_temp_pos_tr = temp_pos_tr;') continue self.insert_vert(t.glsl('temp_pos_tr')) # Clipping. clip = tc.get('Clip') if clip: self.insert_frag(clip.glsl('v_temp_pos_tr'), 'before_transforms')
python
def add_transform_chain(self, tc): """Insert the GLSL snippets of a transform chain.""" # Generate the transforms snippet. for t in tc.gpu_transforms: if isinstance(t, Clip): # Set the varying value in the vertex shader. self.insert_vert('v_temp_pos_tr = temp_pos_tr;') continue self.insert_vert(t.glsl('temp_pos_tr')) # Clipping. clip = tc.get('Clip') if clip: self.insert_frag(clip.glsl('v_temp_pos_tr'), 'before_transforms')
[ "def", "add_transform_chain", "(", "self", ",", "tc", ")", ":", "# Generate the transforms snippet.", "for", "t", "in", "tc", ".", "gpu_transforms", ":", "if", "isinstance", "(", "t", ",", "Clip", ")", ":", "# Set the varying value in the vertex shader.", "self", ...
Insert the GLSL snippets of a transform chain.
[ "Insert", "the", "GLSL", "snippets", "of", "a", "transform", "chain", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L207-L219
7,788
kwikteam/phy
phy/plot/base.py
GLSLInserter.insert_into_shaders
def insert_into_shaders(self, vertex, fragment): """Apply the insertions to shader code.""" to_insert = defaultdict(str) to_insert.update({key: '\n'.join(self._to_insert[key]) + '\n' for key in self._to_insert}) return _insert_glsl(vertex, fragment, to_insert)
python
def insert_into_shaders(self, vertex, fragment): """Apply the insertions to shader code.""" to_insert = defaultdict(str) to_insert.update({key: '\n'.join(self._to_insert[key]) + '\n' for key in self._to_insert}) return _insert_glsl(vertex, fragment, to_insert)
[ "def", "insert_into_shaders", "(", "self", ",", "vertex", ",", "fragment", ")", ":", "to_insert", "=", "defaultdict", "(", "str", ")", "to_insert", ".", "update", "(", "{", "key", ":", "'\\n'", ".", "join", "(", "self", ".", "_to_insert", "[", "key", "...
Apply the insertions to shader code.
[ "Apply", "the", "insertions", "to", "shader", "code", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L221-L226
7,789
kwikteam/phy
phy/plot/base.py
BaseCanvas.add_visual
def add_visual(self, visual): """Add a visual to the canvas, and build its program by the same occasion. We can't build the visual's program before, because we need the canvas' transforms first. """ # Retrieve the visual's GLSL inserter. inserter = visual.inserter # Add the visual's transforms. inserter.add_transform_chain(visual.transforms) # Then, add the canvas' transforms. canvas_transforms = visual.canvas_transforms_filter(self.transforms) inserter.add_transform_chain(canvas_transforms) # Also, add the canvas' inserter. inserter += self.inserter # Now, we insert the transforms GLSL into the shaders. vs, fs = visual.vertex_shader, visual.fragment_shader vs, fs = inserter.insert_into_shaders(vs, fs) # Finally, we create the visual's program. visual.program = gloo.Program(vs, fs) logger.log(5, "Vertex shader: %s", vs) logger.log(5, "Fragment shader: %s", fs) # Initialize the size. visual.on_resize(self.size) # Register the visual in the list of visuals in the canvas. self.visuals.append(visual) self.events.visual_added(visual=visual)
python
def add_visual(self, visual): """Add a visual to the canvas, and build its program by the same occasion. We can't build the visual's program before, because we need the canvas' transforms first. """ # Retrieve the visual's GLSL inserter. inserter = visual.inserter # Add the visual's transforms. inserter.add_transform_chain(visual.transforms) # Then, add the canvas' transforms. canvas_transforms = visual.canvas_transforms_filter(self.transforms) inserter.add_transform_chain(canvas_transforms) # Also, add the canvas' inserter. inserter += self.inserter # Now, we insert the transforms GLSL into the shaders. vs, fs = visual.vertex_shader, visual.fragment_shader vs, fs = inserter.insert_into_shaders(vs, fs) # Finally, we create the visual's program. visual.program = gloo.Program(vs, fs) logger.log(5, "Vertex shader: %s", vs) logger.log(5, "Fragment shader: %s", fs) # Initialize the size. visual.on_resize(self.size) # Register the visual in the list of visuals in the canvas. self.visuals.append(visual) self.events.visual_added(visual=visual)
[ "def", "add_visual", "(", "self", ",", "visual", ")", ":", "# Retrieve the visual's GLSL inserter.", "inserter", "=", "visual", ".", "inserter", "# Add the visual's transforms.", "inserter", ".", "add_transform_chain", "(", "visual", ".", "transforms", ")", "# Then, add...
Add a visual to the canvas, and build its program by the same occasion. We can't build the visual's program before, because we need the canvas' transforms first.
[ "Add", "a", "visual", "to", "the", "canvas", "and", "build", "its", "program", "by", "the", "same", "occasion", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L258-L286
7,790
kwikteam/phy
phy/plot/base.py
BaseCanvas.on_resize
def on_resize(self, event): """Resize the OpenGL context.""" self.context.set_viewport(0, 0, event.size[0], event.size[1]) for visual in self.visuals: visual.on_resize(event.size) self.update()
python
def on_resize(self, event): """Resize the OpenGL context.""" self.context.set_viewport(0, 0, event.size[0], event.size[1]) for visual in self.visuals: visual.on_resize(event.size) self.update()
[ "def", "on_resize", "(", "self", ",", "event", ")", ":", "self", ".", "context", ".", "set_viewport", "(", "0", ",", "0", ",", "event", ".", "size", "[", "0", "]", ",", "event", ".", "size", "[", "1", "]", ")", "for", "visual", "in", "self", "....
Resize the OpenGL context.
[ "Resize", "the", "OpenGL", "context", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L288-L293
7,791
kwikteam/phy
phy/plot/base.py
BaseCanvas.on_draw
def on_draw(self, e): """Draw all visuals.""" gloo.clear() for visual in self.visuals: logger.log(5, "Draw visual `%s`.", visual) visual.on_draw()
python
def on_draw(self, e): """Draw all visuals.""" gloo.clear() for visual in self.visuals: logger.log(5, "Draw visual `%s`.", visual) visual.on_draw()
[ "def", "on_draw", "(", "self", ",", "e", ")", ":", "gloo", ".", "clear", "(", ")", "for", "visual", "in", "self", ".", "visuals", ":", "logger", ".", "log", "(", "5", ",", "\"Draw visual `%s`.\"", ",", "visual", ")", "visual", ".", "on_draw", "(", ...
Draw all visuals.
[ "Draw", "all", "visuals", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L295-L300
7,792
kwikteam/phy
phy/plot/base.py
BaseInteract.update
def update(self): """Update all visuals in the attached canvas.""" if not self.canvas: return for visual in self.canvas.visuals: self.update_program(visual.program) self.canvas.update()
python
def update(self): """Update all visuals in the attached canvas.""" if not self.canvas: return for visual in self.canvas.visuals: self.update_program(visual.program) self.canvas.update()
[ "def", "update", "(", "self", ")", ":", "if", "not", "self", ".", "canvas", ":", "return", "for", "visual", "in", "self", ".", "canvas", ".", "visuals", ":", "self", ".", "update_program", "(", "visual", ".", "program", ")", "self", ".", "canvas", "....
Update all visuals in the attached canvas.
[ "Update", "all", "visuals", "in", "the", "attached", "canvas", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/base.py#L324-L330
7,793
kwikteam/phy
phy/utils/cli.py
_add_log_file
def _add_log_file(filename): """Create a `phy.log` log file with DEBUG level in the current directory.""" handler = logging.FileHandler(filename) handler.setLevel(logging.DEBUG) formatter = _Formatter(fmt=_logger_fmt, datefmt='%Y-%m-%d %H:%M:%S') handler.setFormatter(formatter) logging.getLogger().addHandler(handler)
python
def _add_log_file(filename): """Create a `phy.log` log file with DEBUG level in the current directory.""" handler = logging.FileHandler(filename) handler.setLevel(logging.DEBUG) formatter = _Formatter(fmt=_logger_fmt, datefmt='%Y-%m-%d %H:%M:%S') handler.setFormatter(formatter) logging.getLogger().addHandler(handler)
[ "def", "_add_log_file", "(", "filename", ")", ":", "handler", "=", "logging", ".", "FileHandler", "(", "filename", ")", "handler", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "formatter", "=", "_Formatter", "(", "fmt", "=", "_logger_fmt", ",", "dat...
Create a `phy.log` log file with DEBUG level in the current directory.
[ "Create", "a", "phy", ".", "log", "log", "file", "with", "DEBUG", "level", "in", "the", "current", "directory", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/utils/cli.py#L46-L55
7,794
kwikteam/phy
phy/utils/cli.py
_run_cmd
def _run_cmd(cmd, ctx, glob, loc): # pragma: no cover """Run a command with optionally a debugger, IPython, or profiling.""" if PDB: _enable_pdb() if IPYTHON: from IPython import start_ipython args_ipy = ['-i', '--gui=qt'] ns = glob.copy() ns.update(loc) return start_ipython(args_ipy, user_ns=ns) # Profiling. The builtin `profile` is added in __init__. prof = __builtins__.get('profile', None) if prof: prof = __builtins__['profile'] return _profile(prof, cmd, glob, loc) return exec_(cmd, glob, loc)
python
def _run_cmd(cmd, ctx, glob, loc): # pragma: no cover """Run a command with optionally a debugger, IPython, or profiling.""" if PDB: _enable_pdb() if IPYTHON: from IPython import start_ipython args_ipy = ['-i', '--gui=qt'] ns = glob.copy() ns.update(loc) return start_ipython(args_ipy, user_ns=ns) # Profiling. The builtin `profile` is added in __init__. prof = __builtins__.get('profile', None) if prof: prof = __builtins__['profile'] return _profile(prof, cmd, glob, loc) return exec_(cmd, glob, loc)
[ "def", "_run_cmd", "(", "cmd", ",", "ctx", ",", "glob", ",", "loc", ")", ":", "# pragma: no cover", "if", "PDB", ":", "_enable_pdb", "(", ")", "if", "IPYTHON", ":", "from", "IPython", "import", "start_ipython", "args_ipy", "=", "[", "'-i'", ",", "'--gui=...
Run a command with optionally a debugger, IPython, or profiling.
[ "Run", "a", "command", "with", "optionally", "a", "debugger", "IPython", "or", "profiling", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/utils/cli.py#L58-L73
7,795
kwikteam/phy
phy/utils/cli.py
load_cli_plugins
def load_cli_plugins(cli, config_dir=None): """Load all plugins and attach them to a CLI object.""" from .config import load_master_config config = load_master_config(config_dir=config_dir) plugins = discover_plugins(config.Plugins.dirs) for plugin in plugins: if not hasattr(plugin, 'attach_to_cli'): # pragma: no cover continue logger.debug("Attach plugin `%s` to CLI.", _fullname(plugin)) # NOTE: plugin is a class, so we need to instantiate it. try: plugin().attach_to_cli(cli) except Exception as e: # pragma: no cover logger.error("Error when loading plugin `%s`: %s", plugin, e)
python
def load_cli_plugins(cli, config_dir=None): """Load all plugins and attach them to a CLI object.""" from .config import load_master_config config = load_master_config(config_dir=config_dir) plugins = discover_plugins(config.Plugins.dirs) for plugin in plugins: if not hasattr(plugin, 'attach_to_cli'): # pragma: no cover continue logger.debug("Attach plugin `%s` to CLI.", _fullname(plugin)) # NOTE: plugin is a class, so we need to instantiate it. try: plugin().attach_to_cli(cli) except Exception as e: # pragma: no cover logger.error("Error when loading plugin `%s`: %s", plugin, e)
[ "def", "load_cli_plugins", "(", "cli", ",", "config_dir", "=", "None", ")", ":", "from", ".", "config", "import", "load_master_config", "config", "=", "load_master_config", "(", "config_dir", "=", "config_dir", ")", "plugins", "=", "discover_plugins", "(", "conf...
Load all plugins and attach them to a CLI object.
[ "Load", "all", "plugins", "and", "attach", "them", "to", "a", "CLI", "object", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/utils/cli.py#L94-L109
7,796
kwikteam/phy
phy/plot/panzoom.py
PanZoom.get_mouse_pos
def get_mouse_pos(self, pos): """Return the mouse coordinates in NDC, taking panzoom into account.""" position = np.asarray(self._normalize(pos)) zoom = np.asarray(self._zoom_aspect()) pan = np.asarray(self.pan) mouse_pos = ((position / zoom) - pan) return mouse_pos
python
def get_mouse_pos(self, pos): """Return the mouse coordinates in NDC, taking panzoom into account.""" position = np.asarray(self._normalize(pos)) zoom = np.asarray(self._zoom_aspect()) pan = np.asarray(self.pan) mouse_pos = ((position / zoom) - pan) return mouse_pos
[ "def", "get_mouse_pos", "(", "self", ",", "pos", ")", ":", "position", "=", "np", ".", "asarray", "(", "self", ".", "_normalize", "(", "pos", ")", ")", "zoom", "=", "np", ".", "asarray", "(", "self", ".", "_zoom_aspect", "(", ")", ")", "pan", "=", ...
Return the mouse coordinates in NDC, taking panzoom into account.
[ "Return", "the", "mouse", "coordinates", "in", "NDC", "taking", "panzoom", "into", "account", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/panzoom.py#L228-L234
7,797
kwikteam/phy
phy/plot/panzoom.py
PanZoom.pan
def pan(self, value): """Pan translation.""" assert len(value) == 2 self._pan[:] = value self._constrain_pan() self.update()
python
def pan(self, value): """Pan translation.""" assert len(value) == 2 self._pan[:] = value self._constrain_pan() self.update()
[ "def", "pan", "(", "self", ",", "value", ")", ":", "assert", "len", "(", "value", ")", "==", "2", "self", ".", "_pan", "[", ":", "]", "=", "value", "self", ".", "_constrain_pan", "(", ")", "self", ".", "update", "(", ")" ]
Pan translation.
[ "Pan", "translation", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/panzoom.py#L245-L250
7,798
kwikteam/phy
phy/plot/panzoom.py
PanZoom.zoom
def zoom(self, value): """Zoom level.""" if isinstance(value, (int, float)): value = (value, value) assert len(value) == 2 self._zoom = np.clip(value, self._zmin, self._zmax) # Constrain bounding box. self._constrain_pan() self._constrain_zoom() self.update()
python
def zoom(self, value): """Zoom level.""" if isinstance(value, (int, float)): value = (value, value) assert len(value) == 2 self._zoom = np.clip(value, self._zmin, self._zmax) # Constrain bounding box. self._constrain_pan() self._constrain_zoom() self.update()
[ "def", "zoom", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "int", ",", "float", ")", ")", ":", "value", "=", "(", "value", ",", "value", ")", "assert", "len", "(", "value", ")", "==", "2", "self", ".", "_zoo...
Zoom level.
[ "Zoom", "level", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/panzoom.py#L258-L269
7,799
kwikteam/phy
phy/plot/panzoom.py
PanZoom.pan_delta
def pan_delta(self, d): """Pan the view by a given amount.""" dx, dy = d pan_x, pan_y = self.pan zoom_x, zoom_y = self._zoom_aspect(self._zoom) self.pan = (pan_x + dx / zoom_x, pan_y + dy / zoom_y) self.update()
python
def pan_delta(self, d): """Pan the view by a given amount.""" dx, dy = d pan_x, pan_y = self.pan zoom_x, zoom_y = self._zoom_aspect(self._zoom) self.pan = (pan_x + dx / zoom_x, pan_y + dy / zoom_y) self.update()
[ "def", "pan_delta", "(", "self", ",", "d", ")", ":", "dx", ",", "dy", "=", "d", "pan_x", ",", "pan_y", "=", "self", ".", "pan", "zoom_x", ",", "zoom_y", "=", "self", ".", "_zoom_aspect", "(", "self", ".", "_zoom", ")", "self", ".", "pan", "=", ...
Pan the view by a given amount.
[ "Pan", "the", "view", "by", "a", "given", "amount", "." ]
7e9313dc364304b7d2bd03b92938347343703003
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/panzoom.py#L271-L279