code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def autocorrelation(x, lag):
"""
Calculates the autocorrelation of the specified lag, according to the formula [1]
.. math::
\\frac{1}{(n-l)\sigma^{2}} \\sum_{t=1}^{n-l}(X_{t}-\\mu )(X_{t+l}-\\mu)
where :math:`n` is the length of the time series :math:`X_i`, :math:`\sigma^2` its variance and ... | Calculates the autocorrelation of the specified lag, according to the formula [1]
.. math::
\\frac{1}{(n-l)\sigma^{2}} \\sum_{t=1}^{n-l}(X_{t}-\\mu )(X_{t+l}-\\mu)
where :math:`n` is the length of the time series :math:`X_i`, :math:`\sigma^2` its variance and :math:`\mu` its
mean. `l` denotes the... |
def paste_clipboard(self, event):
"""
Send the clipboard content as user input to the CPU.
"""
log.critical("paste clipboard")
clipboard = self.root.clipboard_get()
for line in clipboard.splitlines():
log.critical("paste line: %s", repr(line))
self... | Send the clipboard content as user input to the CPU. |
def _method_error_handler(self, response: Dict[str, Any]):
"""处理400~499段状态码,为对应的任务设置异常.
Parameters:
(response): - 响应的python字典形式数据
Return:
(bool): - 准确地说没有错误就会返回True
"""
exp = response.get('MESSAGE')
code = response.get("CODE")
ID = exp.g... | 处理400~499段状态码,为对应的任务设置异常.
Parameters:
(response): - 响应的python字典形式数据
Return:
(bool): - 准确地说没有错误就会返回True |
def _must_be_deleted(local_path, r_st):
"""Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one."""
# if the file doesn't exists
if not os.path.lexists(local_path):
return... | Return True if the remote correspondent of local_path has to be deleted.
i.e. if it doesn't exists locally or if it has a different type from the remote one. |
def receive(self):
"""Receive TCP response, looping to get whole thing or timeout."""
try:
buffer = self._socket.recv(BUFFER_SIZE)
except socket.timeout as error:
# Something is wrong, assume it's offline temporarily
_LOGGER.error("Error receiving: %s", error)... | Receive TCP response, looping to get whole thing or timeout. |
def binarize_signal(signal, treshold="auto", cut="higher"):
"""
Binarize a channel based on a continuous channel.
Parameters
----------
signal = array or list
The signal channel.
treshold = float
The treshold value by which to select the events. If "auto", takes the value betwee... | Binarize a channel based on a continuous channel.
Parameters
----------
signal = array or list
The signal channel.
treshold = float
The treshold value by which to select the events. If "auto", takes the value between the max and the min.
cut = str
"higher" or "lower", define... |
def _tracked_model_diff(self):
"""Returns changes made to model instance.
Returns None if no changes were made.
"""
initial_state = self._tracked_model_initial_state
current_state = serializer.dump_model(self)
if current_state == initial_state:
return None
... | Returns changes made to model instance.
Returns None if no changes were made. |
def wp_status(self):
'''show status of wp download'''
try:
print("Have %u of %u waypoints" % (self.wploader.count()+len(self.wp_received), self.wploader.expected_count))
except Exception:
print("Have %u waypoints" % (self.wploader.count()+len(self.wp_received))) | show status of wp download |
def is_rfc2822(instance: str):
"""Validates RFC2822 format"""
if not isinstance(instance, str):
return True
return email.utils.parsedate(instance) is not None | Validates RFC2822 format |
def stream(self):
"""
:class:`Stream` object for playing
"""
# Add song to queue
self._connection.request(
'addSongsToQueue',
{'songIDsArtistIDs': [{'artistID': self.artist.id,
'source': 'user',
... | :class:`Stream` object for playing |
def format(self, tokensource, outfile):
"""
Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
tuples and write it into ``outfile``.
This implementation calculates where it should draw each token on the
pixmap, then calculates the required pixmap size and draws ... | Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
tuples and write it into ``outfile``.
This implementation calculates where it should draw each token on the
pixmap, then calculates the required pixmap size and draws the items. |
def has_frames(self, destination):
"""
Whether specified queue has any frames.
@param destination: The queue name (destinationination).
@type destination: C{str}
@return: Whether there are any frames in the specified queue.
@rtype: C{bool}
"""
session = ... | Whether specified queue has any frames.
@param destination: The queue name (destinationination).
@type destination: C{str}
@return: Whether there are any frames in the specified queue.
@rtype: C{bool} |
def enableGroup(self):
"""Enables all radio buttons in the group."""
radioButtonListInGroup = PygWidgetsRadioButton.__PygWidgets__Radio__Buttons__Groups__Dicts__[self.group]
for radioButton in radioButtonListInGroup:
radioButton.enable() | Enables all radio buttons in the group. |
def fuzzy_index_match(possiblities, label, **kwargs):
"""Find the closest matching column label, key, or integer indexed value
Returns:
type(label): sequence of immutable objects corresponding to best matches to each object in label
if label is an int returns the object (value) in the list ... | Find the closest matching column label, key, or integer indexed value
Returns:
type(label): sequence of immutable objects corresponding to best matches to each object in label
if label is an int returns the object (value) in the list of possibilities at that index
if label is a st... |
def delete_group_policy(self, group_name, policy_name):
"""
Deletes the specified policy document for the specified group.
:type group_name: string
:param group_name: The name of the group the policy is associated with.
:type policy_name: string
:param policy_name: The ... | Deletes the specified policy document for the specified group.
:type group_name: string
:param group_name: The name of the group the policy is associated with.
:type policy_name: string
:param policy_name: The policy document to delete. |
def load_copy_of_template(self, name, *parameters):
"""Load a copy of message template saved with `Save template` when originally saved values need to be preserved
from test to test.
Optional parameters are default values for message header separated with
colon.
Examples:
... | Load a copy of message template saved with `Save template` when originally saved values need to be preserved
from test to test.
Optional parameters are default values for message header separated with
colon.
Examples:
| Load Copy Of Template | MyMessage | header_field:value | |
def expand(string, vars, local_vars={}):
"""Expand a string containing $vars as Ninja would.
Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work.
"""
def exp(m):
var = m.group(1)
if var == '$':
return '$'
ret... | Expand a string containing $vars as Ninja would.
Note: doesn't handle the full Ninja variable syntax, but it's enough
to make configure.py's use of it work. |
def __init(self):
"""loads the property data into the class"""
if self._portalId is None:
from .administration import Administration
portalSelf = Administration(url=self._securityHandler.org_url,
securityHandler=self._securityHandler,
... | loads the property data into the class |
def on_conflict(self, fields: List[Union[str, Tuple[str]]], action, index_predicate: str=None):
"""Sets the action to take when conflicts arise when attempting
to insert/create a new row.
Arguments:
fields:
The fields the conflicts can occur in.
action:
... | Sets the action to take when conflicts arise when attempting
to insert/create a new row.
Arguments:
fields:
The fields the conflicts can occur in.
action:
The action to take when the conflict occurs.
index_predicate:
... |
def _get_values(values, skipna, fill_value=None, fill_value_typ=None,
isfinite=False, copy=True, mask=None):
""" utility to get the values view, mask, dtype
if necessary copy and mask using the specified fill_value
copy = True will force the copy
"""
if is_datetime64tz_dtype(values)... | utility to get the values view, mask, dtype
if necessary copy and mask using the specified fill_value
copy = True will force the copy |
def distribute_equally(daily_data, divide=False):
"""Obtains hourly values by equally distributing the daily values.
Args:
daily_data: daily values
divide: if True, divide resulting values by the number of hours in
order to preserve the daily sum (required e.g. for precipitation).
... | Obtains hourly values by equally distributing the daily values.
Args:
daily_data: daily values
divide: if True, divide resulting values by the number of hours in
order to preserve the daily sum (required e.g. for precipitation).
Returns:
Equally distributed hourly values. |
def is_enhanced_rr_cap_valid(self):
"""Checks is enhanced route refresh capability is enabled/valid.
Checks sent and received `Open` messages to see if this session with
peer is capable of enhanced route refresh capability.
"""
if not self.recv_open_msg:
raise ValueE... | Checks is enhanced route refresh capability is enabled/valid.
Checks sent and received `Open` messages to see if this session with
peer is capable of enhanced route refresh capability. |
def tachogram(data, sample_rate, signal=False, in_seconds=False, out_seconds=False):
"""
Function for generation of ECG Tachogram.
----------
Parameters
----------
data : list
ECG signal or R peak list. When the input is a raw signal the input flag signal should be
True.
sa... | Function for generation of ECG Tachogram.
----------
Parameters
----------
data : list
ECG signal or R peak list. When the input is a raw signal the input flag signal should be
True.
sample_rate : int
Sampling frequency.
signal : boolean
If True, then the data ... |
def range(start, finish, step):
"""Like built-in :func:`~builtins.range`, but with float support"""
value = start
while value <= finish:
yield value
value += step | Like built-in :func:`~builtins.range`, but with float support |
def find(collection, query=None, user=None, password=None,
host=None, port=None, database='admin', authdb=None):
'''
Find an object or list of objects in a collection
CLI Example:
.. code-block:: bash
salt '*' mongodb.find mycollection '[{"foo": "FOO", "bar": "BAR"}]' <user> <passwor... | Find an object or list of objects in a collection
CLI Example:
.. code-block:: bash
salt '*' mongodb.find mycollection '[{"foo": "FOO", "bar": "BAR"}]' <user> <password> <host> <port> <database> |
def protorpc_to_endpoints_error(self, status, body):
"""Convert a ProtoRPC error to the format expected by Google Endpoints.
If the body does not contain an ProtoRPC message in state APPLICATION_ERROR
the status and body will be returned unchanged.
Args:
status: HTTP status of the response from ... | Convert a ProtoRPC error to the format expected by Google Endpoints.
If the body does not contain an ProtoRPC message in state APPLICATION_ERROR
the status and body will be returned unchanged.
Args:
status: HTTP status of the response from the backend
body: JSON-encoded error in format expecte... |
def _append_unknown_char(self):
'''
Appends the unknown character, in case one was encountered.
'''
if self.unknown_strategy == UNKNOWN_INCLUDE and \
self.unknown_char is not None:
self._append_to_stack(self.unknown_char)
self.unknown_char = None | Appends the unknown character, in case one was encountered. |
def create_enterprise_session(url, token=None):
"""
Create a github3.py session for a GitHub Enterprise instance
If token is not provided, will attempt to use the GITHUB_API_TOKEN
environment variable if present.
"""
gh_session = github3.enterprise_login(url=url, token=token)
if gh_sessio... | Create a github3.py session for a GitHub Enterprise instance
If token is not provided, will attempt to use the GITHUB_API_TOKEN
environment variable if present. |
def set_result(self, result, from_tree=False):
"""Set the addresses's value unless the future has been declared
read only.
Args:
result (bytes): The value at an address.
from_tree (bool): Whether the value is being set by a read from
the merkle tree.
... | Set the addresses's value unless the future has been declared
read only.
Args:
result (bytes): The value at an address.
from_tree (bool): Whether the value is being set by a read from
the merkle tree.
Returns:
None |
def confine(x,low,high):
'''Confine x to [low,high]. Values outside are set to low/high.
See also restrict.'''
y=x.copy()
y[y < low] = low
y[y > high] = high
return y | Confine x to [low,high]. Values outside are set to low/high.
See also restrict. |
def _autocomplete(client, url_part, input_text, session_token=None,
offset=None, location=None, radius=None, language=None,
types=None, components=None, strict_bounds=False):
"""
Internal handler for ``autocomplete`` and ``autocomplete_query``.
See each method's docs for ... | Internal handler for ``autocomplete`` and ``autocomplete_query``.
See each method's docs for arg details. |
def _append_integer(self, value, _file):
"""Call this function to write integer contents.
Keyword arguments:
* value - dict, content to be dumped
* _file - FileIO, output file
"""
_tabs = '\t' * self._tctr
_text = value
_labs = '{tabs}<integer>{t... | Call this function to write integer contents.
Keyword arguments:
* value - dict, content to be dumped
* _file - FileIO, output file |
def allow_rwe(self, name):
"""Allow all privileges for a particular name group (user, group, other)."""
assert name in PERMISSIONS.keys()
os.chmod(self.file_path, PERMISSIONS[name]['all']) | Allow all privileges for a particular name group (user, group, other). |
def check_result(data, key=''):
"""Check the result of an API response.
Ideally, this should be done by checking that the value of the ``resultCode``
attribute is 0, but there are endpoints that simply do not follow this rule.
Args:
data (dict): Response obtained from the API endpoint.
... | Check the result of an API response.
Ideally, this should be done by checking that the value of the ``resultCode``
attribute is 0, but there are endpoints that simply do not follow this rule.
Args:
data (dict): Response obtained from the API endpoint.
key (string): Key to check for existen... |
def prepare(self):
'''
Run before get/posts etc. Pre-flight checks:
- verify that we can speak back to them (compatible accept header)
'''
# Find an acceptable content-type
accept_header = self.request.headers.get('Accept', '*/*')
# Ignore any parameter, inclu... | Run before get/posts etc. Pre-flight checks:
- verify that we can speak back to them (compatible accept header) |
def findViewType(self, viewTypeName):
"""
Looks up the view type based on the inputed view type name.
:param viewTypeName | <str>
"""
for viewType in self._viewTypes:
if ( viewType.viewTypeName() == viewTypeName ):
return viewType
... | Looks up the view type based on the inputed view type name.
:param viewTypeName | <str> |
def repr_as_line(self, additional_columns=None, only_show=None, sep=','):
"""
Returns a representation of the host as a single line, with columns
joined by ``sep``.
:param additional_columns: Columns to show in addition to defaults.
:type additional_columns: ``list`` of ``str``
... | Returns a representation of the host as a single line, with columns
joined by ``sep``.
:param additional_columns: Columns to show in addition to defaults.
:type additional_columns: ``list`` of ``str``
:param only_show: A specific list of columns to show.
:type only_show: ``NoneT... |
def delete_data_source(self, data_source):
"""
Delete data source with it's name or ID.
data_source = { 'imap': {'name': 'data-source-name'}}
or
data_source = { 'pop3': {'id': 'data-source-id'}}
"""
source_type = [k for k in data_source.keys()][0]
complete... | Delete data source with it's name or ID.
data_source = { 'imap': {'name': 'data-source-name'}}
or
data_source = { 'pop3': {'id': 'data-source-id'}} |
def _enqueueIntoAllRemotes(self, msg: Any, signer: Signer) -> None:
"""
Enqueue the specified message into all the remotes in the nodestack.
:param msg: the message to enqueue
"""
for rid in self.remotes.keys():
self._enqueue(msg, rid, signer) | Enqueue the specified message into all the remotes in the nodestack.
:param msg: the message to enqueue |
def send_command(self, command, arg=None):
"""Sends a command to the device.
Args:
command: The command to send.
arg: Optional argument to the command.
"""
if arg is not None:
command = '%s:%s' % (command, arg)
self._write(six.StringIO(command), len(command)) | Sends a command to the device.
Args:
command: The command to send.
arg: Optional argument to the command. |
def do_api_calls_update_cache(self):
''' Do API calls and save data in cache. '''
zones = self.parse_env_zones()
data = self.group_instances(zones)
self.cache.write_to_cache(data)
self.inventory = data | Do API calls and save data in cache. |
def create_table(
self,
table_name,
obj=None,
schema=None,
database=None,
external=False,
force=False,
# HDFS options
format='parquet',
location=None,
partition=None,
like_parquet=None,
):
"""
Create a ne... | Create a new table in Impala using an Ibis table expression. This is
currently designed for tables whose data is stored in HDFS (or
eventually other filesystems).
Parameters
----------
table_name : string
obj : TableExpr or pandas.DataFrame, optional
If passed,... |
def get_filter(self, header=None, origin=1):
"""Get filter.
Often, the regions files implicitly assume the lower-left
corner of the image as a coordinate (1,1). However, the python
convetion is that the array index starts from 0. By default
(``origin=1``), coordinates of the retu... | Get filter.
Often, the regions files implicitly assume the lower-left
corner of the image as a coordinate (1,1). However, the python
convetion is that the array index starts from 0. By default
(``origin=1``), coordinates of the returned mpl artists have
coordinate shifted by (1, ... |
def calc_list(request, id=None):
# view associated to the endpoints /v1/calc/list and /v1/calc/:id/status
"""
Get a list of calculations and report their id, status, calculation_mode,
is_running, description, and a url where more detailed information
can be accessed. This is called several times by ... | Get a list of calculations and report their id, status, calculation_mode,
is_running, description, and a url where more detailed information
can be accessed. This is called several times by the Javascript.
Responses are in JSON. |
def set_content_disposition(self,
disptype: str,
quote_fields: bool=True,
**params: Any) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_heade... | Sets ``Content-Disposition`` header. |
def nice_pkg_name(name):
"""todo: Docstring for nice_pkg_name
:param name: arg description
:type name: type description
:return:
:rtype:
"""
logger.debug("%s", name)
root, ext = os.path.splitext(name)
logger.debug("root :'%s', ext: '%s'", root, ext)
if ext in ugly_ext:
... | todo: Docstring for nice_pkg_name
:param name: arg description
:type name: type description
:return:
:rtype: |
def render_with(template=None, json=False, jsonp=False):
"""
Decorator to render the wrapped function with the given template (or dictionary
of mimetype keys to templates, where the template is a string name of a template
file or a callable that returns a Response). The function's return value must be
... | Decorator to render the wrapped function with the given template (or dictionary
of mimetype keys to templates, where the template is a string name of a template
file or a callable that returns a Response). The function's return value must be
a dictionary and is passed to the template as parameters. Callable... |
def getStartNodes(fdefs,calls):
'''Return a list of nodes in fdefs that have no inbound edges'''
s=[]
for source in fdefs:
for fn in fdefs[source]:
inboundEdges=False
for call in calls:
if call.target==fn:
inboundEdges=True
if n... | Return a list of nodes in fdefs that have no inbound edges |
def _queue_into_buffer(transfersession):
"""
Takes a chunk of data from the store to be put into the buffer to be sent to another morango instance.
"""
last_saved_by_conditions = []
filter_prefixes = Filter(transfersession.filter)
server_fsic = json.loads(transfersession.server_fsic)
client_... | Takes a chunk of data from the store to be put into the buffer to be sent to another morango instance. |
def from_zeros(self, lmax, gm, r0, omega=None, errors=False,
normalization='4pi', csphase=1):
"""
Initialize the class with spherical harmonic coefficients set to zero
from degree 1 to lmax, and set the degree 0 term to 1.
Usage
-----
x = SHGravCoeffs.... | Initialize the class with spherical harmonic coefficients set to zero
from degree 1 to lmax, and set the degree 0 term to 1.
Usage
-----
x = SHGravCoeffs.from_zeros(lmax, gm, r0, [omega, errors,
normalization, csphase])
Returns... |
def _ask_for_ledger_status(self, node_name: str, ledger_id):
"""
Ask other node for LedgerStatus
"""
self.request_msg(LEDGER_STATUS, {f.LEDGER_ID.nm: ledger_id},
[node_name, ])
logger.info("{} asking {} for ledger status of ledger {}".format(self, node_na... | Ask other node for LedgerStatus |
def reissue(self, order_id, csr, software_id, organization_handle, approver_email=None,
signature_hash_algorithm=None, domain_validation_methods=None, hostnames=None,
technical_handle=None):
"""Reissue an SSL certificate order"""
response = self.request(E.reissueSslCertR... | Reissue an SSL certificate order |
def order_by(self, **kwargs):
"""
Analog to SQL "ORDER BY". +kwargs+ should only contain one item.
examples)
NO: repo.order_by()
NO: repo.order_by(id="desc", name="asc")
YES: repo.order_by(id="asc)
"""
if kwargs:
col, order = kwargs.popite... | Analog to SQL "ORDER BY". +kwargs+ should only contain one item.
examples)
NO: repo.order_by()
NO: repo.order_by(id="desc", name="asc")
YES: repo.order_by(id="asc) |
def move_file_to_file(old_path, new_path):
"""Moves file from old location to new one
:param old_path: path of file to move
:param new_path: new path
"""
try:
os.rename(old_path, new_path)
except:
old_file = os.path.basename(old_path)
... | Moves file from old location to new one
:param old_path: path of file to move
:param new_path: new path |
def parse_args(arguments=None, root=None, apply_config=False):
"""Parse the arguments from the CLI.
If apply_config then we first look up and apply configs using
apply_config_defaults.
"""
if arguments is None:
arguments = []
parser = create_parser()
args = parser.parse_args(argum... | Parse the arguments from the CLI.
If apply_config then we first look up and apply configs using
apply_config_defaults. |
def filter_db_names(paths: List[str]) -> List[str]:
"""Returns a filtered list of `paths`, where every name matches our format.
Args:
paths: A list of file names.
"""
return [
db_path
for db_path in paths
if VERSION_RE.match(os.path.basename(db_path))
] | Returns a filtered list of `paths`, where every name matches our format.
Args:
paths: A list of file names. |
def to_op(self):
"""
Extracts the modification operation from the set.
:rtype: dict, None
"""
if not self._adds and not self._removes:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
if self._removes:
... | Extracts the modification operation from the set.
:rtype: dict, None |
def send_message(self, message, room_id, **kwargs):
"""
Send a message to a given room
"""
return SendMessage(settings=self.settings, **kwargs).call(
message=message,
room_id=room_id,
**kwargs
) | Send a message to a given room |
def index_document(self, text, url):
"Index the text of a document."
## For now, use first line for title
title = text[:text.index('\n')].strip()
docwords = words(text)
docid = len(self.documents)
self.documents.append(Document(title, url, len(docwords)))
for word... | Index the text of a document. |
def get_file(self, fax_id, **kwargs): # noqa: E501
"""get a file # noqa: E501
Get your fax archive file using it's id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_file(... | get a file # noqa: E501
Get your fax archive file using it's id. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_file(fax_id, async=True)
>>> result = thread.get()
... |
def to_singular(word):
"""Attempts to singularize a word."""
if word[-1] != "s":
return word
elif word.endswith("ies"):
return word[:-3] + "y"
elif word.endswith("ses"):
return word[:-2]
else:
return word[:-1] | Attempts to singularize a word. |
def multihead_attention_2d(query_antecedent,
memory_antecedent,
total_key_depth,
total_value_depth,
output_depth,
num_heads,
attention_type="local_attention_2... | 2d Multihead scaled-dot-product attention with inp/output transformations.
Args:
query_antecedent: a Tensor with shape [batch, h, w, depth_k]
memory_antecedent: a Tensor with shape [batch, h, w, depth_k]
total_key_depth: an integer
total_value_depth: an integer
output_depth: an integer
num_he... |
def backward(self, speed=1):
"""
Drive the robot backward by running both motors backward.
:param float speed:
Speed at which to drive the motors, as a value between 0 (stopped)
and 1 (full speed). The default is 1.
"""
self.left_motor.backward(speed)
... | Drive the robot backward by running both motors backward.
:param float speed:
Speed at which to drive the motors, as a value between 0 (stopped)
and 1 (full speed). The default is 1. |
def extract(self, variable_idx):
"""
Extract a specific varaible
"""
branch = self._define_branch(variable_idx)
label = self.profiles[variable_idx].replace("\n", "")
self.label[variable_idx] = label
self.data[variable_idx] = [[], []]
with open(self... | Extract a specific varaible |
def loadTextureD3D11_Async(self, textureId, pD3D11Device):
"""Creates a D3D11 texture and loads data into it."""
fn = self.function_table.loadTextureD3D11_Async
ppD3D11Texture2D = c_void_p()
result = fn(textureId, pD3D11Device, byref(ppD3D11Texture2D))
return result, ppD3D11Text... | Creates a D3D11 texture and loads data into it. |
def send_loop():
"""
Loop indefinitely, checking queue at intervals of EMPTY_QUEUE_SLEEP and
sending messages if any are on queue.
"""
while True:
while not Message.objects.all():
logging.debug("sleeping for %s seconds before checking queue again" % EMPTY_QUEUE_SLEEP)
... | Loop indefinitely, checking queue at intervals of EMPTY_QUEUE_SLEEP and
sending messages if any are on queue. |
def _get_I(self, a, b, size, plus_transpose=True):
"""Return I matrix in Chaput's PRL paper.
None is returned if I is zero matrix.
"""
r_sum = np.zeros((3, 3), dtype='double', order='C')
for r in self._rotations_cartesian:
for i in range(3):
for j in... | Return I matrix in Chaput's PRL paper.
None is returned if I is zero matrix. |
def byte_size(self, selection=False, virtual=False):
"""Return the size in bytes the whole DataFrame requires (or the selection), respecting the active_fraction."""
bytes_per_row = 0
N = self.count(selection=selection)
extra = 0
for column in list(self.get_column_names(virtual=vi... | Return the size in bytes the whole DataFrame requires (or the selection), respecting the active_fraction. |
def CreateBitmap(self, artid, client, size):
"""Adds custom images to Artprovider"""
if artid in self.extra_icons:
return wx.Bitmap(self.extra_icons[artid], wx.BITMAP_TYPE_ANY)
else:
return wx.ArtProvider.GetBitmap(artid, client, size) | Adds custom images to Artprovider |
def read_version():
"""Read version from the first line starting with digit
"""
regex = re.compile('^(?P<number>\d.*?) .*$')
with open('../CHANGELOG.rst') as f:
for line in f:
match = regex.match(line)
if match:
return match.group('number') | Read version from the first line starting with digit |
def request(
self,
method,
url,
data=None,
headers=None,
withhold_token=False,
client_id=None,
client_secret=None,
**kwargs
):
"""Intercept all requests and add the OAuth 2 token if present."""
if not is_secure_transport(url):
... | Intercept all requests and add the OAuth 2 token if present. |
def parse(self, line):
"""Parse a line of the Nginx error log"""
csv_list = line.split(",")
date_time_message = csv_list.pop(0).split(" ", 2)
otherinfo = dict()
for item in csv_list:
key_value_pair = item.split(":", 1)
key = key_value_pair[0].strip()
... | Parse a line of the Nginx error log |
def get_tag(note_store, my_tags):
"""
get the tags from his Evernote account
:param note_store Evernote Instance
:param my_tags string
:return: array of the tag to create
"""
tag_id = []
listtags = note_store.listTags()
# cut the st... | get the tags from his Evernote account
:param note_store Evernote Instance
:param my_tags string
:return: array of the tag to create |
def __record_progress(self, next_step=None):
""" __record_progress: save progress to respective restoration file
Args: None
Returns: None
"""
config.SUSHI_BAR_CLIENT.report_progress(
self.get_status(), self.get_status().value/Status.DONE.value)
if next... | __record_progress: save progress to respective restoration file
Args: None
Returns: None |
def trace(self, data, callback=None):
"""Queue data for tracing
Args:
data (bytearray, string): Unstructured data to trace to any
connected client.
callback (callable): An optional callback that will be called with
a bool value of True when this d... | Queue data for tracing
Args:
data (bytearray, string): Unstructured data to trace to any
connected client.
callback (callable): An optional callback that will be called with
a bool value of True when this data actually gets traced.
If the ... |
def on_graphs_menu_close(self, update):
"""Return to main screen and update sensor that
are active in the view"""
logging.info("closing sensor menu, update=%s", update)
if update:
for sensor, visible_sensors in \
self.graphs_menu.active_sensors.items():
... | Return to main screen and update sensor that
are active in the view |
def pointerEvent(self, x, y, buttonmask=0):
"""Indicates either pointer movement or a pointer button press or release. The pointer is
now at (x-position, y-position), and the current state of buttons 1 to 8 are represented
by bits 0 to 7 of button-mask respectively, 0 meaning up, 1 meaning... | Indicates either pointer movement or a pointer button press or release. The pointer is
now at (x-position, y-position), and the current state of buttons 1 to 8 are represented
by bits 0 to 7 of button-mask respectively, 0 meaning up, 1 meaning down (pressed). |
def indent(rows, hasHeader=False, headerChar='-', delim=' | ', justify='left',
separateRows=False, prefix='', postfix='', wrapfunc=lambda x: x):
'''Indents a table by column.
- rows: A sequence of sequences of items, one sequence per row.
- hasHeader: True if the first row c... | Indents a table by column.
- rows: A sequence of sequences of items, one sequence per row.
- hasHeader: True if the first row consists of the columns' names.
- headerChar: Character to be used for the row separator line
(if hasHeader==True or separateRows==True).
... |
def register_on_medium_changed(self, callback):
"""Set the callback function to consume on medium changed events.
Callback receives a IMediumChangedEvent object.
Returns the callback_id
"""
event_type = library.VBoxEventType.on_medium_changed
return self.event_source.re... | Set the callback function to consume on medium changed events.
Callback receives a IMediumChangedEvent object.
Returns the callback_id |
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get("name"):
key = user["name"]
else:
... | Fancy display. |
def get_clean_factor_and_forward_returns(factor,
prices,
groupby=None,
binning_by_group=False,
quantiles=5,
bins=No... | Formats the factor data, pricing data, and group mappings into a DataFrame
that contains aligned MultiIndex indices of timestamp and asset. The
returned data will be formatted to be suitable for Alphalens functions.
It is safe to skip a call to this function and still make use of Alphalens
functionalit... |
def get_selection(self, name="default"):
"""Get the current selection object (mostly for internal use atm)."""
name = _normalize_selection_name(name)
selection_history = self.selection_histories[name]
index = self.selection_history_indices[name]
if index == -1:
return... | Get the current selection object (mostly for internal use atm). |
def wait_to_end(self, pids=[]):
'''
wait_to_end(self, pids=[])
Wait for processes to finish
:Parameters:
* *pids* (`list`) -- list of processes to wait to finish
'''
actual_pids = self._get_pids(pids)
return self.wait_for(pids=actual_pids, status_list=p... | wait_to_end(self, pids=[])
Wait for processes to finish
:Parameters:
* *pids* (`list`) -- list of processes to wait to finish |
def get_plugins(**kwargs):
"""
Get all available plugins
"""
plugins = []
plugin_paths = []
#Look in directory or set of directories for
#plugins
base_plugin_dir = config.get('plugin', 'default_directory')
plugin_xsd_path = config.get('plugin', 'plugin_xsd_path')
... | Get all available plugins |
def supportsType(self, type_uri):
    """Report whether this endpoint supports the given type URI.

    A C{/server} endpoint is treated as implicitly supporting
    C{/signon}: if ``type_uri`` is the OpenID 2.0 signon type and this
    endpoint is an OP identifier, it counts as supported even when the
    type is absent from ``self.type_uris``.
    """
    # Direct match against the advertised types first; only fall back
    # to the implicit /server-implies-/signon rule when that fails.
    if type_uri in self.type_uris:
        return True
    return type_uri == OPENID_2_0_TYPE and self.isOPIdentifier()
I consider C{/server} endpoints to implicitly support C{/signon}. |
def _generate_token(self):
"""Create authentation to use with requests."""
session = self.get_session()
url = self.__base_url('magicBox.cgi?action=getMachineName')
try:
# try old basic method
auth = requests.auth.HTTPBasicAuth(self._user, self._password)
... | Create authentation to use with requests. |
def run(self, eps=1e-4, kill=True, max_steps=50, verbose=False):
r"""Perform the clustering on the input components updating the initial
guess. The result is available in the member ``self.g``.
Return the number of iterations at convergence, or None.
:param eps:
If relativ... | r"""Perform the clustering on the input components updating the initial
guess. The result is available in the member ``self.g``.
Return the number of iterations at convergence, or None.
:param eps:
If relative change of distance between current and last step falls below ``eps``,
... |
def _padding_to_conv_op_padding(padding):
"""Whether to use SAME or VALID for the underlying convolution op.
Args:
padding: A tuple of members of ALLOWED_PADDINGS, e.g. as returned from
`_fill_and_verify_padding`.
Returns:
One of CONV_OP_ALLOWED_PADDINGS, the padding method to use for the
unde... | Whether to use SAME or VALID for the underlying convolution op.
Args:
padding: A tuple of members of ALLOWED_PADDINGS, e.g. as returned from
`_fill_and_verify_padding`.
Returns:
One of CONV_OP_ALLOWED_PADDINGS, the padding method to use for the
underlying convolution op.
Raises:
ValueErro... |
def renew_item(self, item, expiration):
"""Update the expiration time for ``item``.
The item will remain checked out for ``expiration`` seconds
beyond the current time. This queue instance must have
already checked out ``item``, and this method can fail if
``item`` is already o... | Update the expiration time for ``item``.
The item will remain checked out for ``expiration`` seconds
beyond the current time. This queue instance must have
already checked out ``item``, and this method can fail if
``item`` is already overdue. |
def _run_all(cmd, log_lvl=None, log_msg=None, exitcode=0):
'''
Simple wrapper around cmd.run_all
log_msg can contain {0} for stderr
:return: True or stdout, False if retcode wasn't exitcode
'''
res = __salt__['cmd.run_all'](cmd)
if res['retcode'] == exitcode:
if res['stdout']:
... | Simple wrapper around cmd.run_all
log_msg can contain {0} for stderr
:return: True or stdout, False if retcode wasn't exitcode |
def _normalize_dir(string_):
    '''
    Normalize the directory path so that two spellings of the same
    directory compare equal (decode to unicode, then normpath).
    '''
    unicode_path = salt.utils.stringutils.to_unicode(string_)
    return os.path.normpath(unicode_path)
def do_group(self):
    """
    Group this machine under the configured group at registration time.
    """
    # The only system in the payload is the current machine, keyed by
    # its generated machine id.
    payload = {'machine_id': generate_machine_id()}
    self.group_systems(self.config.group, payload)
def personsAtHome(self, home=None):
"""
Return the list of known persons who are currently at home
"""
if not home: home = self.default_home
home_data = self.homeByName(home)
atHome = []
for p in home_data['persons']:
#Only check known persons
... | Return the list of known persons who are currently at home |
def safe_copyfile(src, dest):
    """Safely copy ``src`` to ``dest``.

    The data is written to a temporary file created in ``dest``'s
    directory and then atomically renamed onto ``dest``, so readers
    never observe a partially-written destination file.

    :param src: path of the file to copy.
    :param dest: destination path; an existing file is replaced.
    :raises OSError: on I/O failure; the temporary file is removed.
    """
    fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(dest))
    try:
        # Context managers close both the source handle and the temp fd
        # even if the copy fails (the original leaked the src handle).
        with open(src, 'rb') as fsrc, os.fdopen(fd, 'wb') as fdst:
            shutil.copyfileobj(fsrc, fdst)
        shutil.copystat(src, tmpname)
        # os.replace is an atomic overwrite on POSIX and, unlike
        # os.rename, also overwrites an existing dest on Windows.
        os.replace(tmpname, dest)
    except Exception:
        # Best-effort cleanup so a failed copy doesn't leave a stray
        # temp file next to dest.
        try:
            os.unlink(tmpname)
        except OSError:
            pass
        raise
to dest |
def get_resource_query_session(self, proxy):
"""Gets a resource query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.resource.ResourceQuerySession) - ``a
ResourceQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed... | Gets a resource query session.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.resource.ResourceQuerySession) - ``a
ResourceQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemen... |
def _raise_from_invalid_response(error):
"""Re-wrap and raise an ``InvalidResponse`` exception.
:type error: :exc:`google.resumable_media.InvalidResponse`
:param error: A caught exception from the ``google-resumable-media``
library.
:raises: :class:`~google.cloud.exceptions.GoogleClo... | Re-wrap and raise an ``InvalidResponse`` exception.
:type error: :exc:`google.resumable_media.InvalidResponse`
:param error: A caught exception from the ``google-resumable-media``
library.
:raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding
to the faile... |
def logsumexp(arr, axis=0):
"""Computes the sum of arr assuming arr is in the log domain.
Returns log(sum(exp(arr))) while minimizing the possibility of
over/underflow.
Examples
--------
>>> import numpy as np
>>> from sklearn.utils.extmath import logsumexp
>>> a = np.arange(10)
>... | Computes the sum of arr assuming arr is in the log domain.
Returns log(sum(exp(arr))) while minimizing the possibility of
over/underflow.
Examples
--------
>>> import numpy as np
>>> from sklearn.utils.extmath import logsumexp
>>> a = np.arange(10)
>>> np.log(np.sum(np.exp(a)))
9.... |
def from_dict(data, ctx):
"""
Instantiate a new GuaranteedStopLossOrderLevelRestriction from a dict
(generally from loading a JSON response). The data used to instantiate
the GuaranteedStopLossOrderLevelRestriction is a shallow copy of the
dict passed in, with any complex child t... | Instantiate a new GuaranteedStopLossOrderLevelRestriction from a dict
(generally from loading a JSON response). The data used to instantiate
the GuaranteedStopLossOrderLevelRestriction is a shallow copy of the
dict passed in, with any complex child types instantiated
appropriately. |
def get_stories(self, story_type='', limit=30):
"""
Yields a list of stories from the passed page
of HN.
'story_type' can be:
\t'' = top stories (homepage) (default)
\t'news2' = page 2 of top stories
\t'newest' = most recent stories
\t'best' = best... | Yields a list of stories from the passed page
of HN.
'story_type' can be:
\t'' = top stories (homepage) (default)
\t'news2' = page 2 of top stories
\t'newest' = most recent stories
\t'best' = best stories
'limit' is the number of stories required from the... |
def copy_and_sum_families(family_source, family_target):
""" methods iterates thru source family and copies its entries to target family
in case key already exists in both families - then the values are added"""
for every in family_source:
if every not in family_target:
family_target[eve... | methods iterates thru source family and copies its entries to target family
in case key already exists in both families - then the values are added |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.