code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_moderation(request):
"""Return the list of publications that need moderation."""
with db_connect() as db_conn:
with db_conn.cursor() as cursor:
cursor.execute("""\
SELECT row_to_json(combined_rows) FROM (
SELECT id, created, publisher, publication_message,
(select array_ag... | Return the list of publications that need moderation. |
def _change_precision(self, val, base=0):
"""
Check and normalise the value of precision (must be positive integer).
Args:
val (INT): must be positive integer
base (INT): Description
Returns:
VAL (INT): Description
"""
if not isinstan... | Check and normalise the value of precision (must be positive integer).
Args:
val (INT): must be positive integer
base (INT): Description
Returns:
VAL (INT): Description |
def _add_file(self, key, path):
"""Copy a file into the reference package."""
filename = os.path.basename(path)
base, ext = os.path.splitext(filename)
if os.path.exists(self.file_path(filename)):
with tempfile.NamedTemporaryFile(
dir=self.path, prefix=base... | Copy a file into the reference package. |
def Open(self):
"""Opens the process for reading."""
self.h_process = kernel32.OpenProcess(
PROCESS_VM_READ | PROCESS_QUERY_INFORMATION, 0, self.pid)
if not self.h_process:
raise process_error.ProcessError(
"Failed to open process (pid %d)." % self.pid)
if self.Is64bit():
... | Opens the process for reading. |
def _parse_fields_http(self, *args, **kwargs):
"""
Deprecated. This will be removed in a future release.
"""
from warnings import warn
warn('IPASN._parse_fields_http() has been deprecated and will be '
'removed. You should now use IPASN.parse_fields_http().')
... | Deprecated. This will be removed in a future release. |
def enable_pretty_logging(options: Any = None, logger: logging.Logger = None) -> None:
"""Turns on formatted logging output as configured.
This is called automatically by `tornado.options.parse_command_line`
and `tornado.options.parse_config_file`.
"""
if options is None:
import tornado.opt... | Turns on formatted logging output as configured.
This is called automatically by `tornado.options.parse_command_line`
and `tornado.options.parse_config_file`. |
def interleave(*arrays,**kwargs):
'''
arr1 = [1,2,3,4]
arr2 = ['a','b','c','d']
arr3 = ['@','#','%','*']
interleave(arr1,arr2,arr3)
'''
anum = arrays.__len__()
rslt = []
length = arrays[0].__len__()
for j in range(0,length):
for i in range(0,anum):
... | arr1 = [1,2,3,4]
arr2 = ['a','b','c','d']
arr3 = ['@','#','%','*']
interleave(arr1,arr2,arr3) |
def add_or_update_records(cls, tables: I2B2Tables, records: List["ObservationFact"]) -> Tuple[int, int]:
"""
Add or update the observation_fact table as needed to reflect the contents of records
:param tables: i2b2 sql connection
:param records: records to apply
:return: number o... | Add or update the observation_fact table as needed to reflect the contents of records
:param tables: i2b2 sql connection
:param records: records to apply
:return: number of records added / modified |
def get(self, remote_file, local_file):
"""
下载文件
:param remote_file:
:param local_file:
:return:
"""
sftp = self.get_sftp()
try:
sftp.get(remote_file, local_file)
except Exception as e:
logger.error('下载文件失败')
log... | 下载文件
:param remote_file:
:param local_file:
:return: |
def com_google_fonts_check_family_equal_font_versions(ttFonts):
"""Make sure all font files have the same version value."""
all_detected_versions = []
fontfile_versions = {}
for ttFont in ttFonts:
v = ttFont['head'].fontRevision
fontfile_versions[ttFont] = v
if v not in all_detected_versions:
... | Make sure all font files have the same version value. |
def grow(self, *args):
""" Creates a region around the given point Valid arguments:
* ``grow(wh)`` - Creates a region centered on this point with a width and height of ``wh``.
* ``grow(w, h)`` - Creates a region centered on this point with a width of ``w`` and height
of ``h``.
... | Creates a region around the given point Valid arguments:
* ``grow(wh)`` - Creates a region centered on this point with a width and height of ``wh``.
* ``grow(w, h)`` - Creates a region centered on this point with a width of ``w`` and height
of ``h``.
* ``grow(Region.CREATE_X_DIRECTION... |
def locate(pattern, root=os.curdir):
    """Recursively yield the paths of all files beneath *root* whose
    basenames match the glob *pattern*.

    :param pattern: fnmatch-style glob, e.g. ``'*.txt'``
    :param root: directory to search from (default: current directory)
    """
    top = os.path.abspath(root)
    for dirpath, _subdirs, filenames in os.walk(top):
        for name in fnmatch.filter(filenames, pattern):
            yield os.path.join(dirpath, name)
def _create_checkable_action(self, text, conf_name, editorstack_method):
"""Helper function to create a checkable action.
Args:
text (str): Text to be displayed in the action.
conf_name (str): configuration setting associated with the action
editorstack_method ... | Helper function to create a checkable action.
Args:
text (str): Text to be displayed in the action.
conf_name (str): configuration setting associated with the action
editorstack_method (str): name of EditorStack class that will be
used to update the cha... |
def start(self):
""" Starts services. """
cert_path = os.path.join(self.work_dir, 'certificates')
public_keys_dir = os.path.join(cert_path, 'public_keys')
private_keys_dir = os.path.join(cert_path, 'private_keys')
client_secret_file = os.path.join(private_keys_dir, "client.key")... | Starts services. |
def cast_pars_dict(pars_dict):
"""Cast the bool and float elements of a parameters dict to
the appropriate python types.
"""
o = {}
for pname, pdict in pars_dict.items():
o[pname] = {}
for k, v in pdict.items():
if k == 'free':
o[pname][k] = bool(int(... | Cast the bool and float elements of a parameters dict to
the appropriate python types. |
def parse_na(txt: str) -> (MetarData, Units): # type: ignore
"""
Parser for the North American METAR variant
"""
units = Units(**NA_UNITS) # type: ignore
clean = core.sanitize_report_string(txt)
wxresp = {'raw': txt, 'sanitized': clean}
wxdata, wxresp['remarks'] = core.get_remarks(clean)
... | Parser for the North American METAR variant |
def multiply_slow(x, y, prim=0x11b):
'''Another equivalent (but even slower) way to compute multiplication in Galois Fields without using a precomputed look-up table.
This is the form you will most often see in academic literature, by using the standard carry-less multiplication + modular reduction usin... | Another equivalent (but even slower) way to compute multiplication in Galois Fields without using a precomputed look-up table.
This is the form you will most often see in academic literature, by using the standard carry-less multiplication + modular reduction using an irreducible prime polynomial. |
def start_receive(self, fd, data=None):
    """
    Cause :meth:`poll` to yield `data` when `fd` is readable.
    """
    # When no payload is supplied (or it is falsy), yield the descriptor
    # itself so the caller can still identify the ready fd.
    payload = data if data else fd
    self._rfds[fd] = (payload, self._generation)
    self._update(fd)
def measure_old_norse_syllable(syllable: list) -> Union[Length, None]:
"""
Old Norse syllables are considered as:
- short if
- long if
- overlong if
>>> measure_old_norse_syllable([m, a.lengthen(), l]).name
'long'
>>> measure_old_norse_syllable([a, l]).name
'short'
>>> measure... | Old Norse syllables are considered as:
- short if
- long if
- overlong if
>>> measure_old_norse_syllable([m, a.lengthen(), l]).name
'long'
>>> measure_old_norse_syllable([a, l]).name
'short'
>>> measure_old_norse_syllable([s, t, ee, r, k, r]).name
'long'
>>> measure_old_norse... |
def _create_hidden_port(self, context, network_id, device_id, fixed_ips,
port_type=DEVICE_OWNER_ROUTER_INTF):
"""Creates port used specially for HA purposes."""
port = {'port': {
'tenant_id': '', # intentionally not set
'network_id': network_id,
... | Creates port used specially for HA purposes. |
def polygen(*coefficients):
'''Polynomial generating function'''
if not coefficients:
return lambda i: 0
else:
c0 = coefficients[0]
coefficients = coefficients[1:]
def _(i):
v = c0
for c in coefficients:
v += c*i
... | Polynomial generating function |
def stopping_function(results, args=None, rstate=None, M=None,
return_vals=False):
"""
The default stopping function utilized by :class:`DynamicSampler`.
Zipped parameters are passed to the function via :data:`args`.
Assigns the run a stopping value based on a weighted average of t... | The default stopping function utilized by :class:`DynamicSampler`.
Zipped parameters are passed to the function via :data:`args`.
Assigns the run a stopping value based on a weighted average of the
stopping values for the posterior and evidence::
stop = pfrac * stop_post + (1.- pfrac) * stop_evid
... |
def _rewind(self):
    '''rewind to start of log'''
    DFReader._rewind(self)
    # Advance past any junk at the top of the file: the first valid
    # entry is the line that begins with "FMT, ".
    self.line = 0
    total = len(self.lines)
    while self.line < total and not self.lines[self.line].startswith("FMT, "):
        self.line += 1
def convertforoutput(self,outputfile):
"""Convert from one of the source formats into target format. Relevant if converters are used in OutputTemplates. Outputfile is a CLAMOutputFile instance."""
super(CharEncodingConverter,self).convertforoutput(outputfile)
return withheaders( flask.make_resp... | Convert from one of the source formats into target format. Relevant if converters are used in OutputTemplates. Outputfile is a CLAMOutputFile instance. |
def set_port_profile_created(self, vlan_id, profile_name, device_id):
"""Sets created_on_ucs flag to True."""
with self.session.begin(subtransactions=True):
port_profile = self.session.query(
ucsm_model.PortProfile).filter_by(
vlan_id=vlan_id, profile_id=p... | Sets created_on_ucs flag to True. |
def get_learning_objective_ids_metadata(self):
"""get the metadata for learning objective"""
metadata = dict(self._learning_objective_ids_metadata)
metadata.update({'existing_id_values': self.my_osid_object_form._my_map['learningObjectiveIds'][0]})
return Metadata(**metadata) | get the metadata for learning objective |
def remove_col_label(self, event=None, col=None):
"""
check to see if column is required
if it is not, delete it from grid
"""
if event:
col = event.GetCol()
if not col:
return
label = self.grid.GetColLabelValue(col)
if '**' in labe... | check to see if column is required
if it is not, delete it from grid |
def preorder(self):
    """Yield the nodes of this subtree in pre-order (root, left, right)."""
    if not self:
        return
    yield self
    # Visit the left subtree before the right, depth-first.
    for child in (self.left, self.right):
        if child:
            for node in child.preorder():
                yield node
def _get_system_volume(vm_):
'''
Construct VM system volume list from cloud profile config
'''
# Override system volume size if 'disk_size' is defined in cloud profile
disk_size = get_size(vm_)['disk']
if 'disk_size' in vm_:
disk_size = vm_['disk_size']
# Construct the system volum... | Construct VM system volume list from cloud profile config |
def TRUE(classical_reg):
"""
Produce a TRUE instruction.
:param classical_reg: A classical register to modify.
:return: An instruction object representing the equivalent MOVE.
"""
warn("`TRUE a` has been deprecated. Use `MOVE a 1` instead.")
if isinstance(classical_reg, int):
classi... | Produce a TRUE instruction.
:param classical_reg: A classical register to modify.
:return: An instruction object representing the equivalent MOVE. |
def __get_strut_token(self):
"""
Move the staged loan notes to the order stage and get the struts token
from the place order HTML.
The order will not be placed until calling _confirm_order()
Returns
-------
dict
A dict with the token name and value
... | Move the staged loan notes to the order stage and get the struts token
from the place order HTML.
The order will not be placed until calling _confirm_order()
Returns
-------
dict
A dict with the token name and value |
def script(self, s):
"""
Parse a script by compiling it.
Return a :class:`Contract` or None.
"""
try:
script = self._network.script.compile(s)
script_info = self._network.contract.info_for_script(script)
return Contract(script_info, self._netwo... | Parse a script by compiling it.
Return a :class:`Contract` or None. |
def key_wait():
"""Waits until the user presses a key.
Then returns a :any:`KeyDown` event.
Key events will repeat if held down.
A click to close the window will be converted into an Alt+F4 KeyDown event.
Returns:
tdl.event.KeyDown: The pressed key.
"""
while 1:
for event ... | Waits until the user presses a key.
Then returns a :any:`KeyDown` event.
Key events will repeat if held down.
A click to close the window will be converted into an Alt+F4 KeyDown event.
Returns:
tdl.event.KeyDown: The pressed key. |
def capture(board):
"""Try to solve the board described by board_string.
Return sequence of summaries that describe how to get to the solution.
"""
game = Game()
v = (0, 0)
stub_actor = base.Actor('capture', v, v, v, v, v, v, v, v, v)
root = base.State(board, stub_actor, stub_actor,
... | Try to solve the board described by board_string.
Return sequence of summaries that describe how to get to the solution. |
def _read_descriptions(self, password):
"""
Read and evaluate the igddesc.xml file
and the tr64desc.xml file if a password is given.
"""
descfiles = [FRITZ_IGD_DESC_FILE]
if password:
descfiles.append(FRITZ_TR64_DESC_FILE)
for descfile in descfiles:
... | Read and evaluate the igddesc.xml file
and the tr64desc.xml file if a password is given. |
def getlanguage(self, language=None, windowsversion=None):
"""
Get and return the manifest's language as string.
Can be either language-culture e.g. 'en-us' or a string indicating
language neutrality, e.g. 'x-ww' on Windows XP or 'none' on Vista
and later.
... | Get and return the manifest's language as string.
Can be either language-culture e.g. 'en-us' or a string indicating
language neutrality, e.g. 'x-ww' on Windows XP or 'none' on Vista
and later. |
def _get_account_number(self, token, uuid):
"""Get fido account number."""
# Data
data = {"accessToken": token,
"uuid": uuid}
# Http request
try:
raw_res = yield from self._session.post(ACCOUNT_URL,
d... | Get fido account number. |
def call(method, *args, **kwargs):
    '''
    Calls an arbitrary pyeapi method.
    '''
    # Strip salt-internal double-underscore kwargs before forwarding.
    cleaned = clean_kwargs(**kwargs)
    connection = pyeapi_device['connection']
    return getattr(connection, method)(*args, **cleaned)
def run_band_structure(self,
paths,
with_eigenvectors=False,
with_group_velocities=False,
is_band_connection=False,
path_connections=None,
labels=None,
... | Run phonon band structure calculation.
Parameters
----------
paths : List of array_like
Sets of qpoints that can be passed to phonopy.set_band_structure().
Numbers of qpoints can be different.
shape of each array_like : (qpoints, 3)
with_eigenvectors ... |
def save_features(self, train_features, test_features, feature_names, feature_list_id):
"""
Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array... | Save features for the training and test sets to disk, along with their metadata.
Args:
train_features: A NumPy array of features for the training set.
test_features: A NumPy array of features for the test set.
feature_names: A list containing the names of the feature columns... |
def restart(self, container, instances=None, map_name=None, **kwargs):
"""
Restarts instances for a container configuration.
:param container: Container name.
:type container: unicode | str
:param instances: Instance names to stop. If not specified, will restart all instances as... | Restarts instances for a container configuration.
:param container: Container name.
:type container: unicode | str
:param instances: Instance names to stop. If not specified, will restart all instances as specified in the
configuration (or just one default instance).
:type inst... |
def _format_conditions_and_actions(self, raw_data):
"""
This function gets a set of actions and conditionswith the following
format:
{'action-0': 'repeat',
'action-1': 'repeat',
'analysisservice-0': '30cd952b0bb04a05ac27b70ada7feab2',
'analysisservice-1': ... | This function gets a set of actions and conditionswith the following
format:
{'action-0': 'repeat',
'action-1': 'repeat',
'analysisservice-0': '30cd952b0bb04a05ac27b70ada7feab2',
'analysisservice-1': '30cd952b0bb04a05ac27b70ada7feab2',
'and_or-0': 'and',
... |
def parse_genemap2(lines):
"""Parse the omim source file called genemap2.txt
Explanation of Phenotype field:
Brackets, "[ ]", indicate "nondiseases," mainly genetic variations that
lead to apparently abnormal laboratory test values.
Braces, "{ }", indicate mutations that contribute to suscept... | Parse the omim source file called genemap2.txt
Explanation of Phenotype field:
Brackets, "[ ]", indicate "nondiseases," mainly genetic variations that
lead to apparently abnormal laboratory test values.
Braces, "{ }", indicate mutations that contribute to susceptibility to
multifactorial dis... |
def unicorn_edit(path, **kwargs):
    """Edit Unicorn node interactively.
    """
    ctx = Context(**kwargs)
    # Interactive editing has no natural deadline, so disable the timeout.
    ctx.timeout = None
    action_kwargs = {
        'unicorn': ctx.repo.create_secure_service('unicorn'),
        'path': path,
    }
    ctx.execute_action('unicorn:edit', **action_kwargs)
def _EvaluateExpressions(self, frame):
"""Evaluates watched expressions into a string form.
If expression evaluation fails, the error message is used as evaluated
expression string.
Args:
frame: Python stack frame of breakpoint hit.
Returns:
Array of strings where each string correspo... | Evaluates watched expressions into a string form.
If expression evaluation fails, the error message is used as evaluated
expression string.
Args:
frame: Python stack frame of breakpoint hit.
Returns:
Array of strings where each string corresponds to the breakpoint
expression with th... |
def runInactiveDeviceCleanup(self):
"""
Runs both the deleteInactiveDevicesByAge and the deleteInactiveDevicesByQuota
methods with the configuration that was set when calling create.
"""
yield self.deleteInactiveDevicesByQuota(
self.__inactive_per_jid_max,
... | Runs both the deleteInactiveDevicesByAge and the deleteInactiveDevicesByQuota
methods with the configuration that was set when calling create. |
def unlink(self, request, uuid=None):
    """
    Unlink all related resources, service project link and service itself.
    """
    instance = self.get_object()
    # Descendants must be unlinked first so the service has no dependants
    # left when it is destroyed.
    instance.unlink_descendants()
    self.perform_destroy(instance)
    return Response(status=status.HTTP_204_NO_CONTENT)
def product(*arrays):
""" Generate a cartesian product of input arrays.
Parameters
----------
arrays : list of array-like
1-D arrays to form the cartesian product of.
Returns
-------
out : ndarray
2-D array of shape (M, len(arrays)) containing cartesian products
for... | Generate a cartesian product of input arrays.
Parameters
----------
arrays : list of array-like
1-D arrays to form the cartesian product of.
Returns
-------
out : ndarray
2-D array of shape (M, len(arrays)) containing cartesian products
formed of input arrays. |
def _delete_unwanted_caracters(self, chain):
"""Remove not wanted char from chain
unwanted char are illegal_macro_output_chars attribute
:param chain: chain to remove char from
:type chain: str
:return: chain cleaned
:rtype: str
"""
try:
chain... | Remove not wanted char from chain
unwanted char are illegal_macro_output_chars attribute
:param chain: chain to remove char from
:type chain: str
:return: chain cleaned
:rtype: str |
def transform_txn_for_ledger(txn):
'''
Makes sure that we have integer as keys after possible deserialization from json
:param txn: txn to be transformed
:return: transformed txn
'''
txn_data = get_payload_data(txn)
txn_data[AUDIT_TXN_LEDGERS_SIZE] = {int(k): v fo... | Makes sure that we have integer as keys after possible deserialization from json
:param txn: txn to be transformed
:return: transformed txn |
def paintEvent(self, event):
"""
Overloads the paint event to support rendering of hints if there are
no items in the tree.
:param event | <QPaintEvent>
"""
super(XTextEdit, self).paintEvent(event)
if self.document().isEmpty() and s... | Overloads the paint event to support rendering of hints if there are
no items in the tree.
:param event | <QPaintEvent> |
def head(draw=True, show=True, max_shape=256):
"""Show a volumetric rendering of a human male head."""
# inspired by http://graphicsrunner.blogspot.com/2009/01/volume-rendering-102-transfer-functions.html
import ipyvolume as ipv
from scipy.interpolate import interp1d
# First part is a simpler versi... | Show a volumetric rendering of a human male head. |
def add_concept(self, concept_obj):
''' Add a concept to current concept list '''
if concept_obj is None:
raise Exception("Concept object cannot be None")
elif concept_obj in self.__concepts:
raise Exception("Concept object is already inside")
elif concept_obj.cid... | Add a concept to current concept list |
def delete(cls, resources, background=False, force=False):
"""Delete an ip by deleting the iface"""
if not isinstance(resources, (list, tuple)):
resources = [resources]
ifaces = []
for item in resources:
try:
ip_ = cls.info(item)
excep... | Delete an ip by deleting the iface |
def _weld_unary(array, weld_type, operation):
"""Apply operation on each element in the array.
As mentioned by Weld, the operations follow the behavior of the equivalent C functions from math.h
Parameters
----------
array : numpy.ndarray or WeldObject
Data
weld_type : WeldType
... | Apply operation on each element in the array.
As mentioned by Weld, the operations follow the behavior of the equivalent C functions from math.h
Parameters
----------
array : numpy.ndarray or WeldObject
Data
weld_type : WeldType
Of the data
operation : {'exp', 'log', 'sqrt', 's... |
def add_datepart(df, fldname, drop=True, time=False, errors="raise"):
"""add_datepart converts a column of df from a datetime64 to many columns containing
the information from the date. This applies changes inplace.
Parameters:
-----------
df: A pandas data frame. df gain several new columns.
f... | add_datepart converts a column of df from a datetime64 to many columns containing
the information from the date. This applies changes inplace.
Parameters:
-----------
df: A pandas data frame. df gain several new columns.
fldname: A string that is the name of the date column you wish to expand.
... |
def h_v_t(header, key):
"""
get header value with title
try to get key from header and consider case sensitive
e.g. header['x-log-abc'] or header['X-Log-Abc']
:param header:
:param key:
:return:
"""
if key not in header:
key =... | get header value with title
try to get key from header and consider case sensitive
e.g. header['x-log-abc'] or header['X-Log-Abc']
:param header:
:param key:
:return: |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: DocumentContext for this DocumentInstance
:rtype: twilio.rest.preview.sync.service.document.Docum... | Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: DocumentContext for this DocumentInstance
:rtype: twilio.rest.preview.sync.service.document.DocumentContext |
def printMe(self, selfKey, selfValue):
'''Parse the single and its value and return the parsed str.
Args:
selfTag (str): The tag. Normally just ``self.tag``
selfValue (list): a list of value elements(single, subclasses, str, int). Normally just ``self.value``
Returns:
... | Parse the single and its value and return the parsed str.
Args:
selfTag (str): The tag. Normally just ``self.tag``
selfValue (list): a list of value elements(single, subclasses, str, int). Normally just ``self.value``
Returns:
str: A parsed text |
def p_declare_list(p):
    '''declare_list : STRING EQUALS static_scalar
                    | declare_list COMMA STRING EQUALS static_scalar'''
    # NOTE: the docstring above is the PLY grammar rule — do not reword it.
    # len(p) == 4 means the first alternative matched: a single
    # STRING EQUALS static_scalar entry starts a fresh directive list.
    if len(p) == 4:
        p[0] = [ast.Directive(p[1], p[3], lineno=p.lineno(1))]
    else:
        # Recursive alternative: append one more directive to the list so far.
        p[0] = p[1] + [ast.Directive(p[3], p[5], lineno=p.lineno(2))] | declare_list : STRING EQUALS static_scalar
| declare_list COMMA STRING EQUALS static_scalar |
def get_ssh_keys(sshdir):
"""Get SSH keys"""
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_r... | Get SSH keys |
def drop_layer(self, layer):
"""Removes the named layer and the value associated with it from the node.
Parameters
----------
layer : str
Name of the layer to drop.
Raises
------
TypeError
If the node is frozen
KeyError
... | Removes the named layer and the value associated with it from the node.
Parameters
----------
layer : str
Name of the layer to drop.
Raises
------
TypeError
If the node is frozen
KeyError
If the named layer does not exist |
def _check_not_in_finally(self, node, node_name, breaker_classes=()):
"""check that a node is not inside a finally clause of a
try...finally statement.
If we found before a try...finally bloc a parent which its type is
in breaker_classes, we skip the whole check."""
# if self._tr... | check that a node is not inside a finally clause of a
try...finally statement.
If we found before a try...finally bloc a parent which its type is
in breaker_classes, we skip the whole check. |
def remove_csv_from_json(d):
"""
Remove all CSV data 'values' entries from paleoData table in the JSON structure.
:param dict d: JSON data - old structure
:return dict: Metadata dictionary without CSV values
"""
logger_jsons.info("enter remove_csv_from_json")
# Check both sections
if "p... | Remove all CSV data 'values' entries from paleoData table in the JSON structure.
:param dict d: JSON data - old structure
:return dict: Metadata dictionary without CSV values |
def _check_filepath(changes):
'''
Ensure all changes are fully qualified and affect only one file.
This ensures that the diff output works and a state change is not
incorrectly reported.
'''
filename = None
for change_ in changes:
try:
cmd, arg = change_.split(' ', 1)
... | Ensure all changes are fully qualified and affect only one file.
This ensures that the diff output works and a state change is not
incorrectly reported. |
def tostring(self, inject):
    """Get the entire text content as str"""
    # Render every child document, join with newlines, then let the
    # caller-supplied inject hook post-process the combined text.
    parts = [document.tostring(inject) for document in self.documents]
    return inject(self, '\n'.join(parts))
def define_parser(self):
""" Defines xdot grammar.
@see: http://graphviz.org/doc/info/output.html#d:xdot """
# Common constructs.
point = Group(integer.setResultsName("x") +
integer.setResultsName("y"))
n_points = (integer.setResultsName("n") +
... | Defines xdot grammar.
@see: http://graphviz.org/doc/info/output.html#d:xdot |
def sort_dict_by_key(obj):
    """
    Sort dict by its keys
    >>> sort_dict_by_key(dict(c=1, b=2, a=3, d=4))
    OrderedDict([('a', 3), ('b', 2), ('c', 1), ('d', 4)])
    """
    # Dict keys are unique, so sorting the (key, value) pairs orders
    # purely by key — the former `key=lambda x: x[0]` was redundant.
    return OrderedDict(sorted(obj.items()))
>>> sort_dict_by_key(dict(c=1, b=2, a=3, d=4))
OrderedDict([('a', 3), ('b', 2), ('c', 1), ('d', 4)]) |
def extract_args(self, data):
"""
It extracts irc msg arguments.
"""
args = []
data = data.strip(' ')
if ':' in data:
lhs, rhs = data.split(':', 1)
if lhs: args.extend(lhs.rstrip(' ').split(' '))
args.append(rhs)
else:
... | It extracts irc msg arguments. |
def set_pin_retries(ctx, pw_attempts, admin_pin, force):
"""
Manage pin-retries.
Sets the number of attempts available before locking for each PIN.
PW_ATTEMPTS should be three integer values corresponding to the number of
attempts for the PIN, Reset Code, and Admin PIN, respectively.
"""
c... | Manage pin-retries.
Sets the number of attempts available before locking for each PIN.
PW_ATTEMPTS should be three integer values corresponding to the number of
attempts for the PIN, Reset Code, and Admin PIN, respectively. |
def dt_weekofyear(x):
"""Returns the week ordinal of the year.
:returns: an expression containing the week ordinal of the year, extracted from a datetime column.
Example:
>>> import vaex
>>> import numpy as np
>>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:3... | Returns the week ordinal of the year.
:returns: an expression containing the week ordinal of the year, extracted from a datetime column.
Example:
>>> import vaex
>>> import numpy as np
>>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:34:22'], dtype=np.datetime64)
... |
def close(self):
    """Close the serial-port connection, leaving no pending operation
    behind."""
    # presumably b"@c" tells the device to wrap up and read() drains its
    # acknowledgement — TODO confirm against the device protocol.
    self._serial.write(b"@c")
    self._serial.read()
    self._serial.close()
operatoin are left |
def launch_batch_workflow(self, batch_workflow):
"""Launches GBDX batch workflow.
Args:
batch_workflow (dict): Dictionary specifying batch workflow tasks.
Returns:
Batch Workflow id (str).
"""
# hit workflow api
url = '%(base_url)s/batch_workflo... | Launches GBDX batch workflow.
Args:
batch_workflow (dict): Dictionary specifying batch workflow tasks.
Returns:
Batch Workflow id (str). |
def first_consumed_mesh(self):
"""The first consumed mesh.
:return: the first consumed mesh
:rtype: knittingpattern.Mesh.Mesh
:raises IndexError: if no mesh is consumed
.. seealso:: :attr:`number_of_consumed_meshes`
"""
for instruction in self.instructions:
... | The first consumed mesh.
:return: the first consumed mesh
:rtype: knittingpattern.Mesh.Mesh
:raises IndexError: if no mesh is consumed
.. seealso:: :attr:`number_of_consumed_meshes` |
def configure(self, options, conf):
    """Run the standard plugin configuration, then remember the parsed
    options for later use."""
    super(S3Logging, self).configure(options, conf)
    self.options = options
def detect_ts(df, max_anoms=0.10, direction='pos',
alpha=0.05, only_last=None, threshold=None,
e_value=False, longterm=False,
piecewise_median_period_weeks=2, plot=False,
y_log=False, xlabel = '', ylabel = 'count',
title=None, verbose=False):
"""... | Anomaly Detection Using Seasonal Hybrid ESD Test
A technique for detecting anomalies in seasonal univariate time series where the input is a
series of <timestamp, value> pairs.
Args:
x: Time series as a two column data frame where the first column consists of the
timestamps and the second column c... |
def total(self):
    """Return the total number of records"""
    cached = self._result_cache
    if cached:
        return cached.total
    # Nothing cached yet: materialise the full result set and read its total.
    return self.all().total
def masked_local_attention_2d(q,
k,
v,
query_shape=(8, 16),
memory_flange=(8, 16),
name=None):
"""Strided block local self-attention.
Each position in a query block ... | Strided block local self-attention.
Each position in a query block can attend to all the generated queries in
the query block, which are generated in raster scan, and positions that are
generated to the left and top. The shapes are specified by query shape and
memory flange. Note that if you're using this func... |
def dataframe_setup(self):
"""
Set-up a report to store the desired header: sanitized string combinations
"""
# Initialise a dictionary to store the sanitized headers and strings
genesippr_dict = dict()
# Try to open all the reports - use pandas to extract the results fro... | Set-up a report to store the desired header: sanitized string combinations |
def build_logits(data_ops, embed_layer, rnn_core, output_linear, name_prefix):
"""This is the core model logic.
Unrolls a Bayesian RNN over the given sequence.
Args:
data_ops: A `sequence_data.SequenceDataOps` namedtuple.
embed_layer: A `snt.Embed` instance.
rnn_core: A `snt.RNNCore` instance.
o... | This is the core model logic.
Unrolls a Bayesian RNN over the given sequence.
Args:
data_ops: A `sequence_data.SequenceDataOps` namedtuple.
embed_layer: A `snt.Embed` instance.
rnn_core: A `snt.RNNCore` instance.
output_linear: A `snt.Linear` instance.
name_prefix: A string to use to prefix lo... |
def get_locations(self, locations, columns=None, **kwargs):
"""
For list of locations and list of columns return a DataFrame of the values.
:param locations: list of index locations
:param columns: list of column names
:param kwargs: will pass along these parameters to the get()... | For list of locations and list of columns return a DataFrame of the values.
:param locations: list of index locations
:param columns: list of column names
:param kwargs: will pass along these parameters to the get() method
:return: DataFrame |
def distinct_letters(string_matrix: List[List[str]]) -> Set[str]:
"""
Diagnostic function
:param string_matrix: a data matrix: a list wrapping a list of strings, with each sublist being a sentence.
:return:
>>> dl = distinct_letters([['the', 'quick', 'brown'],['how', 'now', 'cow']])
>>> sorted(d... | Diagnostic function
:param string_matrix: a data matrix: a list wrapping a list of strings, with each sublist being a sentence.
:return:
>>> dl = distinct_letters([['the', 'quick', 'brown'],['how', 'now', 'cow']])
>>> sorted(dl)
['b', 'c', 'e', 'h', 'i', 'k', 'n', 'o', 'q', 'r', 't', 'u', 'w'] |
def batch_(self, rpc_calls):
"""Batch RPC call.
Pass array of arrays: [ [ "method", params... ], ... ]
Returns array of results.
"""
batch_data = []
for rpc_call in rpc_calls:
AuthServiceProxy.__id_count += 1
m = rpc_call.pop(0)
b... | Batch RPC call.
Pass array of arrays: [ [ "method", params... ], ... ]
Returns array of results. |
def new(cls, ns_path, script, campaign_dir, runner_type='Auto',
overwrite=False, optimized=True, check_repo=True):
"""
Create a new campaign from an ns-3 installation and a campaign
directory.
This method will create a DatabaseManager, which will install a
database i... | Create a new campaign from an ns-3 installation and a campaign
directory.
This method will create a DatabaseManager, which will install a
database in the specified campaign_dir. If a database is already
available at the ns_path described in the specified campaign_dir and
its con... |
def delete_floatingip(self, floatingip_id):
    """Delete the floating IP identified by ``floatingip_id``.

    Returns the backend response when it is truthy, otherwise ``True``.
    """
    response = self.network_conn.delete_floatingip(floatingip_id)
    # Falsy backend responses (e.g. None) are normalized to True.
    return response or True
def _emit_no_set_found(environment_name, product_name):
"""
writes to std out and logs if no connection string is found for deployment
:param environment_name:
:param product_name:
:return:
"""
sys.stdout.write(colorama.Fore.YELLOW + 'No connections found in global config file '
... | writes to std out and logs if no connection string is found for deployment
:param environment_name:
:param product_name:
:return: |
def radius_server_host_retries(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
radius_server = ET.SubElement(config, "radius-server", xmlns="urn:brocade.com:mgmt:brocade-aaa")
host = ET.SubElement(radius_server, "host")
hostname_key = ET.SubEleme... | Auto Generated Code |
def get_wind_url(self):
    """Return the wind-arrow URL for the current direction, or ``None``.

    The direction is snapped to the nearest multiple of 5 degrees before
    being substituted into ``WIND_ARROW_URL``.
    """
    direction = self.f_d.get('wind_direction', None)
    if direction is None:
        return None
    nearest = int(5 * round(float(direction) / 5))
    return WIND_ARROW_URL.format(nearest)
def execute(self):
"""
self.params = {
"ActionScriptType" : "None",
"ExecutableEntityId" : "01pd0000001yXtYAAU",
"IsDumpingHeap" : True,
"Iteration" : 1,
"Line" : 3,
... | self.params = {
"ActionScriptType" : "None",
"ExecutableEntityId" : "01pd0000001yXtYAAU",
"IsDumpingHeap" : True,
"Iteration" : 1,
"Line" : 3,
"ScopeId" : "005d00000... |
def _complete_last_byte(self, packet):
"""Pad until the packet length is a multiple of 8 (bytes)."""
padded_size = self.get_size()
padding_bytes = padded_size - len(packet)
if padding_bytes > 0:
packet += Pad(padding_bytes).pack()
return packet | Pad until the packet length is a multiple of 8 (bytes). |
def Division(left: vertex_constructor_param_types, right: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex:
"""
Divides one vertex by another
:param left: the vertex to be divided
:param right: the vertex to divide
"""
return Double(context.jvm_view().DivisionVertex, lab... | Divides one vertex by another
:param left: the vertex to be divided
:param right: the vertex to divide |
def _unparse_entry_record(self, entry):
"""
:type entry: Dict[string, List[string]]
:param entry: Dictionary holding an entry
"""
for attr_type in sorted(entry.keys()):
for attr_value in entry[attr_type]:
self._unparse_attr(attr_type, attr_value) | :type entry: Dict[string, List[string]]
:param entry: Dictionary holding an entry |
def example_load_data(self):
    """
    Load the example input data and network weights.
    """
    # Feature vector (a single 1x2 example).
    self.x = constant([[0.7, 0.9]])
    # Weight vectors: w1 is the first layer of the neural network,
    # w2 is the second layer.
    self.w1 = Variable(random_normal([2, 3], stddev=1, seed=1))
    self.w2 = Variable(random_normal([3, 1], stddev=1, seed=1))
def get_github_hostname_user_repo_from_url(url):
"""Return hostname, user and repository to fork from.
:param url: The URL to parse
:return: hostname, user, repository
"""
parsed = parse.urlparse(url)
if parsed.netloc == '':
# Probably ssh
host, sep, path = parsed.path.partition... | Return hostname, user and repository to fork from.
:param url: The URL to parse
:return: hostname, user, repository |
def add_device_not_active_callback(self, callback):
    """Register callback to be invoked when a device is not responding.

    :param callback: callable appended to the internal
        ``_cb_device_not_active`` list; invocation happens elsewhere.
    """
    _LOGGER.debug('Added new callback %s ', callback)
    self._cb_device_not_active.append(callback)
def get_as_type_with_default(self, index, value_type, default_value):
"""
Converts array element into a value defined by specied typecode.
If conversion is not possible it returns default value.
:param index: an index of element to get.
:param value_type: the TypeCode that defi... | Converts array element into a value defined by specified typecode.
If conversion is not possible it returns default value.
:param index: an index of element to get.
:param value_type: the TypeCode that defines the type of the result
:param default_value: the default value
:retu... |
def get_ip_prefixes_from_bird(filename):
"""Build a list of IP prefixes found in Bird configuration.
Arguments:
filename (str): The absolute path of the Bird configuration file.
Notes:
It can only parse a file with the following format
define ACAST_PS_ADVERTISE =
... | Build a list of IP prefixes found in Bird configuration.
Arguments:
filename (str): The absolute path of the Bird configuration file.
Notes:
It can only parse a file with the following format
define ACAST_PS_ADVERTISE =
[
10.189.200.155/32,
... |
def create_access_token_response(self, uri, http_method='GET', body=None,
headers=None, credentials=None):
"""Create an access token response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP ... | Create an access token response, with a new request token if valid.
:param uri: The full URI of the token request.
:param http_method: A valid HTTP verb, i.e. GET, POST, PUT, HEAD, etc.
:param body: The request body as a string.
:param headers: The request headers as a dict.
:pa... |
def chmod(self, mode):
"""
Change the mode (permissions) of this file. The permissions are
unix-style and identical to those used by python's C{os.chmod}
function.
@param mode: new permissions
@type mode: int
"""
self.sftp._log(DEBUG, 'chmod(%s, %r)' % (... | Change the mode (permissions) of this file. The permissions are
unix-style and identical to those used by python's C{os.chmod}
function.
@param mode: new permissions
@type mode: int |
def compute_all_sg_permutations(positions, # scaled positions
rotations, # scaled
translations, # scaled
lattice, # column vectors
symprec):
"""Compute a permutation for every space gr... | Compute a permutation for every space group operation.
See 'compute_permutation_for_rotation' for more info.
Output has shape (num_rot, num_pos) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.