code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _bfd_rx(self, **kwargs):
"""Return the BFD minimum receive interval XML.
You should not use this method.
You probably want `BGP.bfd`.
Args:
min_rx (str): BFD receive interval in milliseconds (300, 500, etc)
delete (bool): Remove the configuration if ``True``... | Return the BFD minimum receive interval XML.
You should not use this method.
You probably want `BGP.bfd`.
Args:
min_rx (str): BFD receive interval in milliseconds (300, 500, etc)
delete (bool): Remove the configuration if ``True``.
Returns:
XML to b... |
def is_complex(self) -> bool:
"""
Whether the field is "complex" eg. env variables should be parsed as JSON.
"""
from .main import BaseModel # noqa: F811
return (
self.shape != Shape.SINGLETON
or lenient_issubclass(self.type_, (BaseModel, list, set, dict... | Whether the field is "complex" eg. env variables should be parsed as JSON. |
def _sitelist(self, matrix):
"""
Returns a list of sites from a SiteMatrix, optionally filtered
by 'domain' param
"""
_list = []
for item in matrix:
sites = []
if isinstance(matrix[item], list):
sites = matrix[item]
eli... | Returns a list of sites from a SiteMatrix, optionally filtered
by 'domain' param |
def write_meta(self):
"""Writes all meta data, ucd,description and units
The default implementation is to write this to a file called meta.yaml in the directory defined by
:func:`DataFrame.get_private_dir`. Other implementation may store this in the DataFrame file itself.
(For instance ... | Writes all meta data, ucd,description and units
The default implementation is to write this to a file called meta.yaml in the directory defined by
:func:`DataFrame.get_private_dir`. Other implementation may store this in the DataFrame file itself.
(For instance the vaex hdf5 implementation does... |
def _sync_outlineexplorer_file_order(self):
"""
Order the root file items of the outline explorer as in the tabbar
of the current EditorStack.
"""
if self.outlineexplorer is not None:
self.outlineexplorer.treewidget.set_editor_ids_order(
[finfo.... | Order the root file items of the outline explorer as in the tabbar
of the current EditorStack. |
def _check_axis(self, ds, name):
'''
Checks that the axis attribute is a string and an allowed value, namely
one of 'T', 'X', 'Y', or 'Z'.
:param netCDF4.Dataset ds: An open netCDF dataset
:param str name: Name of the variable
:rtype: compliance_checker.base.Result
... | Checks that the axis attribute is a string and an allowed value, namely
one of 'T', 'X', 'Y', or 'Z'.
:param netCDF4.Dataset ds: An open netCDF dataset
:param str name: Name of the variable
:rtype: compliance_checker.base.Result |
def grad(self, params, epsilon=0.0001):
"""Used to check gradient estimation through slope approximation."""
grad = []
for x in range(len(params)):
temp = np.copy(params)
temp[x] += epsilon
temp2 = np.copy(params)
temp2[x] -= epsilon
gr... | Used to check gradient estimation through slope approximation. |
def _parse_source_sections(self, diff_str):
"""
Given the output of `git diff`, return a dictionary
with keys that are source file paths.
Each value is a list of lines from the `git diff` output
related to the source file.
Raises a `GitDiffError` if `diff_str` is in an ... | Given the output of `git diff`, return a dictionary
with keys that are source file paths.
Each value is a list of lines from the `git diff` output
related to the source file.
Raises a `GitDiffError` if `diff_str` is in an invalid format. |
def GetScriptHashesForVerifying(self):
"""
Get a list of script hashes for verifying transactions.
Raises:
Exception: if there are no valid assets in the transaction.
Returns:
list: of UInt160 type script hashes.
"""
if not self.References and le... | Get a list of script hashes for verifying transactions.
Raises:
Exception: if there are no valid assets in the transaction.
Returns:
list: of UInt160 type script hashes. |
def copy(self):
"""Make a copy of this instance.
Copies the local data stored as simple types and copies the client
attached to this instance.
:rtype: :class:`~google.cloud.spanner_v1.instance.Instance`
:returns: A copy of the current instance.
"""
new_client = ... | Make a copy of this instance.
Copies the local data stored as simple types and copies the client
attached to this instance.
:rtype: :class:`~google.cloud.spanner_v1.instance.Instance`
:returns: A copy of the current instance. |
def update_entity(self, entity, if_match='*'):
'''
Adds an update entity operation to the batch. See
:func:`~azure.storage.table.tableservice.TableService.update_entity` for more
information on updates.
The operation will not be executed until the batch is committed.
... | Adds an update entity operation to the batch. See
:func:`~azure.storage.table.tableservice.TableService.update_entity` for more
information on updates.
The operation will not be executed until the batch is committed.
:param entity:
The entity to update. Could be a... |
def fasta(self):
    '''
    str: Returns the sequence, as a FASTA-formatted string

    Note: The FASTA string is built using ``Sequence.id`` and ``Sequence.sequence``.
    '''
    # Build the FASTA representation lazily and cache it on the instance.
    cached = self._fasta
    if not cached:
        cached = '>{}\n{}'.format(self.id, self.sequence)
        self._fasta = cached
    return cached
def encode(strs):
    """Encodes a list of strings to a single string.

    Each string is emitted as ``<length>:<string>`` so the result can be
    decoded unambiguously even when strings contain ':' or are empty.

    :type strs: List[str]
    :rtype: str
    """
    # Bug fix: the original iterated ``strs.split()``, which raises
    # AttributeError for the documented List[str] input (and would mangle
    # a plain-string input). Iterate the list directly, and join once
    # instead of building the result with quadratic ``+=``.
    return ''.join('{}:{}'.format(len(string), string) for string in strs)
def notes(path):
"""This function extracts any experimental notes from a ProCoDA data file.
:param path: The file path of the ProCoDA data file. If the file is in the working directory, then the file name is sufficient.
:type path: string
:return: The rows of the data file that contain text notes inse... | This function extracts any experimental notes from a ProCoDA data file.
:param path: The file path of the ProCoDA data file. If the file is in the working directory, then the file name is sufficient.
:type path: string
:return: The rows of the data file that contain text notes inserted during the experime... |
def find_commands(cls):
    """ Finds commands by finding the subclasses of Command"""
    # Depth-first walk: each direct subclass is recorded, then its own
    # subclass tree is appended via recursion.
    found = []
    for child in cls.__subclasses__():
        found += [child] + find_commands(child)
    return found
def get_bestfit_line(self, x_min=None, x_max=None, resolution=None):
"""
Method to get bestfit line using the defined
self.bestfit_func method
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
... | Method to get bestfit line using the defined
self.bestfit_func method
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
maximum x value of the line
resolution: int, default=1000
... |
def time(self):
"""
Returns the current time for this edit.
:return <QtCore.QTime>
"""
if self.isMilitaryTime():
format = 'hh:mm:ss'
time_of_day = ''
else:
format = 'hh:mm:ssap'
time_of_day = self._timeOfDayC... | Returns the current time for this edit.
:return <QtCore.QTime> |
def get_objective_banks(self):
"""Pass through to provider ObjectiveBankLookupSession.get_objective_banks"""
# Implemented from kitosid template for -
# osid.resource.BinLookupSession.get_bins_template
catalogs = self._get_provider_session('objective_bank_lookup_session').get_objective_b... | Pass through to provider ObjectiveBankLookupSession.get_objective_banks |
def sample_distinct(self, n_to_sample, **kwargs):
"""Sample a sequence of items from the pool until a minimum number of
distinct items are queried
Parameters
----------
n_to_sample : int
number of distinct items to sample. If sampling with replacement,
th... | Sample a sequence of items from the pool until a minimum number of
distinct items are queried
Parameters
----------
n_to_sample : int
number of distinct items to sample. If sampling with replacement,
this number is not necessarily the same as the number of
... |
def regex(pattern, prompt=None, empty=False, flags=0):
"""Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an e... | Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
flags : int, optional
Flags that wi... |
def retrieve_by_id(self, id_):
    """Return a JSSObject for the element with ID id_.

    Returns None when zero or multiple elements carry that ID (the
    implicit behavior of the previous version, made explicit here).
    """
    wanted = int(id_)
    matches = [entry for entry in self if entry.id == wanted]
    if len(matches) != 1:
        return None
    return matches[0].retrieve()
def start_worker(self):
"""Trigger new process as a RQ worker."""
if not self.include_rq:
return None
worker = Worker(queues=self.queues,
connection=self.connection)
worker_pid_path = current_app.config.get(
"{}_WORKER_PID".format(self.con... | Trigger new process as a RQ worker. |
def server(self):
""" Returns :class:`plexapi.myplex.MyPlexResource` with server of current item. """
server = [s for s in self._server.resources() if s.clientIdentifier == self.machineIdentifier]
if len(server) == 0:
raise NotFound('Unable to find server with uuid %s' % self.machine... | Returns :class:`plexapi.myplex.MyPlexResource` with server of current item. |
def token(self):
    """
    Token given by Transbank for payment initialization url.
    Will raise PaymentError when an error ocurred.
    """
    # Early return on the cached token; only hit Transbank (and log the
    # payment) when nothing is cached yet.
    if self._token:
        return self._token
    self._token = self.fetch_token()
    logger.payment(self)
    return self._token
def save(self, model_filename, optimizer_filename):
    """ Save the state of the model & optimizer to disk """
    # One HDF5 file per component, written via the module-level
    # ``serializers`` (presumably chainer-style -- TODO confirm).
    for target_file, component in (
            (model_filename, self.model),
            (optimizer_filename, self.optimizer)):
        serializers.save_hdf5(target_file, component)
def database_admin_api(self):
    """Helper for session-related API calls.

    Lazily constructs a DatabaseAdminClient bound to this instance's
    credentials on first use, then reuses the cached client.
    """
    client = self._database_admin_api
    if client is None:
        client = DatabaseAdminClient(
            credentials=self.credentials, client_info=_CLIENT_INFO
        )
        self._database_admin_api = client
    return client
def retrieve_equities(self, sids):
"""
Retrieve Equity objects for a list of sids.
Users generally shouldn't need to this method (instead, they should
prefer the more general/friendly `retrieve_assets`), but it has a
documented interface and tests because it's used upstream.
... | Retrieve Equity objects for a list of sids.
Users generally shouldn't need to this method (instead, they should
prefer the more general/friendly `retrieve_assets`), but it has a
documented interface and tests because it's used upstream.
Parameters
----------
sids : iter... |
def Readdir(self, path, fh=None):
"""Reads a directory given by path.
Args:
path: The path to list children of.
fh: A file handler. Not used.
Yields:
A generator of filenames.
Raises:
FuseOSError: If we try and list a file.
"""
del fh
# We can't read a path if it... | Reads a directory given by path.
Args:
path: The path to list children of.
fh: A file handler. Not used.
Yields:
A generator of filenames.
Raises:
FuseOSError: If we try and list a file. |
def from_json(cls, json_doc):
"""
Create and return a new Session Token based on the contents
of a JSON document.
:type json_doc: str
:param json_doc: A string containing a JSON document with a
previously saved Credentials object.
"""
d = json.loads(j... | Create and return a new Session Token based on the contents
of a JSON document.
:type json_doc: str
:param json_doc: A string containing a JSON document with a
previously saved Credentials object. |
def is_exported(bundle):
""" Returns True if dataset is already exported to CKAN. Otherwise returns False. """
if not ckan:
raise EnvironmentError(MISSING_CREDENTIALS_MSG)
params = {'q': 'name:{}'.format(bundle.dataset.vid.lower())}
resp = ckan.action.package_search(**params)
return len(resp... | Returns True if dataset is already exported to CKAN. Otherwise returns False. |
def find_cell_end(self, lines):
"""Return position of end of cell marker, and position
of first line after cell"""
if self.in_region:
self.cell_type = 'markdown'
for i, line in enumerate(lines):
if self.end_region_re.match(line):
return... | Return position of end of cell marker, and position
of first line after cell |
def anneal(self, mode, matches, orig_matches):
""" Perform post-processing.
Return True when any changes were applied.
"""
changed = False
def dupes_in_matches():
"""Generator for index of matches that are dupes."""
items_by_path = config.engine.grou... | Perform post-processing.
Return True when any changes were applied. |
def _fix_quantities(tree):
'''
Stupidly simple function to fix any Items/Quantity disparities inside a
DistributionConfig block before use. Since AWS only accepts JSON-encodable
data types, this implementation is "good enough" for our purposes.
'''
if isinstance(tree, dict):
tree = {k: _... | Stupidly simple function to fix any Items/Quantity disparities inside a
DistributionConfig block before use. Since AWS only accepts JSON-encodable
data types, this implementation is "good enough" for our purposes. |
def _set_fcoe_fcf_map(self, v, load=False):
"""
Setter method for fcoe_fcf_map, mapped from YANG variable /fcoe/fcoe_fabric_map/fcoe_fcf_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_fcoe_fcf_map is considered as a private
method. Backends looking to pop... | Setter method for fcoe_fcf_map, mapped from YANG variable /fcoe/fcoe_fabric_map/fcoe_fcf_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_fcoe_fcf_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj... |
def resolve_dst(self, dst_dir, src):
"""
finds the destination based on source
if source is an absolute path, and there's no pattern, it copies the file to base dst_dir
"""
if os.path.isabs(src):
return os.path.join(dst_dir, os.path.basename(src))
return os.pa... | finds the destination based on source
if source is an absolute path, and there's no pattern, it copies the file to base dst_dir |
def get_activity_admin_session_for_objective_bank(self, objective_bank_id=None):
"""Gets the OsidSession associated with the activity admin service
for the given objective bank.
arg: objectiveBankId (osid.id.Id): the Id of the objective
bank
return: (osid.learning.Act... | Gets the OsidSession associated with the activity admin service
for the given objective bank.
arg: objectiveBankId (osid.id.Id): the Id of the objective
bank
return: (osid.learning.ActivityAdminSession) - an
ActivityAdminSession
raise: NotFound - obje... |
def _make_parent(self):
"""Creates a parent key for the current path.
Extracts all but the last element in the key path and creates a new
key, while still matching the namespace and the project.
:rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
:returns: A ne... | Creates a parent key for the current path.
Extracts all but the last element in the key path and creates a new
key, while still matching the namespace and the project.
:rtype: :class:`google.cloud.datastore.key.Key` or :class:`NoneType`
:returns: A new ``Key`` instance, whose path cons... |
def merge_with(self, another, ubound=None, top_id=None):
"""
This method merges a tree of the current :class:`ITotalizer`
object, with a tree of another object and (if needed) increases a
potential upper bound that can be imposed on the complete list of
literals i... | This method merges a tree of the current :class:`ITotalizer`
object, with a tree of another object and (if needed) increases a
potential upper bound that can be imposed on the complete list of
literals in the sum of an existing :class:`ITotalizer` object to a
new value.
... |
def _prep_datum(self, datum, dialect, col, needs_conversion):
"""Puts a value in proper format for a SQL string"""
if datum is None or (needs_conversion and not str(datum).strip()):
return 'NULL'
pytype = self.columns[col]['pytype']
if needs_conversion:
if pytype... | Puts a value in proper format for a SQL string |
def add_graph(patterns, G):
"""Add a graph to a set of unique patterns."""
if not patterns:
patterns.append([G])
return
for i, graphs in enumerate(patterns):
if networkx.is_isomorphic(graphs[0], G, node_match=type_match,
edge_match=type_match):
... | Add a graph to a set of unique patterns. |
def exists(self):
    """Return True if a row with this instance's id is already in the database.

    Issues a lookup through the model's ``query`` attribute and checks
    whether any row came back.
    """
    # ``first()`` yields None when no row matches; return the comparison
    # directly instead of the verbose if/else the old code used.
    return self.query.filter_by(id=self.id).first() is not None
def _get_size(size,
size_max,
size_min,
default_max,
default_min):
"""
Helper method for providing a size,
or a range to randomize from
"""
if len(default_max) != len(default_min):
raise ValueError('default_max = {} and default_... | Helper method for providing a size,
or a range to randomize from |
def fit(self, X, y=None, **kwargs):
"""
Fits n KMeans models where n is the length of ``self.k_values_``,
storing the silhouette scores in the ``self.k_scores_`` attribute.
The "elbow" and silhouette score corresponding to it are stored in
``self.elbow_value`` and ``self.elbow_sc... | Fits n KMeans models where n is the length of ``self.k_values_``,
storing the silhouette scores in the ``self.k_scores_`` attribute.
The "elbow" and silhouette score corresponding to it are stored in
``self.elbow_value`` and ``self.elbow_score`` respectively.
This method finishes up by c... |
def to_string(cls, error_code):
"""Returns the string message for the given error code.
Args:
cls (JLinkRTTErrors): the ``JLinkRTTErrors`` class
error_code (int): error code to convert
Returns:
An error string corresponding to the error code.
Raises:
... | Returns the string message for the given error code.
Args:
cls (JLinkRTTErrors): the ``JLinkRTTErrors`` class
error_code (int): error code to convert
Returns:
An error string corresponding to the error code.
Raises:
ValueError: if the error code is inva... |
def _prep_cnv_file(cns_file, svcaller, work_dir, data):
"""Create a CSV file of CNV calls with log2 and number of marks.
"""
in_file = cns_file
out_file = os.path.join(work_dir, "%s-%s-prep.csv" % (utils.splitext_plus(os.path.basename(in_file))[0],
... | Create a CSV file of CNV calls with log2 and number of marks. |
def information_coefficient(total1, total2, intersect):
    '''A simple Jaccard-style information coefficient to compare two lists
    of overlaps/diffs.

    :param total1: size of the first list
    :param total2: size of the second list
    :param intersect: collection of shared elements
    :return: ``2 * len(intersect) / (total1 + total2)``, or 0.0 when both
        sizes are zero (the old code raised ZeroDivisionError there)
    '''
    total = total1 + total2
    if total == 0:
        # Two empty lists share everything and nothing; define the
        # coefficient as 0.0 rather than crashing.
        return 0.0
    return 2.0 * len(intersect) / total
def _submit(self):
'''submit a uservoice ticket. When we get here we should have:
{'user_prompt_issue': 'I want to do the thing.',
'record_asciinema': '/tmp/helpme.93o__nt5.json',
'record_environment': ((1,1),(2,2)...(N,N))}
Required Client Variables
... | submit a uservoice ticket. When we get here we should have:
{'user_prompt_issue': 'I want to do the thing.',
'record_asciinema': '/tmp/helpme.93o__nt5.json',
'record_environment': ((1,1),(2,2)...(N,N))}
Required Client Variables
self.api_key
... |
async def stop(self):
    """Stop heartbeat.

    Sets the stop flag, wakes the running loop, and then waits until the
    loop confirms shutdown.
    """
    self.stopped = True
    # Wake loop() so it can observe ``self.stopped`` and exit promptly.
    self.loop_event.set()
    # Waiting for shutdown of loop() -- loop() is expected to set
    # ``stopped_event`` on its way out (NOTE(review): confirm in loop()).
    await self.stopped_event.wait()
def data_directory(self):
"""
The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-a... | The absolute pathname of the directory where pip-accel's data files are stored (a string).
- Environment variable: ``$PIP_ACCEL_CACHE``
- Configuration option: ``data-directory``
- Default: ``/var/cache/pip-accel`` if running as ``root``, ``~/.pip-accel`` otherwise |
def add_track(self, *args, **kwargs):
"""
Add a track to a position.
Parameters
----------
track_type: string
The type of track to add (e.g. "heatmap", "line")
position: string
One of 'top', 'bottom', 'center', 'left', 'right'
tileset: hgf... | Add a track to a position.
Parameters
----------
track_type: string
The type of track to add (e.g. "heatmap", "line")
position: string
One of 'top', 'bottom', 'center', 'left', 'right'
tileset: hgflask.tilesets.Tileset
The tileset to be plotte... |
def makedoetree(ddict, bdict):
"""makedoetree"""
dlist = list(ddict.keys())
blist = list(bdict.keys())
dlist.sort()
blist.sort()
#make space dict
doesnot = 'DOES NOT'
lst = []
for num in range(0, len(blist)):
if bdict[blist[num]] == doesnot:#belong
lst = lst + [bl... | makedoetree |
def take_along_axis(large_array, indexes):
""" Take along axis """
# Reshape indexes into the right shape
if len(large_array.shape) > len(indexes.shape):
indexes = indexes.reshape(indexes.shape + tuple([1] * (len(large_array.shape) - len(indexes.shape))))
return np.take_along_axis(large_array, ... | Take along axis |
def reshape(self, shape: tf.TensorShape) -> 'TensorFluent':
'''Returns a TensorFluent for the reshape operation with given `shape`.
Args:
shape: The output's shape.
Returns:
A TensorFluent wrapping the reshape operation.
'''
t = tf.reshape(self.tensor, s... | Returns a TensorFluent for the reshape operation with given `shape`.
Args:
shape: The output's shape.
Returns:
A TensorFluent wrapping the reshape operation. |
def _candidate_merges(self, f):
"""
Identifies those features that originally had the same ID as `f`
(according to the id_spec), but were modified because of duplicate
IDs.
"""
candidates = [self._get_feature(f.id)]
c = self.conn.cursor()
results = c.exec... | Identifies those features that originally had the same ID as `f`
(according to the id_spec), but were modified because of duplicate
IDs. |
def get_displays_params(self) -> str:
    '''Show displays parameters (``dumpsys window displays`` on the device).'''
    # _execute returns a (stdout, stderr) pair; stderr is intentionally
    # discarded, matching the previous behavior.
    stdout, _stderr = self._execute(
        '-s', self.device_sn, 'shell', 'dumpsys', 'window', 'displays')
    return stdout
def make_PCEExtension_for_prebuilding_Code(
name, Code, prebuild_sources, srcdir,
downloads=None, **kwargs):
"""
If subclass of codeexport.Generic_Code needs to have some of it
sources compiled to objects and cached in a `prebuilt/` directory
at invocation of `setup.py build_ext` this co... | If subclass of codeexport.Generic_Code needs to have some of it
sources compiled to objects and cached in a `prebuilt/` directory
at invocation of `setup.py build_ext` this convenience function
makes setting up a PCEExtension easier. Use together with
cmdclass = {'build_ext': pce_build_ext}.
files ... |
def node(self, name):
"""Gets a single node from PuppetDB.
:param name: The name of the node search.
:type name: :obj:`string`
:return: An instance of Node
:rtype: :class:`pypuppetdb.types.Node`
"""
nodes = self.nodes(path=name)
return next(node for node... | Gets a single node from PuppetDB.
:param name: The name of the node search.
:type name: :obj:`string`
:return: An instance of Node
:rtype: :class:`pypuppetdb.types.Node` |
def cli(conf):
"""The fedora-messaging command line interface."""
if conf:
if not os.path.isfile(conf):
raise click.exceptions.BadParameter("{} is not a file".format(conf))
try:
config.conf.load_config(config_path=conf)
except exceptions.ConfigurationException as ... | The fedora-messaging command line interface. |
def display_waypoints(self):
'''display the waypoints'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.mission_list = self.module('wp').wploader.view_list()
polygons = self.module('wp').wploader.polygon_list()
self.map.add_object(mp_slipmap.SlipClearLayer('Mission'))
... | display the waypoints |
def _canonicalize(self, filename):
"""Use .collection as extension unless provided"""
path, ext = os.path.splitext(filename)
if not ext:
ext = ".collection"
return path + ext | Use .collection as extension unless provided |
def resource(self, uri, methods=frozenset({'GET'}), host=None,
strict_slashes=None, stream=False, version=None, name=None,
**kwargs):
"""
Create a blueprint resource route from a decorated function.
:param uri: endpoint at which the route will be accessible.
... | Create a blueprint resource route from a decorated function.
:param uri: endpoint at which the route will be accessible.
:param methods: list of acceptable HTTP methods.
:param host:
:param strict_slashes:
:param version:
:param name: user defined route name for url_for
... |
def add_to_item_list(self, item_urls, item_list_url):
""" Instruct the server to add the given items to the specified
Item List
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_url: String ... | Instruct the server to add the given items to the specified
Item List
:type item_urls: List or ItemGroup
:param item_urls: List of URLs for the items to add,
or an ItemGroup object
:type item_list_url: String or ItemList
:param item_list_url: the URL of the list to w... |
def do_handle_log(self, workunit, level, *msg_elements):
"""Implementation of Reporter callback."""
entry_info = {
'level': self._log_level_str[level],
'messages': self._render_messages(*msg_elements),
}
root_id = str(workunit.root().id)
current_stack = self._root_id_to_workunit_stack[... | Implementation of Reporter callback. |
def get_allowed_methods(self):
"""Returns a coma-separated list of method names that are allowed on
this instance. Useful to set the ``Allowed`` response header.
"""
return ", ".join([method for method in dir(self)
if method.upper() == method
and callable(getattr(... | Returns a coma-separated list of method names that are allowed on
this instance. Useful to set the ``Allowed`` response header. |
def sunset_utc(self, date, latitude, longitude, observer_elevation=0):
"""Calculate sunset time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type lati... | Calculate sunset time in the UTC timezone.
:param date: Date to calculate for.
:type date: :class:`datetime.date`
:param latitude: Latitude - Northern latitudes should be positive
:type latitude: float
:param longitude: Longitude - Eastern longitudes should be... |
def Call(self, Id=0):
    """Queries a call object.

    :Parameters:
      Id : int
        Call identifier.

    :return: Call object.
    :rtype: `call.Call`
    """
    call_obj = Call(self, Id)
    # Touching Status raises if no call with this Id exists, so this
    # attribute access doubles as an existence check.
    call_obj.Status
    return call_obj
def get_item_type_id_from_identifier(self, identifier, item_types=None):
"""
Get an ID of item type for the given identifier. Identifier is a string of
the following form:
<model_prefix>/<model_identifier>
where <model_prefix> is any suffix of database table of the given model
... | Get an ID of item type for the given identifier. Identifier is a string of
the following form:
<model_prefix>/<model_identifier>
where <model_prefix> is any suffix of database table of the given model
which uniquely specifies the table, and <model_identifier> is
identifier of t... |
def default(cls) -> 'PrecalculatedTextMeasurer':
"""Returns a reasonable default PrecalculatedTextMeasurer."""
if cls._default_cache is not None:
return cls._default_cache
if pkg_resources.resource_exists(__name__, 'default-widths.json.xz'):
import lzma
with ... | Returns a reasonable default PrecalculatedTextMeasurer. |
def get_legacy_storage_path(self):
"""
Detect and return existing legacy storage path.
"""
config_dir = os.path.dirname(
self.py3_wrapper.config.get("i3status_config_path", "/tmp")
)
storage_path = os.path.join(config_dir, "py3status.data")
if os.path.... | Detect and return existing legacy storage path. |
def BSearchCeil(a, x, lo=0, hi=None):
"""Returns lowest i such as a[i] >= x, or -1 if x > all elements in a
So, if x is in between two elements in a, this function will return the
index of the higher element, hence "Ceil".
Arguments:
a -- ordered numeric sequence
x -- element to se... | Returns lowest i such as a[i] >= x, or -1 if x > all elements in a
So, if x is in between two elements in a, this function will return the
index of the higher element, hence "Ceil".
Arguments:
a -- ordered numeric sequence
x -- element to search within a
lo -- lowest index to con... |
def from_computed_structure_entry(entry, miller_index, label=None,
adsorbates=None, clean_entry=None, **kwargs):
"""
Returns SlabEntry from a ComputedStructureEntry
"""
return SlabEntry(entry.structure, entry.energy, miller_index, label=label,
... | Returns SlabEntry from a ComputedStructureEntry |
def write(self, node, filehandle):
    """Write JSON to `filehandle` starting at `node`.

    The node tree is first converted to plain dicts by the configured
    exporter (falling back to a fresh DictExporter), then serialized with
    any extra ``json.dump`` keyword arguments stored on this instance.
    """
    exporter = self.dictexporter if self.dictexporter else DictExporter()
    return json.dump(exporter.export(node), filehandle, **self.kwargs)
def _unpack_oxm_field(self):
"""Unpack oxm_field from oxm_field_and_mask.
Returns:
:class:`OxmOfbMatchField`, int: oxm_field from oxm_field_and_mask.
Raises:
ValueError: If oxm_class is OFPXMC_OPENFLOW_BASIC but
:class:`OxmOfbMatchField` has no such inte... | Unpack oxm_field from oxm_field_and_mask.
Returns:
:class:`OxmOfbMatchField`, int: oxm_field from oxm_field_and_mask.
Raises:
ValueError: If oxm_class is OFPXMC_OPENFLOW_BASIC but
:class:`OxmOfbMatchField` has no such integer value. |
def _run_play(self, play):
''' run a list of tasks for a given pattern, in order '''
self.callbacks.on_play_start(play.name)
# if no hosts matches this play, drop out
if not self.inventory.list_hosts(play.hosts):
self.callbacks.on_no_hosts_matched()
return True
... | run a list of tasks for a given pattern, in order |
def augmentation_transform(self, data, label):  # pylint: disable=arguments-differ
    """Override: transforms input data with the specified augmentations.

    Each augmenter in ``self.auglist`` is applied in order, threading the
    (data, label) pair through the whole chain.
    """
    pair = (data, label)
    for augmenter in self.auglist:
        data, label = augmenter(pair[0], pair[1])
        pair = (data, label)
    return pair
def activate(self, target=None, **options):
"""Activate DEP communication with a target."""
log.debug("initiator options: {0}".format(options))
self.did = options.get('did', None)
self.nad = options.get('nad', None)
self.gbi = options.get('gbi', '')[0:48]
self.brs = min(... | Activate DEP communication with a target. |
def initialize(self, params, repetition):
"""
Initialize experiment parameters and default values from configuration file
"""
self.name = params["name"]
self.dataDir = params.get("datadir", "data")
self.seed = params.get("seed", 42) + repetition
torch.manual_seed(self.seed)
np.random.se... | Initialize experiment parameters and default values from configuration file |
def water_self_diffusion_coefficient(T=None, units=None, warn=True,
err_mult=None):
"""
Temperature-dependent self-diffusion coefficient of water.
Parameters
----------
T : float
Temperature (default: in Kelvin)
units : object (optional)
obje... | Temperature-dependent self-diffusion coefficient of water.
Parameters
----------
T : float
Temperature (default: in Kelvin)
units : object (optional)
object with attributes: Kelvin, meter, kilogram
warn : bool (default: True)
Emit UserWarning when outside temperature range.
... |
def map_names(lang="en"):
"""This resource returns an dictionary of the localized map names for
the specified language. Only maps with events are listed - if you need a
list of all maps, use ``maps.json`` instead.
:param lang: The language to query the names for.
:return: the response is a dictiona... | This resource returns an dictionary of the localized map names for
the specified language. Only maps with events are listed - if you need a
list of all maps, use ``maps.json`` instead.
:param lang: The language to query the names for.
:return: the response is a dictionary where the key is the map id an... |
def get_data(self, environment_title_or_num=-1, frequency=None):
"""
Parameters
----------
environment_title_or_num
frequency: 'str', default None
'timestep', 'hourly', 'daily', 'monthly', 'annual', 'run_period'
If None, will look for the smallest frequenc... | Parameters
----------
environment_title_or_num
frequency: 'str', default None
'timestep', 'hourly', 'daily', 'monthly', 'annual', 'run_period'
If None, will look for the smallest frequency of environment. |
def log_likelihood(self):
    """Marginal log-likelihood of the warped model.

    Extends the plain GP log-likelihood with the log-Jacobian of the
    warping function, accounting for the change of variables applied to
    the untransformed observations.
    """
    # NOTE(review): assumes fgrad_y returns strictly positive values so
    # the log is well defined -- confirm with the warping function.
    base_ll = GP.log_likelihood(self)
    grad_y = self.warping_function.fgrad_y(self.Y_untransformed)
    return base_ll + np.sum(np.log(grad_y))
def step_random_processes(oscillators):
"""
Args:
oscillators (list): A list of oscillator.Oscillator objects
to operate on
Returns: None
"""
if not rand.prob_bool(0.01):
return
amp_bias_weights = [(0.001, 1), (0.1, 100), (0.15, 40), (1, 0)]
# Find out how many o... | Args:
oscillators (list): A list of oscillator.Oscillator objects
to operate on
Returns: None |
def fit(self, x, y=None, batch_size=32, nb_epoch=10, validation_data=None, distributed=True):
"""
Train a model for a fixed number of epochs on a dataset.
# Arguments
x: Input data. A Numpy array or RDD of Sample or Image DataSet.
y: Labels. A Numpy array. Default is None if x i... | Train a model for a fixed number of epochs on a dataset.
# Arguments
x: Input data. A Numpy array or RDD of Sample or Image DataSet.
y: Labels. A Numpy array. Default is None if x is already RDD of Sample or Image DataSet.
batch_size: Number of samples per gradient update.
nb_ep... |
def get(self, key, default=None, as_int=False, setter=None):
"""Gets a value from the cache.
:param str|unicode key: The cache key to get value for.
:param default: Value to return if none found in cache.
:param bool as_int: Return 64bit number instead of str.
:param callable... | Gets a value from the cache.
:param str|unicode key: The cache key to get value for.
:param default: Value to return if none found in cache.
:param bool as_int: Return 64bit number instead of str.
:param callable setter: Setter callable to automatically set cache
value if... |
def delete_state_definition(self, process_id, wit_ref_name, state_id):
"""DeleteStateDefinition.
[Preview API] Removes a state definition in the work item type of the process.
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
... | DeleteStateDefinition.
[Preview API] Removes a state definition in the work item type of the process.
:param str process_id: ID of the process
:param str wit_ref_name: The reference name of the work item type
:param str state_id: ID of the state |
def single_page_members(self, page_number=1):
"""获取单个页面内的小组成员信息
:param page_number: 页码
:return: 包含小组成员信息的列表
返回值示例: ::
[{
'id': 123, # member_id
'username': 'jim', # username
'nickname': 'Jim', ... | 获取单个页面内的小组成员信息
:param page_number: 页码
:return: 包含小组成员信息的列表
返回值示例: ::
[{
'id': 123, # member_id
'username': 'jim', # username
'nickname': 'Jim', # 昵称
'role': u'小组长', # 身份
... |
def show_fabric_trunk_info_input_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
show_fabric_trunk_info = ET.Element("show_fabric_trunk_info")
config = show_fabric_trunk_info
input = ET.SubElement(show_fabric_trunk_info, "input")
... | Auto Generated Code |
def vertices(self):
"""Return an array (Nf, 3) of vertices.
If only faces exist, the function computes the vertices and
returns them.
If no vertices or faces are specified, the function returns None.
"""
if self._faces is None:
if self._vertices is None:
... | Return an array (Nf, 3) of vertices.
If only faces exist, the function computes the vertices and
returns them.
If no vertices or faces are specified, the function returns None. |
def _get_event_and_context(self, event, arg_type):
"""Return an INDRA Event based on an event entry."""
eid = _choose_id(event, arg_type)
ev = self.concept_dict[eid]
concept, metadata = self._make_concept(ev)
ev_delta = {'adjectives': [],
'states': get_states(... | Return an INDRA Event based on an event entry. |
def _format_params(self, type_, params):
"""Reformat some of the parameters for sapi."""
if 'initial_state' in params:
# NB: at this moment the error raised when initial_state does not match lin/quad (in
# active qubits) is not very informative, but there is also no clean way to ... | Reformat some of the parameters for sapi. |
def version(self) -> Optional[str]:
    """Return the HTTP version, fetching it lazily from the parser.

    The first call asks the underlying parser for the version and caches
    it on ``self._version``; later calls return the cached value.
    """
    cached = self._version
    if cached is None:
        cached = self._parser.get_http_version()
        self._version = cached
    return cached
def get_biome_color_based_on_elevation(world, elev, x, y, rng):
''' This is the "business logic" for determining the base biome color in satellite view.
This includes generating some "noise" at each spot in a pixel's rgb value, potentially
modifying the noise based on elevation, and finally incorpo... | This is the "business logic" for determining the base biome color in satellite view.
This includes generating some "noise" at each spot in a pixel's rgb value, potentially
modifying the noise based on elevation, and finally incorporating this with the base biome color.
The basic rules regardi... |
def anonymous_login(self):
"""Login as anonymous user
:return: logon result, see `CMsgClientLogonResponse.eresult <https://github.com/ValvePython/steam/blob/513c68ca081dc9409df932ad86c66100164380a6/protobufs/steammessages_clientserver.proto#L95-L118>`_
:rtype: :class:`.EResult`
"""
... | Login as anonymous user
:return: logon result, see `CMsgClientLogonResponse.eresult <https://github.com/ValvePython/steam/blob/513c68ca081dc9409df932ad86c66100164380a6/protobufs/steammessages_clientserver.proto#L95-L118>`_
:rtype: :class:`.EResult` |
def _draw_circle(self, pos_x, pos_y, radius, depth, stroke_width=1., fill_color=None, border_color=None,
from_angle=0., to_angle=2 * pi):
"""Draws a circle
Draws a circle with a line segment a desired position with desired size.
:param float pos_x: Center x position
... | Draws a circle
Draws a circle with a line segment a desired position with desired size.
:param float pos_x: Center x position
:param float pos_y: Center y position
:param float depth: The Z layer
:param float radius: Radius of the circle |
def delete_message(self, messageid="", folderid="", stackid=""):
"""Delete a message or a message stack
:param folderid: The folder to delete the message from, defaults to inbox
:param messageid: The message to delete
:param stackid: The stack to delete
"""
if self.sta... | Delete a message or a message stack
:param folderid: The folder to delete the message from, defaults to inbox
:param messageid: The message to delete
:param stackid: The stack to delete |
def _GetStringValue(self, data_dict, name, default_value=None):
"""Retrieves a specific string value from the data dict.
Args:
      data_dict (dict[str, list[str]]): values per name.
name (str): name of the value to retrieve.
default_value (Optional[object]): value to return if the name has no valu... | Retrieves a specific string value from the data dict.
Args:
      data_dict (dict[str, list[str]]): values per name.
name (str): name of the value to retrieve.
default_value (Optional[object]): value to return if the name has no value
set in data_dict.
Returns:
str: value represente... |
def parse_orgtable(lines):
"""
Parse an org-table (input as a list of strings split by newline)
into a Pandas data frame.
Parameters
----------
lines : string
an org-table input as a list of strings split by newline
Returns
-------
dataframe : pandas.DataFrame
A... | Parse an org-table (input as a list of strings split by newline)
into a Pandas data frame.
Parameters
----------
lines : string
an org-table input as a list of strings split by newline
Returns
-------
dataframe : pandas.DataFrame
A data frame containing the org-table's ... |
def strings_to_integers(strings: Iterable[str]) -> Iterable[int]:
"""
Convert a list of strings to a list of integers.
:param strings: a list of string
:return: a list of converted integers
.. doctest::
>>> strings_to_integers(['1', '1.0', '-0.2'])
[1, 1, 0]
"""
return str... | Convert a list of strings to a list of integers.
:param strings: a list of string
:return: a list of converted integers
.. doctest::
>>> strings_to_integers(['1', '1.0', '-0.2'])
[1, 1, 0] |
def multipoint(self, points):
    """Creates a MULTIPOINT shape.
    Points is a list of xy values."""
    # A multipoint is stored as a single part, so wrap the point list in
    # an outer list to match the generic _shapeparts interface.
    self._shapeparts(parts=[points], shapeType=MULTIPOINT)
Points is a list of xy values. |
def is_step_visible(self, step):
"""
Returns whether the given `step` should be included in the wizard; it
is included if either the form is idempotent or not filled in before.
"""
return self.idempotent_dict.get(step, True) or \
step not in self.storage.validated_ste... | Returns whether the given `step` should be included in the wizard; it
is included if either the form is idempotent or not filled in before. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.