content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _daily_prevalence(data):
"""
Returns a series where each value is a true fraction of currently infected population.
Args:
(dict): tracker data loaded from pkl file.
Returns:
(np.array): 1D array where each value is the above described fraction
"""
n_infected_per_day = data[... | 2,500 |
def _pytest_deselected(items):
"""
pytest has a hookspec pytest_deselected for accessing the deselected tests.
Example: add this code to conftest.py in your test root dir
Running the tests now will give you an output similar to this:
$ pytest -vv
...
plugins: cov-2.8.1, asyncio-... | 2,501 |
def open_serial_ports(serial_ports):
""" Open all the serial ports in the list. Used when the GUI is first opened. """
global OPEN_SERIAL_PORTS
try:
for s in serial_ports:
OPEN_SERIAL_PORTS.append(serial.Serial(s, SERIAL_SPEED, write_timeout=0, timeout=0))
except (OSError, seri... | 2,502 |
def test_memoryfile_incr_init(data_coutwildrnp_json):
"""In-memory GeoJSON file written in 2 parts can be read"""
with MemoryFile() as memfile:
memfile.write(data_coutwildrnp_json[:1000])
memfile.write(data_coutwildrnp_json[1000:])
with memfile.open() as collection:
assert le... | 2,503 |
def users_view(page):
"""
The user view page
Returns:
a rendered user view template
"""
user_search = request.args.get("search")
user_role = request.args.get("user_role")
users_query = model.User.query
if user_search:
term = "%" + user_search + "%"
users_query ... | 2,504 |
def serial_christie_power_state(connection):
"""Ask a Christie projector for its power state and parse the response"""
connection.reset_input_buffer()
response = serial_send_command(connection, "(PWR?)", char_to_read=21)
result = None
if len(response) > 0:
if "PWR!001" in response:
... | 2,505 |
def printHeader(args, header, sanity):
"""
printHeader(header): Print up our headers
"""
text = header["header_text"]
print("Header")
print("======")
if args.text:
print("")
print(" Request ID: %s" % header["request_id"])
print(" Questions: %d" % int(header["num_questions"]))
prin... | 2,506 |
def cuda_tanh(a):
""" Hyperbolic tangent of GPUArray elements.
Parameters:
a (gpu): GPUArray with elements to be operated on.
Returns:
gpu: tanh(GPUArray)
Examples:
>>> a = cuda_tanh(cuda_give([0, pi / 4]))
array([ 0., 0.6557942])
>>> type(a)
... | 2,507 |
def normalize_multi_header(df):
    """Normalize the string labels of a DataFrame's MultiIndex columns.

    Every level of every column label is stripped of surrounding
    whitespace and lower-cased. The input frame is not modified; a
    normalized copy is returned.

    Args:
        df (pd.DataFrame): frame whose columns form a MultiIndex of strings.

    Returns:
        pd.DataFrame: a copy of ``df`` with normalized column labels.
    """
    df_copy = df.copy()
    normalized = [tuple(level.strip().lower() for level in col) for col in df_copy.columns]
    # pd.core.index.MultiIndex was a private alias removed in pandas 1.0;
    # use the public pd.MultiIndex API instead.
    df_copy.columns = pd.MultiIndex.from_tuples(normalized)
    return df_copy
def scapy_packet_Packet_hasflag(self, field_name, value):
"""Is the specified flag value set in the named field"""
field, val = self.getfield_and_val(field_name)
if isinstance(field, EnumField):
if val not in field.i2s:
return False
return field.i2s[val] == value
else:
return (1 << field.names.index([value... | 2,509 |
def grid_points_2d(length, width, div, width_div=None):
"""Returns a regularly spaced grid of points occupying a rectangular
region of length x width partitioned into div intervals. If different
spacing is desired in width, then width_div can be specified, otherwise
it will default to div. If div < 2 i... | 2,510 |
def analytic_gradient(circuit, parameter=None):
"""Return the analytic gradient of the input circuit."""
if parameter is not None:
if parameter not in circuit.parameters:
raise ValueError('Parameter not in this circuit.')
if len(circuit._parameter_table[parameter]) > 1:
... | 2,511 |
def _get_shipping_voucher_discount_for_cart(voucher, cart):
"""Calculate discount value for a voucher of shipping type."""
if not cart.is_shipping_required():
msg = pgettext(
'Voucher not applicable',
'Your order does not require shipping.')
raise NotApplicable(msg)
s... | 2,512 |
async def test_async_delete_authors(aresponses, readarr_client: ReadarrClient) -> None:
"""Test editing authors."""
aresponses.add(
"127.0.0.1:8787",
f"/api/{READARR_API}/author/0",
"DELETE",
aresponses.Response(
status=200,
headers={"Content-Type": "appli... | 2,513 |
def ottawa(location, **kwargs):
    """Geocode a location using the Ottawa provider.

    :param location: the search location to be geocoded.
    """
    # Delegate to the generic entry point with the provider pinned; any
    # extra keyword arguments pass straight through.
    return get(location, provider='ottawa', **kwargs)
def entries():
    """Build a fresh, independent baseline data dictionary for a test case."""
    template = {
        "arb_key": "text",
        "randn": random.randint(0, 10),
        "nested": {
            "ntop": 0,
            "nmid": {"list": ["a", "b"]},
            "lowest": {"x": {"a": -1, "b": 1}},
        },
        "collection": {1, 2, 3},
    }
    # Deep-copy so callers can mutate the result freely.
    return copy.deepcopy(template)
def count_ref_alleles(variant, *traits):
"""Count reference allels for a variant
Parameters
----------
variant : a Variant as from funcgenom
the variant for which alleles should be counted
*traits : str
the traits for which alleles should be counted
Returns
-------... | 2,516 |
def preprocess_image(image, image_sz=48):
"""
Preprocess an image. Most of this is stuff that needs to be done for the Keras CNN model to work,
as recommended by: https://chsasank.github.io/keras-tutorial.html
"""
# we need to convert to saturation, and value (HSV) coordinates
hsv_image = color... | 2,517 |
def pspace_independent(a, b):
"""
Tests for independence between a and b by checking if their PSpaces have
overlapping symbols. This is a sufficient but not necessary condition for
independence and is intended to be used internally.
Notes
=====
pspace_independent(a, b) implies independent(... | 2,518 |
def img_aspect_ratio(width, height):
"""
Returns an image's aspect ratio.
If the image has a common aspect ratio, returns the aspect ratio in the format x:y,
otherwise, just returns width/height.
"""
ratio = round(width/height, 2)
for ar, val in COMMON_ASPECT_RATIOS.items():
... | 2,519 |
def _get_active_sculpting_mesh_for_deformer(deformer):
"""
If sculpting is enabled on the deformer, return the output mesh. Otherwise,
return None.
"""
# If sculpting is enabled, .tweak[0] will be connected to the .tweakLocation of
# a mesh.
connections = cmds.listConnections('%s.tweak[0]' ... | 2,520 |
def get_client_versions():
"""Gets the client versions (or client equivalent for server).
Returns:
A list of client versions (or client equivalent for server).
E.g. '10' for Windows 10 and Windows Server 2016.
"""
version_nubmer = get_os_version_number()
if version_nubmer in _WIN32_CLIENT_NAMES:
... | 2,521 |
def PV_property(name,default_value=nan):
"""EPICS Channel Access Process Variable as class property"""
def prefix(self):
prefix = ""
if hasattr(self,"prefix"): prefix = self.prefix
if hasattr(self,"__prefix__"): prefix = self.__prefix__
if prefix and not prefix.endswith("."): pre... | 2,522 |
def _stream_files(curr_header, fn, mesos_files):
"""Apply `fn` in parallel to each file in `mesos_files`. `fn` must
return a list of strings, and these strings are then printed
serially as separate lines.
`curr_header` is the most recently printed header. It's used to
group lines. Each line has ... | 2,523 |
def supported_platform(logger):
"""Checks if this script is running on supported platform.
Args:
logger: A valid logger instance to log debug/error messages.
Returns:
True if this platform is supported.
"""
# TODO(billy): Look into supporting Windows in the near future.
logger.debug("Current platf... | 2,524 |
def random_in_range(a: int, b: int) -> int:
    """Draw a uniformly random integer r satisfying a <= r <= b."""
    # randrange's stop bound is exclusive, so b + 1 makes the upper end
    # inclusive — this is exactly how random.randint is defined.
    return random.randrange(a, b + 1)
def count_items():
"""
:returns: a dictionary with counts in fields 'total', 'done'.
"""
con = sqlite3.connect(PROGRESS_DB_FILE_NAME)
cur = con.cursor()
# do not count root
cur.execute("SELECT COUNT(*) FROM item WHERE pk<>0")
total = cur.fetchone()[0]
cur.execute("SELECT COUNT(*) FRO... | 2,526 |
def app() -> None:
"""This app renders the Data Analyzer page"""
# TEXT:
st.write(
"""
# Data Analysis Dashboard
Please provide an asset name to display historical data.
"""
)
# INPUTs:
st.sidebar.title("Parameters")
col1, col2, c... | 2,527 |
def plot_sparsity(results):
"""Function to visualize the sparsity-accuracy trade-off of regularized decision
layers
Args:
results (dictionary): Appropriately formatted dictionary with regularization
paths and logs of train/val/test accuracy.
"""
if type(results['metrics']['a... | 2,528 |
def warning_test():
"""For testing warning function."""
# Should show warnings in order and only HAPIWarning {1,2} should
# have a different format
from warnings import warn
warn('Normal warning 1')
warn('Normal warning 2')
warning('HAPI Warning 1')
warning('HAPI Warning 2')
warn... | 2,529 |
def valida_cnpj(cnpj):
"""
Valida CNPJs, retornando apenas a string de números válida.
# CNPJs errados
>>> validar_cnpj('abcdefghijklmn')
False
>>> validar_cnpj('123')
False
>>> validar_cnpj('')
False
>>> validar_cnpj(None)
False
>>> validar_cnpj('12345678901234')
Fa... | 2,530 |
def km_miles(kilometers):
    """Convert a distance from kilometers to miles.

    Args:
        kilometers: distance in kilometers (any numeric type).

    Returns:
        The equivalent distance in miles.
    """
    # One mile is exactly 1.609344 km by definition; the previous divisor
    # of 1.609 introduced ~0.02% systematic error.
    return kilometers / 1.609344
def create_remote(accessory):
"""Define characteristics for a button (that is inn a group)."""
service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL)
char = service_label.add_char(CharacteristicsTypes.SERVICE_LABEL_NAMESPACE)
char.value = 1
for i in range(4):
button = accessory... | 2,532 |
def is_enterprise_learner(user):
"""
Check if the given user belongs to an enterprise. Cache the value if an enterprise learner is found.
Arguments:
user (User): Django User object.
Returns:
(bool): True if given user is an enterprise learner.
"""
cached_is_enterprise_key = get... | 2,533 |
def point_cloud(depth, colors):
"""Transform a depth image into a point cloud with one point for each
pixel in the image, using the camera transform for a camera
centred at cx, cy with field of view fx, fy.
depth is a 2-D ndarray with shape (rows, cols) containing
depths from 1 to 254 inclusive. Th... | 2,534 |
def dict_merge(dct, merge_dct):
""" Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
updating only top-level keys, dict_merge recurses down into dicts nested
to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
``dct``.
:param dct: dict onto which the merge is executed
:para... | 2,535 |
def structural_email(data, pos_parser=True, bytedata_parser_threshold=50, reference_parser_match_type=2):
"""
This is a parser pipeline, parser order matters.
1. string => structure email to separate => header, body, others
2. body => remove typo and some irrelevant words => body
3. body => parse an... | 2,536 |
def build_batches(data, conf, turn_cut_type='tail', term_cut_type='tail'):
"""
Build batches
"""
_turns_batches = []
_tt_turns_len_batches = []
_every_turn_len_batches = []
_response_batches = []
_response_len_batches = []
_label_batches = []
batch_len = len(data[six.b('y')]) ... | 2,537 |
def is_notebook():
"""Check if pyaedt is running in Jupyter or not.
Returns
-------
bool
"""
try:
shell = get_ipython().__class__.__name__
if shell == "ZMQInteractiveShell":
return True # Jupyter notebook or qtconsole
else:
return False
excep... | 2,538 |
def test_handle_check_suite(monkeypatch, capsys):
"""
.
"""
checks.handle_check_suite({"action": "in_progress"}, "abc")
printed = capsys.readouterr()
assert (
printed.out == "Ignoring check_suite action in_progress\n"
), "In progress is skipped"
# pylint: disable=unused-argument... | 2,539 |
def strategy(history, memory):
"""
Tit-for-tat, except we punish them N times in a row if this is the Nth time they've
initiated a defection.
memory: (initiatedDefections, remainingPunitiveDefections)
"""
if memory is not None and memory[1] > 0:
choice = 0
memory = (memory[0], m... | 2,540 |
def get_primitives(name=None, primitive_type=None, primitive_subtype=None):
"""Get a list of the available primitives.
Optionally filter by primitive type: ``transformation`` or ``aggregation``.
Args:
primitive_type (str):
Filter by primitive type. ``transformation`` or ``aggregation``... | 2,541 |
def _unpack_batch_channel(data, old_shape):
    """Undo batch/channel packing: transpose the packed axes back into
    place, then reshape the symbol to ``old_shape``.
    """
    transposed = nnvm.sym.transpose(data, axes=(0, 4, 1, 5, 2, 3))
    return nnvm.sym.reshape(transposed, shape=old_shape)
def test_atomic_language_length_nistxml_sv_iv_atomic_language_length_1_3(mode, save_output, output_format):
"""
Type atomic/language is restricted by facet length with value 2.
"""
assert_bindings(
schema="nistData/atomic/language/Schema+Instance/NISTSchema-SV-IV-atomic-language-length-1.xsd",
... | 2,543 |
def get_branch_user(branch):
    """Return the author name of the most recent commit on ``branch``.

    Returns None when the command produces no output.
    """
    with Command('git', 'log', '--pretty=tformat:%an', '-1', branch) as cmd:
        # Only the first line of output matters; fall back to None.
        return next(iter(cmd), None)
def log_handler(*loggers, logname: str = ''):
"""[summary]
Keyword Arguments:
logname {str} -- [description] (default: {''})
"""
formatter = logging.Formatter(
'%(asctime)s %(filename)12s:L%(lineno)3s [%(levelname)8s] %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
# stream han... | 2,545 |
def consume_fio_output(cons, result, numjobs, mode, bs, env_id):
"""Consumer function."""
cpu_utilization_vmm = result[CPU_UTILIZATION_VMM]
cpu_utilization_vcpus = result[CPU_UTILIZATION_VCPUS_TOTAL]
cons.consume_measurement(CPU_UTILIZATION_VMM, cpu_utilization_vmm)
cons.consume_measurement(CPU_UTI... | 2,546 |
def find_system_cameras() -> Mapping[str, str]:
"""Returns a camera_description -> camera_path map."""
if sys.platform == 'darwin' or sys.platform in ('windows', 'win32'):
try:
from .qtmultimedia import find_system_cameras
except ImportError as e:
return {}
else:
... | 2,547 |
def _shape_list(x):
    """Return the dimensions of ``x`` as a list, using static values where
    known and falling back to dynamic ``tf.shape`` entries otherwise.
    """
    dynamic = tf.shape(x)
    # `or` keeps the original fallback semantics: any falsy static dim
    # (None) is replaced by the corresponding dynamic dimension.
    return [
        static_dim or dynamic[i]
        for i, static_dim in enumerate(x.get_shape().as_list())
    ]
def test_held_karp_ascent():
"""
Test the Held-Karp relaxation with the ascent method
"""
import networkx.algorithms.approximation.traveling_salesman as tsp
np = pytest.importorskip("numpy")
# Adjacency matrix from page 1153 of the 1970 Held and Karp paper
# which have been edited to be di... | 2,549 |
def upgrade_common(ctx, config, deploy_style):
"""
Common code for upgrading
"""
remotes = upgrade_remote_to_config(ctx, config)
project = config.get('project', 'ceph')
extra_pkgs = config.get('extra_packages', [])
log.info('extra packages: {packages}'.format(packages=extra_pkgs))
for ... | 2,550 |
def nested_put(config: Dict[str, Any], nested_keys: List[str], value: Any) -> None:
"""
Puts the given nested key value pair into the given dict. If any part of
the nested key structure does not yet exist, then it will be created in the
process.
>>> config = {}
>>> nested_put(config, ["key"], "... | 2,551 |
def convert2define(name):
    """Build the include-guard macro name derived from the file name ``name``."""
    guard = toupper(toalphanum(name))
    return "__{}__".format(guard)
def url_exists(url):
    """
    Check whether a URL is reachable via an HTTP HEAD request.

    :param url: the URL to probe.
    :return: True if the server answers 200 (OK) or 301 (moved
        permanently), False for any other status code.
    """
    p = urlparse(url)
    conn = httplib.HTTPConnection(p.netloc)
    try:
        conn.request('HEAD', p.path)
        resp = conn.getresponse()
        return resp.status == 301 or resp.status == 200
    finally:
        # Always release the socket, even when the request raises —
        # the original leaked the connection on every call.
        conn.close()
def add_header(cmd):
"""
:param cmd: the command with its values
:return: adds a header and returns it, ready to be send
"""
# get the length of the length of the cmd (for how many spaces needed)
header = str(len(cmd))
for i in range(get_digits(len(cmd)), HEADERSIZE):
header = heade... | 2,554 |
def stat_selector(player, stat, in_path, year):
"""
Selects stat for player in game year selected
Parameters
----------
player
The player being assessed (str)
stat
The stat being assessed (str)
in_path
The path to the folder containing player data (str)
year
... | 2,555 |
def info(text, newline=True):
    """Write ``text`` to standard error, appending a newline unless
    ``newline`` is False."""
    terminator = "\n" if newline else ""
    sys.stderr.write(f"{text}{terminator}")
def test_iou_metric_compute(
outputs: List[torch.Tensor],
targets: List[torch.Tensor],
weights: List[float],
class_names: List[str],
batch_answers: List[List[float]],
total_answers: List[List[Union[List[float], float]]],
):
"""IOU update, compute test"""
metric = IOUMetric(weights=weight... | 2,557 |
def get_cali_samples(train_data_loader, num_samples, no_label=True):
"""Generate sub-dataset for calibration.
Args:
train_data_loader (torch.utils.data.DataLoader):
num_samples (int):
no_label (bool, optional): If the dataloader has no labels. Defaults to True.
Returns:
t... | 2,558 |
def visualize(args, epoch, model, data_loader, writer):
"""
Logs visualisations of reconstructions to Tensorboard.
:param args: Arguments object, contains reconstruction model hyperparameters.
:param epoch: current training epoch.
:param model: reconstruction model.
:param data_loader: visualis... | 2,559 |
def fetch_available_litteraturbanken_books() -> List[Tuple[str, str]]:
"""Fetch available books from Litteraturbanken."""
url = "https://litteraturbanken.se/api/list_all/etext?exclude=text,parts,sourcedesc,pages,errata&filter_and=%7B%22sort_date_imprint.date:range%22:%221248,2020%22,%22export%3Etype%22:%5B%22xm... | 2,560 |
def soup_from_psf(psf):
"""
Returns a Soup from a .psf file
"""
soup = pdbatoms.Soup()
curr_res_num = None
is_header = True
for line in open(psf):
if is_header:
if "NATOM" in line:
n_atom = int(line.split()[0])
is_header = False
continue
words = line.split()
atom_nu... | 2,561 |
def check_api():
"""
复核货品入库
post req: withlock
{
erp_order_code,
lines: [{
barcode, location, lpn, qty
},]
w_user_code,
w_user_name
}
"""
w_user_code = request.json.pop('w_user_code', None)
w_user_name = request.json.pop('w_user_na... | 2,562 |
def test_qy_assert_():
"""
Test the qy-LLVM assert_() construct.
"""
# should not raise
@emit_and_execute()
def _():
qy.assert_(True)
# should raise
from qy import EmittedAssertionError
def should_raise():
@emit_and_execute()
def _():
qy.assert_... | 2,563 |
def test_integration_download_fail(demisto_client, tmp_path):
"""
Given
- Script to download, that exists on the machine.
- Playbook to download, that doesn't exist on the machine.
When
- Running demisto-sdk download command.
Then
- Ensure that the exit code is 1, since the playbook wa... | 2,564 |
def quadratic_form(u, Q, v, workers=1, **kwargs):
"""
Compute the quadratic form uQv, with broadcasting
Parameters
----------
u : (..., M) array
The u vectors of the quadratic form uQv
Q : (..., M, N) array
The Q matrices of the quadratic form uQv
v : (..., N) array
... | 2,565 |
def reorg(dat):
"""This function grabs the data from the dictionary of data types
(organized by ID), and combines them into the
:class:`dolfyn.ADPdata` object.
"""
outdat = apb.ADPdata()
cfg = outdat['config'] = db.config(_type='Nortek AD2CP')
cfh = cfg['filehead config'] = dat['filehead con... | 2,566 |
def human2pickett(name: str, reduction="A", linear=True, nuclei=0):
""" Function for translating a Hamiltonian parameter to a Pickett
identifier.
An alternative way of doing this is to programmatically
generate the Pickett identifiers, and just use format string
to output the identi... | 2,567 |
def findAndRemove(type, attrs, parent=None):
    """Delete every object of ``type`` under ``parent`` whose attributes
    match ``attrs``."""
    for match in getFilteredTypeList(type, attrs, parent):
        remove(match)
def get_oauth2_service_account_keys():
    """Return the OAuth2 service account keys this module requires.

    Returns:
        A tuple of the required key names as strs.
    """
    # Simple accessor over the module-level constant; no copying needed
    # because the tuple is immutable.
    return _OAUTH2_SERVICE_ACCOUNT_KEYS
def output(s):
    """Send the string ``s`` to the chat client as a chat message.

    Args:
        s: the message text to emit.

    NOTE(review): stub implementation — currently does nothing; presumably
    replaced or monkey-patched by the hosting environment. Confirm before
    relying on it.
    """
    pass
def read_conf_file(
toml_path: Path,
file_desc: str,
schema_type: str,
) -> Any:
"""Read TOML configuration and verify against schema."""
if not toml_path.exists():
logger.error(f'{file_desc} file "{toml_path}" does not exist')
sys.exit(1)
try:
toml_dict = toml.load(toml_... | 2,571 |
def mocked_requests_post(*args, **kwargs):
"""Mock to replace requests.post"""
class MockResponse:
"""Mock class for KustoResponse."""
def __init__(self, json_data, status_code):
self.json_data = json_data
self.text = text_type(json_data)
self.status_code = ... | 2,572 |
def grammar_info(df, col):
    """Add three derived columns to ``df`` (mutated in place) based on the
    text column ``col``: a cleaned abstract, a Flesch readability score,
    and a sentence count. Returns the same (mutated) frame.

    NOTE(review): ``df[col]`` is re-read after 'clean_abstract' is
    inserted — assumes ``col`` is not one of the three output column
    names; confirm with callers.
    """
    df['clean_abstract'] = clean_text(df[col])
    df['flesch_score'] = df[col].apply(flesch_score)
    df['sentence_count'] = sentence_count(df[col])
    return df
def urbandict(bot, event, *args):
"""lookup a term on Urban Dictionary.
supplying no parameters will get you a random term.
DISCLAIMER: all definitions are from http://www.urbandictionary.com/ - the bot and its
creators/maintainers take no responsibility for any hurt feelings.
"""
term = " ".jo... | 2,574 |
def read_file_list(bld, file):
"""
Read and process a file list file (.waf_file) and manage duplicate files and possible globbing patterns to prepare
the list for injestion by the project
:param bld: The build context
:param file: The .waf_file file list to process
:return: The pr... | 2,575 |
def landing():
    """Serve the site's public landing page."""
    template = 'public/index.html'
    return render_template(template)
def uniform_decay(distance_array, scale):
"""
Transform a measurement array using a uniform distribution.
The output is 1 below the scale parameter and 0 above it.
Some sample values. Measurements are in multiple of ``scale``; decay value are in fractions of
the maximum value:
+--------------... | 2,577 |
def pid_to_service(pid):
    """
    Resolve ``pid`` to the name of the systemd service it belongs to,
    or None when the PID is not part of a service.

    Prefers the DBUS backend when the dbus module is available and falls
    back to querying systemctl otherwise.
    """
    resolver = _pid_to_service_dbus if dbus else _pid_to_service_systemctl
    return resolver(pid)
def Frequencies(bands, src):
"""
Count the number of scalars in each band.
:param: bands - the bands.
:param: src - the vtkPolyData source.
:return: The frequencies of the scalars in each band.
"""
freq = dict()
for i in range(len(bands)):
freq[i] = 0;
tuples = src.GetPoint... | 2,579 |
def _load_use_static_shape(ortmodule_config_accessor, data):
"""Loads UseStaticShape from json file onto ORTModule."""
assert hasattr(data, _load_use_static_shape.loading_key)
log.info(f"Found keyword {_load_use_static_shape.loading_key} in json. Loading attributes from file.")
assert isinstance(data.... | 2,580 |
def label_file(input_file, output_file):
"""
label each feature file
"""
# read input file and save them in dict
features = load_protobuf(input_file)
# for each obstacle ID, sort dict by their timestamp
fea_trajs = build_trajectory(features)
# for each obstacle ID, label them, remove r... | 2,581 |
def get_loss_fn(loss: str) -> Callable[..., torch.Tensor]:
"""
Get loss function as a PyTorch functional loss based on the name of the loss function.
Choices include 'cross_entropy', 'nll_loss', and 'kl_div'.
Args:
loss: a string indicating the loss function to return.
"""
loss_fn_mapp... | 2,582 |
def write_to_pubsub(tw):
"""
Publish to the given pubsub topic.
"""
messages = []
[messages.append({"data": json.loads(line, encoding="utf8")}) for line in tw]
body = json.dumps({"messages": messages}, ensure_ascii=False, encoding="utf8")
publisher = pubsub_v1.PublisherClient()
topic_pat... | 2,583 |
def tabular_generator(engines):
"""Generator that produces rows for tabular formats (CSV) from the dict
generated by export_engines.
"""
cols = [
'name',
'makerrace',
# 'description', # every value is "No information available"
'size',
'hull',
'hull_integ... | 2,584 |
def FormatAddress(chainIDAlias: str, hrp: str, addr: bytes) -> str:
    """Build a chain-prefixed address string: the chain alias, the
    separator, then the bech32 encoding of the raw address bytes."""
    encoded = FormatBech32(hrp, addr)
    return chainIDAlias + addressSep + encoded
async def get_journal_scopes(
db_session: Session, user_id: str, user_group_id_list: List[str], journal_id: UUID
) -> List[JournalPermissions]:
"""
Returns list of all permissions (group user belongs to and user) for provided user and journal.
"""
journal_spec = JournalSpec(id=journal_id)
await ... | 2,586 |
def clean_script_title(script_title):
"""Cleans up a TV/movie title to save it as a file name.
"""
clean_title = re.sub(r'\s+', ' ', script_title).strip()
clean_title = clean_title.replace('\\', BACKSLASH)
clean_title = clean_title.replace('/', SLASH)
clean_title = clean_title.replace(':', COLON)
clean_ti... | 2,587 |
def runCmd(cmd, timeout=42, sh=False, env=None, retry=0):
"""
Execute an external command, read the output and return it.
@param cmd (str|list of str): command to be executed
@param timeout (int): timeout in sec, after which the command is forcefully terminated
@param sh (bool): True if the command ... | 2,588 |
def _calc_cumsum_matrix_jit(X, w_list, p_ar, open_begin):
"""Fast implementation by numba.jit."""
len_x, len_y = X.shape
# cumsum matrix
D = np.ones((len_x, len_y), dtype=np.float64) * np.inf
if open_begin:
X = np.vstack((np.zeros((1, X.shape[1])), X))
D = np.vstack((np.zeros((1, D.... | 2,589 |
def test_rmse_particlefilter(pf_output, regression_problem):
"""Assert that the RMSE of the mode of the posterior of the PF is a lot smaller than
the RMSE of the data."""
true_states = regression_problem.solution
mode = pf_output.states.mode
rmse_mode = np.linalg.norm(np.sin(mode) - np.sin(true_st... | 2,590 |
def plot_jvm_graph(x_series, y_series, crashes, title, filename):
"""Creates a plot based on the x & y data passed in
Creates a plot with one x_series of data and multiple y_series' of data.
y_series should be a dictionary containing a key for each plot of data.
All of the plots need to have the same ... | 2,591 |
async def test_set_up_local(hass, aioclient_mock):
"""Test we do not set up Almond to connect to HA if we use Hass.io."""
entry = MockConfigEntry(
domain="almond",
data={"type": const.TYPE_LOCAL, "host": "http://localhost:9999"},
)
entry.add_to_hass(hass)
with patch(
"pyalmo... | 2,592 |
def schedules_list(format_):
"""List request schedules in project."""
project_name = get_current_project(error=True)
client = init_client()
response = client.request_schedules_list(project_name=project_name)
client.api_client.close()
print_list(response, LIST_ITEMS, rename_cols=RENAME_COLUMNS... | 2,593 |
def create_field_texture_coordinates(fieldmodule: Fieldmodule, name="texture coordinates", components_count=3,
managed=False) -> FieldFiniteElement:
"""
Create texture coordinates finite element field of supplied name with
number of components 1, 2, or 3 and the componen... | 2,594 |
def get_cert_sha1_by_openssl(certraw: str) -> str:
"""calc the sha1 of a certificate, return openssl result str"""
res: str = None
tmpname = None
try:
tmpname = tmppath / f"{uuid.uuid1()}.crt"
while tmpname.exists():
tmpname = tmppath / f"{uuid.uuid1()}.crt"
tmpname.w... | 2,595 |
def remove(package_name):
"""Removes a holodeck package.
Args:
package_name (str): the name of the package to remove
"""
if package_name not in packages:
raise HolodeckException("Unknown package name " + package_name)
for config, path in _iter_packages():
if config["name"] =... | 2,596 |
def get_files(root_path, extension='*.*'):
"""
- root_path: Path raiz a partir de onde serão realizadas a busca
- extension: Extensão de arquivo usado para filtrar o retorno
- retorna: Retorna todos os arquivos recursivamente a partir de um path raiz
"""
return [y for x in os.walk(r... | 2,597 |
def get_dderivative_skewness(uni_ts: Union[pd.Series, np.ndarray], step_size: int = 1) -> np.float64:
"""
:return: The skewness of the difference derivative of univariate time series within the
function we use step_size to find derivative (default value of step_size is 1).
"""
return get_sk... | 2,598 |
def extract_query(e: Event, f, woi, data):
"""
create a query array from the the event
:param data:
:param e:
:param doi:
"""
assert woi[0] > 0 and woi[1] > 0
e_start_index = resolve_esi(e, data)
st = int(e_start_index - woi[0] * f)
ed = int(e_start_index + woi[0] * f)
return... | 2,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.