content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
def assign_material(obj, materialname):
"""This function assigns a material to an objects mesh.
:param obj: The object to assign the material to.
:type obj: bpy.types.Object
:param materialname: The materials name.
:type materialname: str
"""
if materialname not in bpy.data.materials:
... | 929d4750d5c6e9710fdc8fc735b3792a3b4a63f4 | 3,655,000 |
def _check_stack_axis(axis, dims, default='unnamed'):
""" check or get new axis name when stacking array or datasets
(just to have that in one place)
"""
if axis is None:
axis = default
if axis in dims:
i = 1
while default+"_{}".format(i) in dims:
... | 4dc74da450d6be4872a5f03e61ec16700b197d94 | 3,655,001 |
from typing import Optional
from typing import Dict
from typing import Union
from typing import List
from typing import Any
def eval_push_time_ratios(problem_size: int = 3000) -> Optional[TimeRatioType]:
"""
Function that calculates the execution time ratios, for the different time complexities.
Here, a ... | 72f1a32f227d52040e14af77d91e4419e25480c2 | 3,655,002 |
from ..models import Sprint
from ..forms import SprintForm
def validate_pbi_sprint(sprint, snapshot_date):
""" Validate sprint in a pbi, try to create it if possible """
if sprint is None:
raise ValidationError('Sprint cannot be null')
sprt = Sprint.objects.get(id=sprint.id)
if sprt is not Non... | 8608381c5a57a9f4e776542f11a7f3e38ff4f2e5 | 3,655,003 |
def _load_data():
"""
Internal function to get the data to plot.
"""
# Load homicides
homicides = gv_data.PoliceHomicides.get()
# Calculate concentrated disadvantage
sub_data = []
for cls in [
"PublicAssistance",
"FemaleHouseholders",
"PercentInPoverty",
... | de691b9b1a6b0ce3c75075619024358fd1c09783 | 3,655,004 |
def test_auto_add_dataloader_idx(tmpdir, add_dataloader_idx):
"""test that auto_add_dataloader_idx argument works."""
class TestModel(BoringModel):
def val_dataloader(self):
dl = super().val_dataloader()
return [dl, dl]
def validation_step(self, *args, **kwargs):
... | 51ae3f22709db4a9d2b244b49cc6ac638ee7205d | 3,655,005 |
def move(x_pos, y_pos):
    """Return the G-CODE describing motion to (x_pos, y_pos).

    Emits a ``G1`` linear move using the module-level ``FEEDRATE``,
    followed by ``M400`` so the machine finishes the motion before
    processing later commands.

    :param x_pos: target X coordinate
    :param y_pos: target Y coordinate
    :return: newline-terminated G-CODE string
    """
    # f-string replaces the error-prone chain of str() concatenations;
    # output is byte-identical to the original.
    return f"G1X{x_pos}Y{y_pos}F{FEEDRATE};\nM400;\n"
def load_scrub_optional_upload(storage_folder: str, filename: str) -> str:
"""Loads a option file that was previously saved in the storage folder.
:param storage_folder: A string representing the path of the storage
folder.
:param filename: A string representing the name of the file that is being
... | fdb5abf217a767c6ac9309e098cecf9f1b70608b | 3,655,007 |
from typing import Dict
def compute_error_decrease(fun, VX, EToV) -> Dict[int, float]:
"""
Computes estimate of possible error decrease for each element in mesh.
:param fun: Function float -> float
:param VX: dict from point id to its position on x axis.
:param EToV: dict from element id to a tup... | 46b3570c7d0ad5f07faa54a954d63ecbce64c3b7 | 3,655,008 |
def file_name_to_title_name(file_name):
"""
#Arguments
check_mk_url (str): URL to Check_Mk web application, check file names and print for each file in the directory in the correct format
#Examples
file_name_to_title_name('activate_mode')
output = 'Activate Mode: activate_mode.md'
"""
... | 330eae5c34cd55f01aaf520ea9df467ea4042b1e | 3,655,009 |
def save_binary_mask_triple(
rgb_img: np.ndarray, label_img: np.ndarray, save_fpath: str, save_to_disk: bool = False
) -> np.ndarray:
"""Currently mask img background is light-blue. Instead, could set it to white. np.array([255,255,255])
Args:
rgb_img:
label_img:
save_fpath
... | ffc9dbd6550200e48548d29e6dedacde6eced3c2 | 3,655,010 |
def get_random_fortune(fortune_file):
"""
Get a random fortune from the specified file. Barfs if the corresponding
`.dat` file isn't present.
:Parameters:
fortune_file : str
path to file containing fortune cookies
:rtype: str
:return: the random fortune
"""
fortune... | d7486abbacc95a2b737471f899d002fd642f72b7 | 3,655,011 |
from typing import List
import pathlib
from typing import Callable
from typing import Dict
from typing import Optional
import sys
def output_data(
files : List[pathlib.Path],
parser : Callable[[List[str]], List[Dict[str, DataValue]]]
) -> Optional[OutputData]:
"""Parses output datapoints from a list of outp... | 3531ca105380145f83b12a9da6ad9a41c223cb70 | 3,655,012 |
async def delete_item(item_id: int, db: Session = Depends(get_db)):
"""
Delete the Item with the given ID provided by User stored in database
"""
db_item = ItemRepo.fetch_by_id(db, item_id)
if db_item is None:
raise HTTPException(status_code=404, detail="Item not found with the given ID")
... | a73ebafa7cc73c24133e4795aabbe2cb0a72172d | 3,655,013 |
import logging
from pycaret import preprocess
def get_clusters(data,
model = None,
num_clusters = 4,
ignore_features = None,
normalize = True,
transformation = False,
pca = False,
pca_components... | adfede338050b85f792e56205ede83ec46c9ea15 | 3,655,014 |
def process_messages(deck, messages, encrypt_or_decrypt):
"""(list of int, list of str, str) -> list of str
Return the messages encrypted or decrypted using the specified deck.
The parameter encrypt_or_decrypt will be ENCRYPT to encrpyt the message,
and DECRYPT to decrypt the message
>>>deck = [1,... | 4b9f2506edd44c916ac69a4c6d1d12a3b58ff89d | 3,655,015 |
def clip(wavelength, spectra, threshold, substitute=None):
""" Removes or substitutes values above the given threshold.
Args:
wavelength <numpy.ndarray>: Vector of wavelengths.
spectra <numpy.ndarray>: NIRS data matrix.
threshold <float>: threshold value for rejection
substitute... | 79d79c6353468f77dacb995cfcca9b717e8ef8e0 | 3,655,016 |
def findMachines(fqpn):
"""
Recursively yield L{MethodicalMachine}s and their FQPNs in and
under the a Python object specified by an FQPN.
The discovery heuristic considers L{MethodicalMachine} instances
that are module-level attributes or class-level attributes
accessible from module scope. M... | 71689b8ffe166dd4b8b8f126d04b3bcf8123257e | 3,655,017 |
def asynchronous_prod_milp_constraint_rule(backend_model, loc_tech, timestep):
"""
BigM limit set on `carrier_prod`, forcing it to either be zero or non-zero,
depending on whether `prod` is zero or one, respectively.
.. container:: scrolling-wrapper
.. math::
\\boldsymbol{carrier_p... | 049454e9a3aafecc8531225bc5f09b666d892fcb | 3,655,018 |
import os
def user_cilogon_certificates_directory_path(instance):
"""
Return full path to filename based on User UUID value
:param instance:
:param filename:
:return:
"""
# file will be uploaded to MEDIA_ROOT/cilogon_certificates/user_<uuid>/<filename>
return os.path.join(MEDIA_ROOT, '... | 66ae355a5c7f86181175d02c3e56edd1d06e0593 | 3,655,019 |
def drawCurveArc(self): #---- only for ELLIPSE -------------------------------------------------------------
"""Given a dxf ELLIPSE object return a blender_curve.
"""
center = self.loc
radius = self.radius
start = self.start_angle
end = self.end_angle
if start > end:
start = start - 360.0
startmatrix = Math... | d8332b171fe9ef654c0fe71b8ce0b636b23221a8 | 3,655,020 |
import pickle
def read_pickle(filename, protocol=-1, **kwargs):
    """
    read grid saved in PICKLE format into a GridData object

    :param filename: full path to the filename
    :type filename: str
    :param protocol: unused; kept for backward-compatible interface
    :param kwargs: unused; kept for backward-compatible interface
    :rtype: ~uquake.core.data.grid.Grid
    """
    # Context manager guarantees the handle is closed even if
    # pickle.load() raises (the original leaked the open file object).
    with open(filename, 'rb') as f:
        return pickle.load(f)
def linked_ims(im_list, pix_per_um, shape=(2,2),
x_range=None, y_range=None, scale_fig=1, scale_height=1.4,
brightness=1, palette='Turbo256', cmap_range='from zero',
show_fig=True, title_list=[], t_fs=24, ax_fs=16, tk_fs=12, cb_fs=14):
"""
Shows multiple f... | 5a6e0cb821a9d9d49243de297d74f5b656825f13 | 3,655,022 |
def read_qmcpack_hamiltonian(filename):
"""Read Hamiltonian from QMCPACK format.
Parameters
----------
filename : string
QMPACK Hamiltonian file.
Returns
-------
hamil : dict
Data read from file.
"""
try:
hc, chol, enuc, nmo, nelec, nmok, qkk2 = (
... | 0fb0a6d0e80ab3180da3cb4d0c6d31ba54749f1d | 3,655,023 |
def run_rnn(file):
# define model params
"""
Run the process to train/test a recurrent neural network using LSTM using a given dataset file.
:param string file: Location of CSV-formatted dataset file
:return: Model with expected (test) targets and associated scores
:rtype: object, dataframe, ob... | e159594350d35a207db904b76ea1fbea2509b235 | 3,655,024 |
def lab_results(request, format=None):
"""Get lab results data."""
if request.method == 'GET':
limit = request.query_params.get("limit", 1000)
if limit:
limit = int(limit)
order_by = request.query_params.get("order_by", "")
# TODO: Get any filters from dict(request.q... | 87d2136e515ad94241540b093c49422b96d3affe | 3,655,025 |
def load(file, file_format=None, **kwargs):
"""Load data from json, yaml, or pickle files.
This method provides a unified api for loading data from serialized files.
Args:
file (str or file-like object): Filename or a file-like object.
file_format (str, optional): If not specified, the fil... | 14970d1f9e477f94f358f6bbb220d4ae1e388ecd | 3,655,026 |
import os
from pathlib import Path
def get_criteo(root):
"""Download the Criteo data if it doesn't exist."""
url = 'https://s3-eu-west-1.amazonaws.com/kaggle-display-advertising-challenge-dataset/dac.tar.gz'
raw_folder = os.path.join(root, 'criteo', 'raw')
processed_folder = os.path.join(root, 'crit... | 9698458a7f8a2475da9a21b7cf91313cd52f2cdc | 3,655,027 |
def StepToGeom_MakeAxis2Placement_Convert(*args):
    """Thin wrapper over the SWIG extension call of the same name.

    :param SA: source placement
    :type SA: Handle_StepGeom_Axis2Placement3d &
    :param CA: converted placement (output handle)
    :type CA: Handle_Geom_Axis2Placement &
    :rtype: bool
    """
    converted = _StepToGeom.StepToGeom_MakeAxis2Placement_Convert(*args)
    return converted
import pwd
def results(year: hug.types.text, firstName: hug.types.text, lastName: hug.types.text):
"""Returns the results for a given candidate for a given year"""
engine = create_engine(
'postgresql://%s:%s@%s/%s' %(user,pwd,ip,user),
client_encoding='utf8',echo=False)
conn = engi... | a9c010d8f2633ef1c77f0903b6cf07d315486f5c | 3,655,029 |
async def get_user_from_event(event):
""" Get the user from argument or replied message. """
args = event.pattern_match.group(1).split(':', 1)
extra = None
if event.reply_to_msg_id and not len(args) == 2:
previous_message = await event.get_reply_message()
user_obj = await event.client.ge... | b503868848a78f8bfacd4fc5e211642001104e7d | 3,655,030 |
import torch
from typing import Optional
from typing import Tuple
def stat_scores_multiple_classes(
pred: torch.Tensor,
target: torch.Tensor,
num_classes: Optional[int] = None,
argmax_dim: int = 1,
reduction: str = 'none',
) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor, torch.Te... | e4b64a881de64b93f6ca8018484e9de52d6ab786 | 3,655,031 |
def build_resnet(
repetitions=(2, 2, 2, 2),
include_top=True,
input_tensor=None,
input_shape=None,
classes=1000,
block_type='usual',
class_detector_top=False):
"""
TODO
"""
# Determine proper input shape
input_shape = _obtain_input_shape(input_shape,
... | 4d248d0def20b713dda2b6cd281cbaa48450d1dc | 3,655,032 |
import requests
def correct_doi(file_name: str):
"""Attempt extract a DOI from a filename which contains a DOI."""
if file_name.startswith("acs.jced") or file_name.startswith("je"):
doi = f"10.1021/{file_name}"
elif file_name.startswith("j.jct"):
doi = f"10.1016/{file_name}"
elif file... | e01ddf648660e0fd126720042cc16b16ffe078d3 | 3,655,033 |
import os
def getTextFromFile(filename):
    """Return the full text of *filename* located in CHAPTERDIR.

    :param filename: file name relative to the module-level CHAPTERDIR
    :return: the file contents as a string
    """
    filepath = os.path.join(CHAPTERDIR, filename)
    # Context manager closes the handle; the original left it open.
    with open(filepath) as fh:
        return fh.read()
import sys
import base64
def decode_base64(data):
"""Decode base64, padding being optional.
:param data: Base64 data as an ASCII byte string
:returns: The decoded byte string.
"""
if sys.version_info.major > 2:
data = bytes(data, 'utf-8')
missing_padding = len(data) % 4
if missin... | 3b5d20d1617826a53cfd1949da0658f06a230275 | 3,655,035 |
from typing import Sequence
from typing import Dict
from typing import Tuple
from typing import Any
def create_multi_dataset_generic_benchmark(
train_datasets: Sequence[SupportedDataset],
test_datasets: Sequence[SupportedDataset],
*,
other_streams_datasets: Dict[str, Sequence[Supported... | 74160f178bcd173d53c3e954fe43e2dbeff8c680 | 3,655,036 |
def convert_to_xml_string(string):
"""
For input strings with escaped tags and special characters
issue a set of conversion functions to prepare it prior
to adding it to an article object
"""
string = entity_to_unicode(string)
string = decode_brackets(string)
string = eautils.replace_tag... | 7211ade270167e3bb681aa2c450da79f276ea169 | 3,655,037 |
def read_shear_catalog_type(stage):
"""
Determine the type of shear catalog a stage is using as input.
Returns a string, e.g. metacal, lensfit.
Also sets shear_catalog_type in the stage's configuration
so that it is available later and is saved in output.
"""
with stage.open_input('shear_cat... | 26dd03f3a2ef66acab47741df044ac8f2a92bbfb | 3,655,038 |
from typing import Sequence
from typing import List
import math
import cmath
def inverse_fft_iterative(
poly: Sequence, has_imaginary: bool = False, imag_threshold: float = 1e-14
) -> List:
"""Perform inverse iterative discrete fast Fourier transform (DFT) of a polynomial with a degree that is `2^t-1`, t bein... | 8f12b9d4ab8e4c54aca025a0f8117192c5d49e6b | 3,655,039 |
def admin_uri():
    """Build the admin URL from the app's configured 'admin-url' option.

    :returns: admin url, redirect or print friendly
    :rtype: string
    """
    admin_slug = app.global_content['options']['admin-url'].value
    return '/' + admin_slug
def rlencode(x, check = True, dropna = False):
"""
Run length encoding.
Based on http://stackoverflow.com/a/32681075, which is based on the rle
function from R.
See https://gist.github.com/nvictus/66627b580c13068589957d6ab0919e66
Parameters
----------
x : 1D array_like
Input array to encode
dropna: bool, op... | f1d38b11413da3b9d00950b829f193ca2a11f37f | 3,655,041 |
def config_load(config_path):
    """Read a JSON config file and return the parsed contents."""
    parsed = files.json_load(config_path)
    return parsed
from pytz import timezone
def tz2utc(date, tz):
"""Offset between local time and UTC.
Parameters
----------
date : various
The local time, in any format acceptable to `date2time`.
tz : string
date will be processed via `pytz`.
Returns
-------
offset : datetime.timedelta
... | f70d35c13865a4dde75ddac0359e74d420fe55fd | 3,655,043 |
import time
def replace_from_execution_report(replace_id, execution_report):
"""Create OrderCancelReplaceRequest from given execution report
For more info about OrderCancelReplaceRequest look at https://support.xena.exchange/support/solutions/articles/44000222082-ws-trading-api#order_cancel_replace_request
... | 0b4524f4cfee1d795b7eb6b5e88799374adeb950 | 3,655,044 |
def convert_unit(value, factor, offset):
    """Return converted value depending on the provided factor and offset.

    Computes ``value * factor + offset`` after routing every operand
    through ``num2decimal`` (defined elsewhere in this module).
    """
    scaled = num2decimal(value) * num2decimal(factor)
    return scaled + num2decimal(offset)
def single_chromosome_graph_scatter(
df,
chromosome,
chosen_template,
marker_width,
colors,
font_size,
xaxis_gridlines,
yaxis_gridlines,
font_family,
samples,
):
""" Filter out current chromosome and set x- and y-max"""
curr_chrom_data = df[df["Chromosome"] == chromosome]... | 9d49d8dc74e140dedc931de22b00c84e695966b5 | 3,655,046 |
def _findall_rmaps_using_reference(filename, observatory="hst"):
"""Return the basename of all reference mappings which mention `filename`."""
return uses_files([filename], observatory, "rmap") | 7dc7a67fb890681d4c94996f4fede550a1010ff5 | 3,655,047 |
def batchGD_bp(X, y, d=3, nH=10, c=3, lr=0.8, T=100, eps=0.0):
"""
BP算法, 每轮迭代使用全部样本
:param X: 训练样本的特征矩阵
:param y: 训练样本的标签向量
:param d: 训练样本的特征维数
:param nH: 隐层的节点数
:param c: 类别数
:param lr: 学习率
:param T: 停机条件1(最大迭代轮数)
:param eps: 停机条件2(相邻两次迭代loss之差的最大允许值), 设为0.0表示不使用这个条件
:return... | ef20400eb0f1012832780eddf73e7fec34579cd9 | 3,655,048 |
from typing import Dict
from typing import Any
from typing import List
import os
def run_ase_opt(
atoms: Atoms,
fmax: float = 0.01,
max_steps: int = 100,
optimizer: str = "FIRE",
opt_kwargs: Dict[str, Any] = None,
scratch_dir: str = SETTINGS.SCRATCH_DIR,
gzip: bool = SETTINGS.GZIP_FILES,
... | 366196552fc646622443ef7811c01cfd72a45d92 | 3,655,049 |
def __slicer(my_str, sub):
"""
Remove everything in a string before a specified substring is found.
Throw exception if substring is not found in string
https://stackoverflow.com/questions/33141595/how-can-i-remove-everything-in-a-string-until-a-characters-are-seen-in-python
Args:
my_str (s... | 50f9ef952ee2f9319c39948505852a209e434690 | 3,655,050 |
def load_nii(src_path, as_array=False, as_numpy=False):
"""
Load a brain from a nifti file
:param str src_path: The path to the nifty file on the filesystem
:param bool as_array: Whether to convert the brain to a numpy array of
keep it as nifty object
:param bool as_numpy: Whether to conver... | a6f34c9164476245a5e9754730bfe748fbe80f5e | 3,655,051 |
def case34_3ph():
"""
Create the IEEE 34 bus from IEEE PES Test Feeders:
"https://site.ieee.org/pes-testfeeders/resources/”.
OUTPUT:
**net** - The pandapower format network.
"""
net = pp.create_empty_network()
# Linedata
# CF-300
line_data = {'c_nf_per_km': 3.8250977, 'r_oh... | f65a01b8d52fc829b368de1414c3dd7df29bff76 | 3,655,052 |
from datetime import datetime
from dateutil import tz
def generate_daily_stats():
"""
Generates dummy daily stats for one year
"""
times = [1577836800 + (i * 86400) for i in range(0,366)]
stats_arr = [[]]
for time in times:
vals = [uniform(0,100) for i in range(843)]
stats... | fc1e172ed0eb3bc9711a1b9e38668d46e3939f9b | 3,655,053 |
def create_mask(imsize: tuple, bbox: tuple) -> Image:
    """Build a single-channel (mode "L") mask image.

    Args:
        imsize: (w, h) size of the output image
        bbox: (x0, y0, x1, y1) rectangle painted with value 255;
            every other pixel stays 0
    """
    mask_img = Image.new("L", imsize)
    ImageDraw.Draw(mask_img).rectangle(bbox, fill=255)
    return mask_img
def build_resolved_spec(api, spec_lookup, cache, force_build, spec, version,
ecosystem_hash):
"""Builds a resolved spec at a specific version, then uploads it.
Args:
* api - The ThirdPartyPackagesNGApi's `self.m` module collection.
* spec_lookup ((package_name, platform) -> Resolved... | 789cb1f0492b73af763778d35ed095f7d0a4799c | 3,655,055 |
def set_diff(seq0, seq1):
    """Return the set difference between 2 sequences as a list.

    Duplicates are collapsed and the element order of the result is
    unspecified (it follows set iteration order).
    """
    # set.difference accepts any iterable, so seq1 needs no conversion.
    return list(set(seq0).difference(seq1))
def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer, output_mode):
"""Loads a data file into a list of input features."""
'''
output_mode: classification or regression
'''
if (label_list != None):
label_map = {label : i for i, label in enumerate(label_list)}
features = []
for (ex... | 0d043a98e1a2e2159c0b0653266d65788284fa39 | 3,655,057 |
from .parameter import ModelParameters
import multiprocessing as mp
from .neural import test_dataset
def test_set_wrapper():
"""
Wrapper function to compute test datasets of fixed widths using multiprocessing.
Widths are defined in the parameter file.
Ouputs are in Neural/ folder
"""
a = ModelParameters()
widt... | fe6bf41681f1a1344cffcc15d41e77b3f46c3c46 | 3,655,058 |
from typing import List
def get_unapproved_csr_names(kubeconfig_path: str) -> List[str]:
"""
Returns a list of names of all CertificateSigningRequest resources which
are unapproved.
May raise SubprocessError
"""
return [
csr["metadata"]["name"]
for csr in oc_list(kubeconfig_pa... | 287674bb774982b0fc7ab17edeff9c12ea92b7cd | 3,655,059 |
import string
def function(n, m, f):
"""Assumes that n = m = 1. The argument f is a Python function that takesas input an n-bit string alpha and
returns as output an m-bit string f(alpha). See deutschTest for examples of f. This function returns the (n +
m)-qbit gate F that corresponds to f. """
... | f77fe869e278ede37f477b2479012861b0bb5638 | 3,655,060 |
from functools import reduce
import operator
def get_study_list_qs(user, query_dict):
"""Gets a study list query set annotated with response counts.
TODO: Factor in all the query mutation from the (view) caller.
TODO: Upgrade to Django 2.x and use the improved query mechanisms to clean this up.
Args... | 79faa7eb68df1b8148366ce501099cedcb7144db | 3,655,061 |
def get_life_stages(verbose: bool = False) -> pd.DataFrame:
    """Fetch the table of life stages.

    Parameters
    ----------
    verbose : bool
        If True, prints the SQL statement used to query the database.

    Returns
    -------
    pandas DataFrame
    """
    life_stage_table = __basic_query(LifeStage, verbose=verbose)
    return life_stage_table
def bytes_to(value_in_bytes: float, rnd: int | None = ...) -> str:
"""
:param value_in_bytes: the value in bytes to convert
:param rnd: number of digits to round to
:return: formatted string
"""
sizes = ["bytes", "KB", "MB", "GB", "TB"]
now = int()
while value_in_bytes > 1024:
va... | fc2bf917fe7520780b84c6ad3ce572e0ed0341ae | 3,655,063 |
import json
def lambda_handler(event, context):
"""Main Function"""
page_iterator = PAGINATOR.paginate(**OPERATION_PARAMETERS)
for page in page_iterator:
functions = page['Functions']
for function in functions:
funct = {
"Name": function['FunctionName'],
... | 3ff4796b5adc4de1c91ab6a72ea8f5426fcbf7c9 | 3,655,064 |
def _find_first_print(body):
""" This function finds the first print of something """
for (i, inst) in enumerate(body):
if isinstance(inst, ir.Print):
return i
return -1 | 863490b6fdca04fd093c72c8fc098b90dde6c946 | 3,655,065 |
def list_mix(set_key, encoding, in_set=""):
    """Return a seeded (key-deterministic) random shuffle of the input set.

    When *in_set* is the empty string, the characters of encoding["set"]
    are shuffled instead of *in_set* itself.
    """
    char_set = list(encoding["set"]) if in_set == "" else in_set
    seed(set_key)  # same key always yields the same ordering
    return sample(char_set, len(char_set))
def leslie(f, s):
"""Create a Leslie matrix.
Given the length n array of fecundity coefficients ``f`` and the length n-1
array of survival coefficients ``s``, return the associated Leslie matrix.
Args:
f (cupy.ndarray): The "fecundity" coefficients.
s (cupy.ndarray): The "survival" coe... | 65b37856e5e4db4d89574a08b91424f75ad424d1 | 3,655,067 |
import logging
def __convert_node(node, default_value='', default_flags=vsflags()):
"""Converts a XML node to a JSON equivalent."""
name = __get_attribute(node, 'Name')
logging.debug('Found %s named %s', node.tagName, name)
converted = {}
converted['name'] = name
switch = __get_attribute(nod... | bf23b3927ec1d9165bbd08e9e49377127ef4be5c | 3,655,068 |
def clone(output, replace=None, *args, **kwargs):
"""
Use as theano.clone().
TODO: Something useful with non-symbolic output ?
"""
if not core.is_theano_object(output):
raise ValueError("`shim.graph.clone()` is undefined for non-symbolic outputs")
return core.gettheano().clone(output, re... | 01013b77ca0f4dd089ccd4acd16835a715fe07d2 | 3,655,069 |
from datetime import datetime
def get_clustermgtd_heartbeat(clustermgtd_heartbeat_file_path):
"""Get clustermgtd's last heartbeat."""
# Use subprocess based method to read shared file to prevent hanging when NFS is down
# Do not copy to local. Different users need to access the file, but file should be wr... | 307baba7c399e8c1277622c5cd2bd2f613ba1974 | 3,655,070 |
def logout():
"""
Logs out user by deleting token cookie and redirecting to login page
"""
APP.logger.info('Logging out.')
resp = make_response(redirect(url_for('login_page',
_external=True,
_scheme=APP.config['SCHEM... | ab10628ec8e9b7a70edfbdc2f511214df7fcdfc9 | 3,655,071 |
import sys
def fileDescribe(*args, **kwargs):
    """
    .. deprecated:: 0.42.0
        Use :func:`file_describe()` instead.
    """
    # Warn on stderr, then delegate unchanged to the replacement API.
    warning = "dxpy.fileDescribe is deprecated; please use file_describe instead."
    print(warning, file=sys.stderr)
    return file_describe(*args, **kwargs)
def ATR(df, n, high_column='High', low_column='Low', close_column='Close',
join=None, dropna=False, dtype=None):
"""
Average True Range
"""
high_series = df[high_column]
low_series = df[low_column]
close_prev_series = df[close_column].shift(1)
tr = np.max((
(high_series.value... | f3835e289f23c0095d9fd0563a26d4ee4e5423cf | 3,655,073 |
import os
import urllib
import requests
def lookup(symbol):
"""Look up quote for symbol."""
# Contact API
try:
api_key = os.environ.get("API_KEY")
url = f"https://cloud-sse.iexapis.com/stable/stock/{urllib.parse.quote_plus(symbol)}/quote?token={api_key}"
response = requests.get(ur... | 56929dff46ebe26d8e9cea88fa18eceac1afced1 | 3,655,074 |
def get_full_history(sender, dialog_id):
"""Download the full history for the selected dialog"""
page = 0
limit = 100
history = []
print('Downloading messages...')
while True:
sleep(REQUEST_DELAY)
offset = page * limit
try:
history[0:0] = sender.history(dia... | 1316ebfd592404243392fdbb9e538cf818e3c6ac | 3,655,075 |
def predict_transposition_cost(shape, perm, coefs=None):
"""
Given a shape and a permutation, predicts the cost of the
transposition.
:param shape: shape
:param perm: permutation
:param coefs: trained coefficients or None to get
the default ones
:return: dictionary of features
"... | 502e14cfcc38357e7f915985a8fd15fb5c798bd4 | 3,655,076 |
def checkBuildAMR(parfile,cellfile,**kwargs):
"""
Purpose
-------
Check that BuildAMRfromParticles.f90 builds the cells around the particles
created by mkClouds.f90 in the right places.
Only cloud cells are plotted. If you want to include the field cells, in
BuildAMRfromParticles.f90's subr... | 7497181ab92b723f68cb4448c0d80bfd0f21c0a2 | 3,655,077 |
def dist(df):
    """
    Calculate Euclidean distance on a dataframe.
    Input columns are arranged as x0, x1, y0, y1, i.e. the distance is
    taken between the point in columns 0-1 and the point in columns 2-3.
    """
    delta_a = df.iloc[:, 0] - df.iloc[:, 2]
    delta_b = df.iloc[:, 1] - df.iloc[:, 3]
    return np.sqrt(delta_a ** 2 + delta_b ** 2)
def B_Calc(T, n=2):
"""
Calculate B (Constant in the mass transfer term).
:param T: cell operation temperature [K]
:type T : float
:param n: number of moles of electrons transferred in the balanced equation occurring in the fuel cell
:type n: int
:return: B as float
"""
try:
... | 1f1e8fb60797787c01bfd916b2989c1a640d0a08 | 3,655,079 |
import re
def get_page_likes(response):
"""Scan a page and create a dictionary of the image filenames
and displayed like count for each image. Return the
dictionary."""
# find all flowtow divs
flowtows = response.html.find_all('div', class_='flowtow')
result = dict()
for div in flowtows:
... | e956e54d18d6540d1a8fd07250a5c758b696bcc5 | 3,655,080 |
def firm(K, eta, alpha, delta):
"""Calculate return, wage and aggregate production.
r = eta * K^(alpha-1) * L^(1-alpha) + (1-delta)
w = eta * K^(alpha) * L^(-alpha)
Y = eta * K^(alpha) * L^(1-alpha) + (1-delta) * K
Args:
K: aggregate capital,
eta: TFP value,
alpha: out... | 29be01360e23555d30cccca33cd556f0bd406088 | 3,655,081 |
def get_col(arr, col_name):
    """Extract column *col_name* from every row of *arr*.

    :param arr: iterable of rows; each row must support ``row[col_name]``
        (dicts keyed by name, or sequences indexed by position)
    :param col_name: key/index of the column to extract
    :return: lazy iterator over the column values — matching the laziness
        of the original Python-3 ``map()`` call; wrap in ``list()`` to
        materialize
    """
    # Generator expression replaces the map(lambda ...) anti-pattern.
    return (row[col_name] for row in arr)
import urllib
import json
def get_articles(id):
"""function that process the articles and a list of articles objects
"""
get_articles_url = articles_url.format(id, api_key)
with urllib.request.urlopen(get_articles_url) as url:
news_article_results = json.loads(url.read())
news_ar... | 503b8260b6aeaaa526837f091c7e94687bfdd0de | 3,655,083 |
def check_slot_exist(start_time,end_time):
"""
Description:
check_slot_exists is responsible for checking that a slot exists
before a volunteer can create it.
Parameters:
Takes two parameters of type datetime:
start_time:datetime
end_time:datetime
return... | 1492cfb6b118d491166fc1abe202ca84a7fd26ed | 3,655,084 |
from datetime import datetime
def format_relative_date(date):
"""Takes a datetime object and returns the date formatted as a string e.g. "3 minutes ago", like the real site.
This is based roughly on George Edison's code from StackApps:
http://stackapps.com/questions/1009/how-to-format-time-since-xxx-e-g-4-minutes-... | 5dc8614fb3007ee90032cb0e2baa0b2fc910f275 | 3,655,085 |
def Jacobian_rkhs_gaussian(x, vf_dict, vectorize=False):
"""analytical Jacobian for RKHS vector field functions with Gaussian kernel.
Arguments
---------
x: :class:`~numpy.ndarray`
Coordinates where the Jacobian is evaluated.
vf_dict: dict
A dictionary contai... | 0f45147587c02dbcbf879a3f8ed26d4d9eeaea2e | 3,655,086 |
from typing import Tuple
import glob
import os
def predict_images(detection_graph: tf.Graph, image_path: str, output_path: str, output_csv_path: str,
threshold: float = 0.3, save_csv: bool = True) -> Tuple[np.ndarray]:
"""Predict detection on image
Args:
detection_graph (tf.Graph):... | 07fd773457de758405e0fbcce3da8f58fd1140f9 | 3,655,087 |
def load_pickle(file):
    """Load and return the object stored in the given cPickle file.

    :param file: path to the pickle file (parameter name shadows the
        Python-2 builtin; kept for interface compatibility)
    :return: the unpickled object
    """
    # 'rb' instead of 'r': pickle data is binary, and text mode fails on
    # Python 3. The context manager also closes the previously-leaked handle.
    with open(file, 'rb') as f:
        d = cPickle.load(f)
    logger = get_logger()
    logger.info("file %s loaded" % file)
    return d
def test_register_op_with_extending_steps_works():
"""
Calling the custom pipeline operation with an argument should yield the same
arguments passed back as a result
:return:
"""
test_pipe = Pipeline(STEPS, **PIPELINE_DEF_KWARGS)
def custom_op(doc, context=None, settings=None, **kwargs):
... | c4de3ab07fb3a6659e413f61d77bd48057a025d0 | 3,655,089 |
from typing import Tuple
def get_dates_for_last_30_days(
end_date: date,
) -> Tuple[Tuple[date, date], Tuple[date, date]]:
"""Returns dates for running RCA on the last 30 days.
The first tuple contains t-61, t-31.
The second tuple contains t-30, t.
"""
rca_start_date = end_date - timedelta(da... | c174f457ec46fabaf724665a322d697b541e815f | 3,655,090 |
from datetime import datetime
def get_3rd_friday():
"""获取当前月的第三个星期五"""
first_day_in_month = datetime.now().replace(day=1) # 本月第一天
# 获取当前月的所有星期5的日
fridays = [i for i in range(1, 28) if (first_day_in_month + timedelta(days=i - 1)).isoweekday() == 5]
if len(fridays) < 3:
raise Exception(f'获... | ae83cbb3648fc24940ef874d492f56e9ece56481 | 3,655,091 |
import requests
def retrieve_article_pdf_from_ads(bibcode, eprint_or_pub="PUB"):
"""
Get the PDF file for a given bibcode
"""
endpoint = f"{eprint_or_pub.upper()}_PDF"
safe_bibcode = quote(bibcode)
pdf_filename = f"{safe_bibcode}_{eprint_or_pub.lower()}.pdf"
url = f"{LINK_GATEWAY_BASE_URL}... | fb747a25e3415531e74a980afb476d8d27cb66a6 | 3,655,092 |
import requests
def get_account_info():
    """Fetch account information from the API and return the JSON body."""
    method = 'GET'
    path = '/open/api/v2/account/info'
    url = '{}{}'.format(ROOT_URL, path)
    signed_params = _sign(method, path)
    resp = requests.request(method, url, params=signed_params)
    return resp.json()
def str_product(string):
    """Calculate the product of all digits in a string.

    The empty string yields 1 (the multiplicative identity).
    """
    result = 1
    for ch in string:
        result *= int(ch)
    return result
def convolution_filter_grad_backward(inputs, base_axis=1, pad=None, stride=None,
dilation=None, group=1, channel_last=False):
"""
Args:
inputs (list of nn.Variable): Incomming grads/inputs to/of the forward function.
kwargs (dict of arguments): Dictionary of the ... | 5d33b4b4c96be6bc95c119e422fcda40ac698309 | 3,655,095 |
def do_after_terminate(source, after_terminate):
"""Invokes an action after an on_complete() or on_error() event.
This can be helpful for debugging, logging, and other side effects
when completion or an error terminates an operation
on_terminate -- Action to invoke after on_complete or throw is call... | 39958d49252a228f525bdd2a346eeb08ee52d8d8 | 3,655,096 |
def _get_dload_scale(dload,
xyz_scale: float,
velocity_scale: float,
accel_scale: float,
force_scale: float) -> None:
"""
LOAD asssumes force
"""
if dload.Type == 'LOAD':
scale = force_scale
elif dload.Type =... | f3a0a7d26d915ebc8231d9e4cab223f27afba2a2 | 3,655,097 |
def job_complete(job):
"""
Should be called whenever a job is completed.
This will update the Git server status and make
any additional jobs ready.
"""
job_complete_pr_status(job)
create_issue_on_fail(job)
start_canceled_on_fail(job)
ParseOutput.set_job_info(job)
ProcessCommands... | 5692ea76a5ab1ac6be0a30f74a8daec4ce5bd6a0 | 3,655,098 |
def direct(sp_script_str, run_dir, nsamp, njobs,
tgt_geo, bath_geo, thy_info, charge, mult,
smin=3.779, smax=11.339, spin_method=1, ranseeds=None):
""" Write input and run output.
:param sp_script_str: submission script for single-point calculation
:type sp_script_str: str
... | d8f977e46f71a61c23a28dc1bd3a58dfec43ba9f | 3,655,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.