content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def extra():
    """Discover the faint.extra test suite.

    Covers less central faint code, possibly requiring extensions
    (e.g. tesseract or GraphViz dot).

    Returns:
        unittest.TestSuite: the tests found under py_tests/test_extra.
    """
    loader = unittest.defaultTestLoader
    return loader.discover("py_tests/test_extra",
                           top_level_dir="py_tests/")
def cmd_te_activate(abs_filename):
    """Bring a window to the foreground (text-editor oriented).

    Locates the Visual Studio instance containing the file and brings
    it to the front.

    abs_filename - absolute path of the file
        (Ex.) c:/project/my_app/src/main.cpp
    """
    return _te_main2(cmd_activate, abs_filename)
def search(search_domain, fmt=None):
"""Handle redirect from form submit."""
domain = tools.parse_post_data(search_domain)
if domain is None:
return handle_invalid_domain(search_domain)
if fmt is None:
if features.enable_async_search():
return flask.redirect('/search?ed={}'... | 4,002 |
def test_book_list_get_correct_auth_empty_for_user_with_no_books(testapp, one_user):
    """GET on the book-list route yields an empty list for a user who owns no books."""
    credentials = {
        'email': one_user.email,
        'password': 'password',
    }
    response = testapp.get('/books', credentials)
    assert response.json == []
def get_graph_from_particle_positions(
particle_positions, box_lengths, cutoff_distance, store_positions=False
):
"""Returns a networkx graph of connections between neighboring particles
Args:
particle_positions (ndarray or dataframe): Shape
(`n_particles`, `n_dimensions`). Each of ... | 4,004 |
def cca(x,y):
""" canonical correlation analysis cca
wx, wy, r = cca(x,y) returns wx, wy two matrices which columns [:,i] correspond to the canonical weights (normalized eigenvectors) and a vector r containing the canonical correlations, all sorted in decreasing order. cca assumes as input matrices x,... | 4,005 |
def create_captcha():
""" 创建图片验证码 """
image = ImageCaptcha(fonts=DEFAULT_FONTS)
code = gen_verify_code(4)
stream = image.generate(code)
# 图片的base64字符串格式:data:image/png;data,<base64字符串>
print('===', str(base64.b64encode(stream.getvalue()), encoding='utf-8'))
image.write(code, '{code}.png'.for... | 4,006 |
def get_assay_table_path(dataset: TemplateDataset, configuration: dict) -> Path:
"""Retrieve the assay table file name that determined as a valid assay based on configuration.
Specifically, defined in subsection 'ISA meta'
:param dataset: A dataset object including a metadata component with an attached ISA... | 4,007 |
def train_models(vae, emulator, em_lr, vae_lr, signal_train, dataset, val_dataset,
epochs, vae_lr_factor, em_lr_factor, vae_min_lr, em_min_lr, vae_lr_patience, em_lr_patience,
lr_max_factor, es_patience, es_max_factor):
"""
Function that train the models simultaneously
:par... | 4,008 |
def get_next_term(cfg):
"""
Gets the next term to be added.
Args:
cfg: Expression config
"""
term = {}
if np.random.choice(['quantity', 'number'], p=[cfg.ratio, 1 - cfg.ratio]) == 'quantity':
idx = np.random.choice(range(len(cfg.quants)))
if cfg.reuse:
term['... | 4,009 |
def test_grid_jan_2022():
"""A specific unit test for January 31st, 2022 not showing up"""
days = {
date(2022, 1, 5): "E",
date(2022, 1, 6): "C",
date(2022, 1, 7): "A",
date(2022, 1, 10): "F",
date(2022, 1, 11): "D",
date(2022, 1, 12): "B",
date(2022, 1, ... | 4,010 |
def test_get_evaldiff(): # ***Incomplete test
"""Test the get_evaldiff function in the search.py file.
"""
##########################
# Arrange.
evalue1 = "evalue1"
evalue2 = "evalue2"
##########################
# Act.
#x = get_evaldiff(evalue1,
# evalue2)
###############... | 4,011 |
def demand_share_per_timestep_constraint_rule(backend_model, group_name, carrier, timestep, what):
"""
Enforces shares of demand of a carrier to be met by the given groups
of technologies at the given locations, in each timestep.
The share is relative to ``demand`` technologies only.
.. container::... | 4,012 |
def create_tables(cur, conn):
    """Run every statement in create_table_queries against the Redshift cluster.

    All CREATE TABLE statements are executed on *cur*, then committed
    once on *conn* at the end.
    """
    for sql in create_table_queries:
        cur.execute(sql)
    # Single commit after all DDL has run.
    conn.commit()
def b2str(data):
"""Convert bytes into string type."""
try:
return data.decode("utf-8")
except UnicodeDecodeError:
pass
try:
return data.decode("utf-8-sig")
except UnicodeDecodeError:
pass
try:
return data.decode("ascii")
except UnicodeDecodeError:
... | 4,014 |
def plotalphaerror(alphaarr,errorarr,errorlagarr):
""" This will plot the error with respect then alpha parameter for the
constraint.
"""
sns.set_style('whitegrid')
sns.set_context('notebook')
Nlag=errorlagarr.shape[-1]
nlagplot=4.
nrows=1+int(sp.ceil(float(Nlag)/(2*nlagp... | 4,015 |
def fill_space(space, dim, size, minval, maxval, factor):
"""Fill a dim-dimensional discrete space of ℕ^{size} with
some random hyperplane with values ranging from minval to
maxval. Returns a ℕ^{size} array. Changes space in-place."""
offsets=[np.array([0]*dim)]
return ndim_diamond_square_rec(space, dim, size, of... | 4,016 |
def sum_to(n):
    """Return the sum of all integer numbers up to and including n.

    Args:
        n (int): upper bound of the summation (inclusive). For n <= 0
            the range is empty (or just 0), so the result is 0.

    Returns:
        int: 0 + 1 + ... + n.
    """
    # The original printed the total instead of returning it, contradicting
    # its own docstring; callers could never obtain the value.
    return sum(range(n + 1))
def trigger_update_xblocks_cache_task(sender, course_key, **kwargs): # pylint: disable=unused-argument
"""
Trigger update_xblocks_cache() when course_published signal is fired.
"""
tasks = import_module('openedx.core.djangoapps.bookmarks.tasks') # Importing tasks early causes issues in tests.
# N... | 4,018 |
def fracorder_lowshelving_eastty(w1, w2, G1, G2, rB=None):
"""
Parameters
----------
w1: float
Lower corner frequency.
w2: float
Upper corner frequency.
G1: float
Target level at lower corner frequency in dB.
G2: float
Target level at upper corner frequency in... | 4,019 |
def get_cookie_date(date):
    """
    Return a date string in a format suitable for cookies
    (https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Date)

    The HTTP cookie-date grammar requires *English* day and month
    abbreviations, but ``strftime('%a'/'%b')`` is locale-dependent and
    would emit localized names under a non-C locale. The names are
    therefore spelled out explicitly.

    :param date: datetime object (assumed to already represent UTC —
        the string is suffixed "GMT" without conversion; confirm at call site)
    :return: date string in cookie format
    """
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
              "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    return "{}, {:02d} {} {:04d} {:02d}:{:02d}:{:02d} GMT".format(
        days[date.weekday()], date.day, months[date.month - 1],
        date.year, date.hour, date.minute, date.second)
def fill_block_with_call(newblock, callee, label_next, inputs, outputs):
"""Fill *newblock* to call *callee* with arguments listed in *inputs*.
The returned values are unwraped into variables in *outputs*.
The block would then jump to *label_next*.
"""
scope = newblock.scope
loc = newblock.loc
... | 4,021 |
def pairplot_correlations(data, sample=30000):
"""
:param data: a DataFrame file.
:param sample: the amount of data points to sample from (default = 30000).
:return: generates a pairwise plot relationship of the data set.
"""
# pairwise relationship of the features distributions in the dataset.
... | 4,022 |
def terraform_write_variables(configs: Dict, variables_to_exclude: List) -> str:
"""Write out given config object as a Terraform variables JSON file.
Persist variables to Terraform state directory. These variables are used
on apply / plan, and are required for deprovisioning.
"""
det_version = con... | 4,023 |
def data_context_service_interface_pointuuid_media_channel_service_interface_point_spec_mc_pool_available_spectrumupper_frequencylower_frequency_frequency_constraint_get(uuid, upper_frequency, lower_frequency): # noqa: E501
"""data_context_service_interface_pointuuid_media_channel_service_interface_point_spec_mc_p... | 4,024 |
def get_ems_config(cluster: str, headers_inc: str):
"""Fetches the EMS configuration"""
url = "https://{}/api/support/ems/".format(cluster)
try:
response = requests.get(url, headers=headers_inc, verify=False)
except requests.exceptions.HTTPError as err:
print(err)
sys.exit... | 4,025 |
def image_to_string(filename):
    """Generate a string representation of the image at the given path, for embedding in code.

    :param filename: path of the image file to load via pyglet
    :return: string of literal ``\\xNN`` escapes, one per byte of the
        image's 'LA' pixel data
    """
    image = pyglet.image.load(filename)
    data = image.get_data('LA', 16)
    # On Python 3 iterating bytes yields ints, so the original ord(x)
    # raised TypeError; accept both int (py3 bytes) and str (py2) items.
    # ''.join also avoids the quadratic cost of repeated s += ... .
    return ''.join(
        "\\x%02x" % (b if isinstance(b, int) else ord(b))
        for b in data
    )
def init_build(build_dir, started=True, finished=True):
"""Create faked files for a build."""
if started:
write(build_dir + 'started.json',
{'version': 'v1+56', 'timestamp': 1406535800})
if finished:
write(build_dir + 'finished.json',
{'result': 'SUCCESS', 'timest... | 4,027 |
def test_client_can_be_created(unauth_client):
    """Test that the client object can be created."""
    # unauth_client is a fixture presumed to build an unauthenticated
    # AvataxClient; this only checks construction succeeded.
    assert isinstance(unauth_client, AvataxClient)
def mpf_connectome(
mc, num_sampled, max_depth, args_dict, clt_start=10, sr=0.01, mean_estimate=False
):
"""Perform mpf statistical calculations on the mouse connectome."""
args_dict["max_depth"] = max_depth
args_dict["total_samples"] = num_sampled[0]
args_dict["static_verbose"] = False
args_dic... | 4,029 |
def valuedict(keys, value, default):
"""
Build value dictionary from a list of keys and a value.
Parameters
----------
keys: list
The list of keys
value: {dict, int, float, str, None}
A value or the already formed dictionary
default: {int, float, str}
A defa... | 4,030 |
async def chunks(request):
"""A handler that sends chunks at a slow pace.
The browser will download the page over the range of 2 seconds,
but only displays it when done. This e.g. allows streaming large
files without using large amounts of memory.
"""
async def iter():
yield "<html><hea... | 4,031 |
def start_vitess():
"""This is the main start function."""
topology = vttest_pb2.VTTestTopology()
keyspace = topology.keyspaces.add(name='user')
keyspace.shards.add(name='-80')
keyspace.shards.add(name='80-')
keyspace = topology.keyspaces.add(name='lookup')
keyspace.shards.add(name='0')
vttop = os.env... | 4,032 |
def get_zero_ranges(*args):
"""
get_zero_ranges(zranges, range) -> bool
Return set of ranges with zero initialized bytes. The returned set
includes only big zero initialized ranges (at least >1KB). Some zero
initialized byte ranges may be not included. Only zero bytes that use
the sparse storage method (S... | 4,033 |
def fista_step(L, Wd, X, alpha, last_Z):
"""
Calculates the next sparse code for the FISTA algorithm
Dimension notation:
B - Number of samples. Usually number of patches in image times batch size
K - Number of atoms in dictionary
d - Dimensionality of atoms in dictionary
... | 4,034 |
def get_and_validate_certs_for_replacement(
default_cert_location,
default_key_location,
default_ca_location,
new_cert_location,
new_key_location,
new_ca_location):
"""Validates the new certificates for replacement.
This function validates the new specified certi... | 4,035 |
async def end_session(ctx, tutor, account):
"""remove the tutor object from tutor accounts.
:param Context ctx: the current Context.
:param 'Worker' tutor: the object that represents a tutor.
:param {} account: the dictionary that stores the tutor objects.
"""
try:
# remove tutor object... | 4,036 |
def fail_json(*args, **kwargs):  # pylint: disable=unused-argument
    """Patch replacement for fail_json: package the return data into an exception.

    Marks the payload as failed and raises AnsibleFailJson carrying it,
    so tests can intercept module failures.
    """
    kwargs.update(failed=True)
    raise AnsibleFailJson(kwargs)
def entity_sentiment_text(text):
"""Detects entity sentiment in the provided text."""
language_client = language_service_client.LanguageServiceClient()
document = language_service_pb2.Document()
if isinstance(text, six.binary_type):
text = text.decode('utf-8')
document.content = text.encod... | 4,038 |
def build_model(cfg):
    """
    Build the whole model, as selected by `cfg.model.name`.

    Looks the architecture class up in META_ARCH_REGISTRY and
    instantiates it with the given config.
    """
    model_cls = META_ARCH_REGISTRY.get(cfg.model.name)
    return model_cls(cfg)
def download_n_parse_3k(url):
    """Fetch an article with newspaper3k and return its body text.

    Args:
        url: The article's URL

    Returns:
        The parsed article text, or None when download/parse fails
        (the failure is reported on stdout).
    """
    article = Article(url)
    try:
        article.download()
        article.parse()
    except Exception:
        print(f"Download or Parse:\t{url}")
        return None
    return article.text
def build_model(images, datasets, epochs=None, log=False, use_model=None, save_model='model', pretrained=True):
"""Run the training regime on the model and save its best effort"""
num_epochs = epochs
if not num_epochs: num_epochs = EPOCHS
model_ft = initialise_model(images, use_model=use_model... | 4,041 |
def crop_to_square(img, target_size=None):
"""
Takes numpy array img and converts it to a square by trimming
:param img: np.array representing image
:param target_size: optionally specify target size. If None, will return min(l, w) x min(l, w)
:return: np.array
"""
l, w = img.shape
img_c... | 4,042 |
def reset():
    """ Resets the built-in Layer dictionary (controls the coloring in
    quickplot() ), and sets the Device universal ID (uid) to zero. """
    # Drop every registered layer so quickplot() coloring starts fresh.
    Layer.layer_dict = {}
    # Restart uid numbering for Devices created after this call.
    Device._next_uid = 0
def reboot(name, path=None):
"""
Reboot a container.
path
path to the container parent
default: /var/lib/lxc (system default)
.. versionadded:: 2015.8.0
CLI Examples:
.. code-block:: bash
salt 'minion' lxc.reboot myvm
"""
ret = {"result": True, "changes... | 4,044 |
def find(domain):
""" Finds connected domains within a domain.
A domain is defined to be a connected region of lattice
points, subject to periodic boundary conditions.
Parameters
----------
domain : :py:class:`~fieldkit.mesh.Domain`
The set of nodes to seek connected domains in.
R... | 4,045 |
def url_to_license(url):
"""Given a URL, return the license as a license/version tuple"""
(scheme, netloc, path, *remainder) = urlparse(url)
path_parts = path.split('/')
if len(path_parts) < 4:
raise LicenseException("Did not get 4 path segments, probably not a CC license URL")
license = pa... | 4,046 |
def dsystem_dt(request):
"""Test systems for test_discrete"""
# SISO state space systems with either fixed or unspecified sampling times
sys = rss(3, 1, 1)
# MIMO state space systems with either fixed or unspecified sampling times
A = [[-3., 4., 2.], [-1., -3., 0.], [2., 5., 3.]]
B = [[1., 4.],... | 4,047 |
def update_attributes(dsFolder: types.GirderModel, data: dict):
"""Upsert or delete attributes"""
crud.verify_dataset(dsFolder)
validated: AttributeUpdateArgs = crud.get_validated_model(AttributeUpdateArgs, **data)
attributes_dict = fromMeta(dsFolder, 'attributes', {})
for attribute_id in validated... | 4,048 |
def machado_et_al_2009_matrix_protanomaly(severity):
"""Retrieve a matrix for simulating anomalous color vision.
:param cvd_type: One of "protanomaly", "deuteranomaly", or "tritanomaly".
:param severity: A value between 0 and 100.
:returns: A 3x3 CVD simulation matrix as computed by Machado et al
... | 4,049 |
def derivative_compliance(coord, connect, E, v, rho, alpha, beta, omega_par, p_par, q_par, x_min_m, x_min_k, xval, disp_vector, lam):
""" calculates the derivative of the compliance function.
Args:
coord (:obj:`numpy.array`): Coordinates of the element.
connect (:obj:`numpy.array`): Element con... | 4,050 |
def main():
"""Read input and print output cost of 3 entities generating costs"""
cost_a, cost_b, cost_c = [int(i) for i in input().split()]
parked = [0] * 102
for _ in range(3):
arrive, depart = [int(i) for i in input().split()]
for i in range(arrive, depart):
parked[i] +=... | 4,051 |
def test_status_string(app, authed_client, status_code, status):
"""The status string should populate itself based on status code."""
@app.route('/test_endpoint')
def test_endpoint():
return flask.jsonify('test'), status_code
response = authed_client.get('/test_endpoint')
assert response.g... | 4,052 |
def lanc(numwt, haf):
"""Generates a numwt + 1 + numwt lanczos cosine low pass filter with -6dB
(1/4 power, 1/2 amplitude) point at haf
Parameters
----------
numwt : int
number of points
haf : float
frequency (in 'cpi' of -6dB point, 'cpi' is cycles per interval.
... | 4,053 |
def normalize_target_taxa(target_taxa):
"""
Receives a list of taxa IDs and/or taxa names and returns a set of expanded taxids numbers
"""
from ete3 import NCBITaxa
ncbi = NCBITaxa()
expanded_taxa = set()
for taxon in target_taxa:
taxid = ""
try:
taxid = int(tax... | 4,054 |
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Fully Kiosk Browser switch."""
coordinator = hass.data[DOMAIN][config_entry.entry_id]
async_add_entities([FullyScreenSaverSwitch(hass, coordinator)], False)
async_add_entities([FullyMaintenanceModeSwitch(hass, coordinato... | 4,055 |
def filled (a, value = None):
"""a as a contiguous numeric array with any masked areas replaced by value
if value is None or the special element "masked", get_fill_value(a)
is used instead.
If a is already a contiguous numeric array, a itself is returned.
filled(a) can be used to be sure that the ... | 4,056 |
def get_generic_global(section, prop):
"""Generic getter for getting a property"""
if section is None:
raise GlobalPropertyError("Section cannot be null!")
elif prop is None:
raise GlobalPropertyError("Property cannot be null!")
global_conf = configparser.ConfigParser()
global_con... | 4,057 |
def get_mspec_descriptors(mod, mod_lim=20, freq_lim=8000, n_mod_bin=20, n_freq_bin=20):
"""
Parameters
----------
mod : 2D Numpy array
Modulation spectrogram
mod_lim : int
Upper limit of modulation frequency. The default is 20.
freq_lim : int
Upper limit of fre... | 4,058 |
def set_environment_variables_for_multi_node() -> None:
"""
Sets the environment variables that PyTorch Lightning needs for multi-node training.
"""
az_master_node = "AZ_BATCHAI_MPI_MASTER_NODE"
master_addr = "MASTER_ADDR"
master_ip = "MASTER_IP"
master_port = "MASTER_PORT"
world_rank = ... | 4,059 |
def check_single_table_dataset(dataset, expected_table=None):
"""
Raise if the given dataset is not a single-table dataset.
Parameters
----------
dataset: kartothek.core.dataset.DatasetMetadata
The dataset to be validated
expected_table: Optional[str]
Ensure that the table in th... | 4,060 |
def mustachify(
file,
mustache_file="mustache.png",
rotation=True,
perspective=False, # TODO add perspective transformation
modelsize="small",
):
"""
Pastes a mustache on each face in the image file
:param file: image file name or file object to load
:param mustache_file: file... | 4,061 |
def get_available_gpus():
"""Return a list of available GPUs with their names"""
cmd = 'nvidia-smi --query-gpu=name --format=csv,noheader'
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = process.communicate()
if p... | 4,062 |
def monitor_promo(bot, job):
""" Job to Send Promo Message """
msg = promo_alert.get_new_codes_message()
if msg is None:
print('No new promos')
else:
text_bot = ['Uber poked me privately and said this :wink:',
'I found this promo code while I was in my ActiveWear! :s... | 4,063 |
def register_image_array(img, img_name, img_desc, project_id, sample_id, usr, pwd, host, port=4064):
"""
This function imports a 5D (time-points, channels, x, y, z) numpy array of an image
to an omero server using the OMERO Python bindings
Example:
register_image_array(hypercube, "tomo_0", "thi... | 4,064 |
def fixture_result():
    """Yield the structure holding intermediate results of each test stage.

    On teardown (after the yield), end() is called with the same
    structure to clean up.
    """
    fixture = {
        SUBARRAY_USED: 'ska_mid/tm_subarray_node/1',
        SCHEDULING_BLOCK: None,
        STATE_CHECK: None,
    }
    yield fixture
    end(fixture)  # teardown
def count_symbols (val):
""" Counts the number of symbols in a string.
A symbol is defined as any character that is neither a lowercase letter, uppercase letter or digit.
Args:
val (str): The string to count symbols in.
Returns:
int: The number of symbols in the string.
"""
re... | 4,066 |
def arctan(dy, dx):
    """Return atan2(dy, dx) normalized to the range [0, 2*pi).

    Args:
        dy: vertical component of the direction vector.
        dx: horizontal component of the direction vector.

    Returns:
        float: angle in radians, 0 <= angle < 2*pi.
    """
    angle = math.atan2(dy, dx)
    # atan2 returns (-pi, pi]; shift negative angles up by a full turn.
    # Uses math.pi throughout instead of mixing in np.pi, which needlessly
    # coupled this pure-stdlib function to numpy.
    if angle < 0:
        angle += 2 * math.pi
    return angle
def meshsize(mesh: dolfin.Mesh,
kind: str = "cell") -> dolfin.MeshFunction:
"""Return the local meshsize `h` as a `MeshFunction` on cells or facets of `mesh`.
The local meshsize is defined as the length of the longest edge of the cell/facet.
kind: "cell" or "facet"
"""
if kind not in ... | 4,068 |
def write_csv_headers(logfile):
"""Write header lines in the CSV file with the schema of the messages
involved."""
for c in MESSAGE_CLASSES:
header_prefix = ["", c.__name__]
header_elements = sorted(c.__slots__)
logfile.write(",".join(
['"%s"' % h for h in (header_prefix ... | 4,069 |
def dep_graph_parser_parenthesis(edge_str):
"""Given a string representing a dependency edge in the 'parenthesis'
format, return a tuple of (parent_index, edge_label, child_index).
Args:
edge_str: a string representation of an edge in the dependency tree, in
the format edge_label(parent_wor... | 4,070 |
def multipass(args) -> None:
"""Install Multipass.
:param args: A Namespace object containing parsed command-line options.
"""
if args.install:
if is_debian_series():
cmd = f"{args.prefix} snap install multipass --classic"
run_cmd(cmd)
elif is_macos():
... | 4,071 |
def transfer_to_infeed(value, device_ordinal=0):
"""Transfers the given value into the XLA infeed queue.
XLA's infeed queue is a single queue that feeds the "XLA virtual machine" with
a totally ordered stream of values. This is dequeued from XLA computations via
the Infeed() operation.
Args:
value: the ... | 4,072 |
def measure_crypts_props_no_paneth(crypt_objs, label_mask, edu_objs, df, row, col, fld):
"""Measure crypt level properties for all crypts in image
Args:
crypt_objs (array): labeled cell objects (e.g. nuclei segmentation)
label_mask (array): labeled crypt objects
edu_objs (list): ids of cell obje... | 4,073 |
def intersection_angle(m1, m2):
    """
    Computes the intersection angle between two slopes, in degrees.

    Uses tan(theta) = (m2 - m1) / (1 + m1*m2). Perpendicular lines
    (1 + m1*m2 == 0) are handled explicitly and return 90.0 — the
    original raised ZeroDivisionError there.

    :param m1: slope of the first line
    :param m2: slope of the second line
    :return: signed angle in degrees in (-90, 90], 90.0 when perpendicular
    """
    denominator = 1 + m1 * m2
    if denominator == 0:
        return 90.0
    return math.degrees(math.atan((m2 - m1) / denominator))
def setup_console_logging(verbosity: int = logging.INFO) -> None:
    """Set the console and syslog handler levels and re-apply the logging config.

    :param int verbosity: Verbosity level logging.<verbosity>
    """
    handlers = settings.LOGGING["handlers"]
    for handler_name in ("console", "syslog"):
        handlers[handler_name]["level"] = verbosity
    logging.config.dictConfig(settings.LOGGING)
def create_zappa_project(
project_name, stack_name, session, client, username, email, password
):
"""Create the Zappa project."""
aws_rds_host = get_aws_rds_host(stack_name, session)
with open('.env', 'a') as file:
file.write('AWS_RDS_HOST={}\n'.format(aws_rds_host))
aws_lambda_host = depl... | 4,076 |
def test_retrieve_all(database_connection: mysql.connector.connect,
                      print_response: bool = False):
    """Testing response from info.retrieve_all"""
    results = info.retrieve_all(database_connection)
    assert results is not None
    if not print_response:
        return
    print(json.dumps(results, indent=2))
def erase_create_HDF(filename):
"""Create and return a new HDS5 file with the given filename, erase the file if existing.
See https://github.com/NelisW/pyradi/blob/master/pyradi/hdf5-as-data-format.md
for more information on using HDF5 as a data structure.
open for writing, truncate if exists
http... | 4,078 |
def test_output_group_with(temp_factory):
"""Test option group_with in output statement"""
temp_factory("a.txt", "b.txt")
for ofile in ["a.txt1", "b.txt2"]:
if file_target(ofile).exists():
file_target(ofile).unlink()
#
# string input
execute_workflow(r"""
[0]
... | 4,079 |
def cast2dtype(segm):
    """Cast the segmentation mask to the best dtype to save storage.

    Args:
        segm (numpy.ndarray): integer label mask.

    Returns:
        numpy.ndarray: ``segm`` cast to the dtype picked by ``getSegType``
        for its largest label.
    """
    # np.amax(segm) is all that is needed; the original's
    # np.amax(np.unique(segm)) sorted every distinct label (O(n log n))
    # just to read off the maximum.
    max_id = np.amax(segm)
    m_type = getSegType(int(max_id))
    return segm.astype(m_type)
def get_renders_df(product_df, order_df, user_df, address_df, num_days=90):
"""
Renders - All requested renders from order, both customer and tester
"""
renders_df = pd.merge(product_df, order_df, how='left', on='order_id', suffixes=(None, '_order'))
renders_df = pd.merge(renders_df, user_df, how='l... | 4,081 |
def check_sparsity_level(model, config, ref_sparsity_level):
"""
Check that sparsity level of the model is equal to reference sparse level.
"""
sparsity_algo = MagnitudeSparsity(config, None)
all_weights_nodes = sparsity_algo._get_all_weights_nodes(model)
all_weights = [get_node_value(w_node).fl... | 4,082 |
def dig_single_site(basedir):
"""
Crappy little function to dig into specific sites and look at the
individual 1-month spectra. Mostly a scratchpad function, as what needs
investigating varies.
"""
basedir = Path(basedir)
files = basedir.rglob("Level3/**/*RESMIN*.npy")
# the nyquist fre... | 4,083 |
def send_register_active_email(to_email, username, token):
"""发送激活邮件"""
# 组织邮件的内容
subject = '天天生鲜欢迎信息'
message = ''
sender = settings.EMAIL_FROM
receiver = [to_email]
html_message = """
<h1>%s, 欢迎您成为天天生鲜注册会员</h1>
请点击以下链接激活您的账号<br/>
... | 4,084 |
def get_characters_character_id_contacts(*,
character_id,
token,
if_none_match=None,
page='1'):
"""
:param character_id: An EVE character ID
:pa... | 4,085 |
def test__energy():
""" test the energy read/write functions
"""
ref_ene = -75.00613628303537
ene_file_name = autofile.name.energy('test')
ene_file_path = os.path.join(TMP_DIR, ene_file_name)
ene_str = autofile.write.energy(ref_ene)
assert not os.path.isfile(ene_file_path)
autofile.wri... | 4,086 |
def new(
name: str,
data: typing.Optional[bytes] = b"",
digest_size: typing.Optional[int] = None,
*,
custom: typing.Optional[bytes] = None, # cshakes, kangarootwelve
key: typing.Optional[bytes] = None, # for blakes
) -> Hash:
"""
Instantiate a hash object.
Args:
name: The ... | 4,087 |
def recursively_save_dict_contents_to_group(h5file, path, dic):
"""
....
"""
for key, item in dic.items():
if isinstance(item, (np.ndarray, np.int64, np.dtype(float).type, str, bytes)):
h5file[path + key] = item
elif isinstance(item, dict):
recursively_save_dict_c... | 4,088 |
def stringify_addresses(addresses):
"""
Converts a list of addresses into a string in the
`"John Doe" <john@example.com>, "Jane" <jane@example.com>"` format,
which can be directly used in the headers of an email.
Parameters
----------
addresses : (str or (str, str)) or list of (str or (str,... | 4,089 |
def validator_map_size(string):
"""
Validator for map size input
Raises InputError with error description if string is not valid
:param string: String to check
:return: Bool, if success
"""
result = False
if string.isdigit():
size = int(string)
if 5 <= size ... | 4,090 |
def get_short_token(app_id, app_secret, redirect_url, auth_code):
"""Get a short-lived access token."""
url = f"{OAUTH_URL}/access_token"
payload = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "authorization_code",
"redirect_uri": redirect_url,
"c... | 4,091 |
def random_chinese_name():
"""生成随机中文名字,二到三字
Returns:
str: 随机名字
"""
long = random.randint(2, 3)
first_name = random.choice(FIRST_NAME)
last_name = random.choice(LAST_NAME) if long == 2 else "{}{}".format(random.choice(LAST_NAME),
... | 4,092 |
def generate_json_with_incorrect_prediction_value(features_definition: dict):
"""
Generates a list of dictonaries with keys from the given features_definitions, key in the dictionary
has a corresponding value not allowed by the given definition
"""
mock_requests = []
def_keys = list(features_def... | 4,093 |
def extract_text(file: UploadFile = File(...), lang: str = "eng", text_only: bool = False, custom_config: str = None):
"""
:param file:
:param lang: available: deu, eng
:return:
"""
filepath = "temp/" + file.filename
with file.file:
with open(filepath, "wb") as temp_file:
... | 4,094 |
def split_data(
args,
data_paths: t.List[Path],
val_ratio: float = 0.20,
test_ratio: float = 0.10,
random_state: int = 42,
) -> (t.List[str], t.List[str], t.List[str]):
"""
Split the data into train, val and test and save the splits to
file.
Args:
args
data_paths: list of list of sc... | 4,095 |
def check_chains(sampler, pos, theta_lb, theta_ub,
mode_list=['bounds']):
""" check chains
1> reset out-of-bound chains
2> reset all chains to max likelihood neighbours
"""
mode_all = ['bounds', 'reset_all']
for mode in mode_list:
assert mode in mode_all
n_walkers... | 4,096 |
def get_index_train_test_path(_DATA_DIRECTORY_PATH, split_num, train = True):
"""
Method to generate the path containing the training/test split for the given
split number (generally from 1 to 20).
@param split_num Split number for which the data has to be generated
@param train ... | 4,097 |
def read_output():
"""Reads the complex values from output file sink generated by gnuradio expt 2"""
complex_output = np.fromfile(file_sink_complex_expt2, dtype = 'complex64').reshape(-1,1)
plt.figure()
plt.plot(complex_output[11:18000].real)
plt.plot(complex_output[11:18000].imag)
plt.... | 4,098 |
def sha3_256Validator(value):
"""Predicate that checks if the given value seems to be SHA-3 256 hash."""
# check if the value has the expected type
stringTypeValidator(value)
# SHA-3 256 hash has 64 hexadecimal characters
if not re.fullmatch(r"^[a-fA-F0-9]{64}$", value):
raise Invalid("the ... | 4,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.