content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def predict():
"""Renders the predict page and makes predictions if the method is POST."""
if request.method == 'GET':
return render_predict()
# Get arguments
checkpoint_name = request.form['checkpointName']
if 'data' in request.files:
# Upload data file with SMILES
data = ... | 5,356,200 |
def zernike_name(index, framework='Noll'):
"""
Get the name of the Zernike with input index in input framework (Noll or WSS).
:param index: int, Zernike index
:param framework: str, 'Noll' or 'WSS' for Zernike ordering framework
:return zern_name: str, name of the Zernike in the chosen framework
... | 5,356,201 |
def discriminator_train_batch_mle(batches, discriminator, loss_fn, optimizer):
"""
Summary
1. watch discriminator trainable_variables
2. extract encoder_output, labels, sample_weight, styles, captions from batch and make them tensors
3. predictions = discriminator(encoder_output, captions, styles, t... | 5,356,202 |
def get_subs_dict(expression, mod):
"""
Builds a substitution dictionary of an expression based of the
values of these symbols in a model.
Parameters
----------
expression : sympy expression
mod : PysMod
Returns
-------
dict of sympy.Symbol:float
"""
subs_dict = {}
... | 5,356,203 |
def save_index_summary(name, rates, dates, grid_dim):
"""
Save index file
Parameters
----------
See Also
--------
DataStruct
"""
with open(name + INDEX_SUMMARY_EXT, "w+b") as file_index:
nlist = 0
keywords_data, nums_data, nlist = get_keywords_section_data(rates) ... | 5,356,204 |
def fluxes_SIF_predict_noSIF(model_NEE, label, EV1, EV2, NEE_max_abs):
"""
Predict the flux partitioning from a trained NEE model.
:param model_NEE: full model trained on NEE
:type model_NEE: keras.Model
:param label: input of the model part 1 (APAR)
:type label: tf.Tensor
:param EV1: input... | 5,356,205 |
def xml_string(line, tag, namespace, default=None):
    """Return the text of the child element ``namespace + tag`` of ``line``.

    :param line: etree element to search within
    :param tag: tag name of the child element
    :param namespace: namespace prefix prepended to ``tag`` for the lookup
    :param default: value returned when the element is missing or unreadable
    :return: the child element's text, or ``default`` on failure
    """
    # `line.find()` returns None for a missing child, so `.text` raises
    # AttributeError.  Catch only that — the original bare `except:` also
    # swallowed KeyboardInterrupt/SystemExit and hid real bugs.
    try:
        val = line.find(namespace + tag).text
    except AttributeError:
        val = default
    return val
def generate_header(salutation, name, surname, postSalutation, address, zip, city, phone, email):
"""
This function generates the header pdf page
"""
# first we take the html file and parse it as a string
#print('generating header page', surname, name)
with open('/home/danielg3/www/crowdlobbying... | 5,356,207 |
def cli_cosmosdb_collection_exists(client, database_id, collection_id):
"""Returns a boolean indicating whether the collection exists """
return len(list(client.QueryContainers(
_get_database_link(database_id),
{'query': 'SELECT * FROM root r WHERE r.id=@id',
'parameters': [{'name': '@i... | 5,356,208 |
def probabilities (X) -> dict:
""" This function maps the set of outcomes found in the sequence of events, 'X', to their respective probabilty of occuring in 'X'.
The return value is a python dictionary where the keys are the set of outcomes and the values are their associated probabilities."""
# The set of outcomes... | 5,356,209 |
def get_recommend_news():
"""获取新闻推荐列表"""
# 触电新闻主页推荐实际URL
recommend_news_url = 'https://api.itouchtv.cn:8090/newsservice/v9/recommendNews?size=24&channelId=0'
# 当前毫秒时间戳
current_ms = int(time.time() * 1000)
headers = get_headers(target_url=recommend_news_url, ts_ms=current_ms)
resp = requests.... | 5,356,210 |
def put_profile_pic(url, profile):
"""
Takes a url from filepicker and uploads
it to our aws s3 account.
"""
try:
r = requests.get(url)
size = r.headers.get('content-length')
if int(size) > 10000000: #greater than a 1mb #patlsotw
return False
filename, h... | 5,356,211 |
def get_raw_data() -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""Loads serialized data from file.
Returns:
Tuple[np.ndarray, np.ndarray, np.ndarray]: Tuple of
features, labels and classes for the dataset.
"""
data_file: str = Path().absolute().joinpath(RAW_DATA_FILE).__str__()
... | 5,356,212 |
def cog_pixel_value(
lon,
lat,
url,
bidx=None,
titiler_endpoint="https://titiler.xyz",
verbose=True,
**kwargs,
):
"""Get pixel value from COG.
Args:
lon (float): Longitude of the pixel.
lat (float): Latitude of the pixel.
url (str): HTTP URL to a COG, e.g., '... | 5,356,213 |
def select_daily(ds, day_init=15, day_end=21):
"""
Select lead time days.
Args:
ds: xarray dataset.
day_init (int): first lead day selection. Defaults to 15.
day_end (int): last lead day selection. Defaults to 21.
Returns:
xarray dataset subset based on time... | 5,356,214 |
def project_polarcoord_lines(lines, img_w, img_h):
"""
Project lines in polar coordinate space <lines> (e.g. from hough transform) onto a canvas of size
<img_w> by <img_h>.
"""
if img_w <= 0:
raise ValueError('img_w must be > 0')
if img_h <= 0:
raise ValueError('img_h m... | 5,356,215 |
def standardize_for_imshow(image):
"""
A luminance standardization for pyplot's imshow
This just allows me to specify a simple, transparent standard for what white
and black correspond to in pyplot's imshow method. Likely could be
accomplished by the colors.Normalize method, but I want to make this as
expl... | 5,356,216 |
def err_failure(error):
    """Return True when *error* represents a failure.

    Defined as the logical negation of ``err_success``.
    """
    succeeded = err_success(error)
    return not succeeded
def rah_fixed_dt( u2m, roh_air, cp, dt, disp, z0m, z0h, tempk):
"""
It takes input of air density, air specific heat, difference of temperature between surface skin and a height of about 2m above, and the aerodynamic resistance to heat transport. This version runs an iteration loop to stabilize psychrometric data fo... | 5,356,218 |
def process_grid_subsets(output_file, start_subset_id=0, end_subset_id=-1):
""""Execute analyses on the data of the complete grid and save the processed data to a netCDF file.
By default all subsets are analyzed
Args:
output_file (str): Name of netCDF file to which the results are saved for the... | 5,356,219 |
def bulk_lookup(license_dict, pkg_list):
"""Lookup package licenses"""
pkg_licenses = {}
for pkg in pkg_list:
# Failsafe in case the bom file contains incorrect entries
if not pkg.get("name") or not pkg.get("version"):
continue
pkg_key = pkg["name"] + "@" + pkg["version"]... | 5,356,220 |
def pack_bits(bools):
"""Pack sequence of bools into bits"""
if len(bools) % 8 != 0:
raise ValueError("list length must be multiple of 8")
bytes_ = []
b = 0
for j, v in enumerate(reversed(bools)):
b <<= 1
b |= v
if j % 8 == 7:
bytes_.append(b)
... | 5,356,221 |
def make_coffee(drink_name, order_ingredients):
    """Deduct the required ingredients from the global resources store.

    :param drink_name: name of the drink, echoed back to the user
    :param order_ingredients: mapping of ingredient name -> amount to deduct
    """
    for ingredient, amount in order_ingredients.items():
        resources[ingredient] -= amount
    print("Here is your {} ☕. Enjoy!".format(drink_name))
def init_ring_dihedral(species,instance,geom = []):
"""
Calculates the required modifications to a structures dihedral to create a cyclic TS
"""
if len(geom) == 0:
geom = species.geom
if len(instance) > 3:
if len(instance) < 6:
final_dihedral = 15.
... | 5,356,223 |
def ensure_s3_bucket(s3_client, bucket_name, bucket_region):
"""Ensure an s3 bucket exists, if it does not then create it.
Args:
s3_client (:class:`botocore.client.Client`): An s3 client used to
verify and create the bucket.
bucket_name (str): The bucket being checked/created.
... | 5,356,224 |
def get_largest_contour(
contours: List[NDArray], min_area: int = 30
) -> Optional[NDArray]:
"""
Finds the largest contour with size greater than min_area.
Args:
contours: A list of contours found in an image.
min_area: The smallest contour to consider (in number of pixels)
Returns... | 5,356,225 |
def particle(
engine,
particle_id="",
color: Tuple4 = (1, 0.4, 0.1, 1),
random_color: bool = False,
color_temp: bool = False,
vx=None,
vy=None,
vz=None,
speed_limit=None,
) -> Material:
""" Particle material. """
mat = bpy.data.materials.new(f"Particle{particle_id}")
# F... | 5,356,226 |
def _get_hardware_sharing_throughputs(
outdirs,
device,
device_model,
precs,
filename,
mode,
):
""" The result is in the format of
{
'amp': pd.DataFrame, # df contains max_B rows
'fp32': pd.DataFrame, # df contains max_B rows
}
df format: (`B` is the index)
B {mode}:{prec}... | 5,356,227 |
def mask_to_segm(mask, bbox, segm_size, index=None):
"""Crop and resize mask.
This function requires cv2.
Args:
mask (~numpy.ndarray): See below.
bbox (~numpy.ndarray): See below.
segm_size (int): The size of segm :math:`S`.
index (~numpy.ndarray): See below. :math:`R = N` ... | 5,356,228 |
def plot_2D_vector_field(vector_field, downsampling):
"""vector_field should be a tensor of shape (2,H,W)"""
downsample2D = monai.networks.layers.factories.Pool['AVG', 2](
kernel_size=downsampling)
vf_downsampled = downsample2D(vector_field.unsqueeze(0))[0]
plt.quiver(
vf_downsampled[0, ... | 5,356,229 |
def get_image(_svg_code):
    """Render the given SVG markup to a PNG file named 'output.png'."""
    output_path = 'output.png'
    svg2png(bytestring=_svg_code, write_to=output_path)
def append_unique(func):
"""
This decorator will append each result - regardless of type - into a
list.
"""
def inner(*args, **kwargs):
return list(
set(
_results(
args[0],
func.__name__,
*args,
... | 5,356,231 |
def _get_unique_figs(tree):
    """
    Extract duplicate figures from the tree
    """
    value_getter = _get_fig_values(tree)
    fig_values = [value_getter(node) for node in tree]
    return _find_unique_figures_wrap(fig_values, [])
def read_fssp(fssp_handle):
"""Process a FSSP file and creates the classes containing its parts.
Returns:
:header: Contains the file header and its properties.
:sum_dict: Contains the summary section.
:align_dict: Contains the alignments.
"""
header = FSSPHeader()
sum_dict ... | 5,356,233 |
def LoadJSON(json_string):
    """Parse *json_string* into a Python object, or return None.

    Args:
      json_string: A string to get object from.
    Returns:
      The decoded object if the string is valid JSON, None otherwise.
    """
    try:
        return json.loads(json_string)
    except ValueError:
        # Invalid JSON (json.JSONDecodeError subclasses ValueError).
        return None
def stopLoop() -> None:
    """Stop the MQTT network loop.

    Raises:
        MqttClientNotInit: if the module-level client or logger is unset.
    """
    global client, logger
    if client is None:
        raise MqttClientNotInit()
    if logger is None:
        raise MqttClientNotInit()
    logger.info('stopping network loop')
    client.loop_stop()
def _dimensions_matrix(channels, n_cols=None, top_left_attribute=None):
"""
time,x0 y0,x0 x1,x0 y1,x0
x0,y0 time,y0 x1,y0 y1,y0
x0,x1 y0,x1 time,x1 y1,x1
x0,y1 y0,y1 x1,y1 time,y1
"""
# Generate the dimensions matrix from the docstring.
ds = inspect.getdoc(_dimensions_m... | 5,356,236 |
def is_monotonic_increasing(x):
    """
    Helper function to determine if a list is monotonically increasing.

    A sequence counts as increasing when no consecutive difference
    is negative (plateaus are allowed).
    """
    differences = np.diff(x)
    return np.all(differences >= 0)
def help_command(update: Update, context: CallbackContext) -> NoReturn:
"""Send a message when the command /help is issued."""
update.message.reply_text(
"""
Comandos:
/cadastro nome-de-usuario ex: /cadastro 000.000.000-00 ou fulandodetal\n
/Aditamento
/boletim
... | 5,356,238 |
def cluster_size_threshold(data, thresh=None, min_size=20, save=False):
""" Removes clusters smaller than a prespecified number in a stat-file.
Parameters
----------
data : numpy-array or str
3D Numpy-array with statistic-value or a string to a path pointing to
a nifti-file with statist... | 5,356,239 |
def dij_delay(parameter,error_rate, dT):
"""calculate the area-to-area latency
:param parameter: two-dimensional list about parameter of constellations
:param error_rate: float, probability of satellite failure
:param dT: int, accuracy of the results
"""
constellation_num = len(parameter[0])
... | 5,356,240 |
def convert_df(df):
"""Makes a Pandas DataFrame more memory-efficient through intelligent use of Pandas data types:
specifically, by storing columns with repetitive Python strings not with the object dtype for unique values
(entirely stored in memory) but as categoricals, which are represented by repeated... | 5,356,241 |
def run_add(request):
"""Add a run."""
if request.method == "POST":
form = forms.AddRunForm(request.POST, user=request.user)
run = form.save_if_valid()
if run is not None:
messages.success(
request, u"Run '{0}' added.".format(
run.name)
... | 5,356,242 |
def map_aemo_facility_status(facility_status: str) -> str:
"""
Maps an AEMO facility status to an Opennem facility status
"""
unit_status = facility_status.lower().strip()
if unit_status.startswith("in service"):
return "operating"
if unit_status.startswith("in commissioning"):
... | 5,356,243 |
def demand_monthly_ba(tfr_dfs):
    """A stub transform function: passes its input through unchanged."""
    return tfr_dfs
def render_page(page, title="My Page", context=None):
"""
A simple helper to render the md_page.html template with [context] vars, and
the additional contents of `page/[page].md` in the `md_page` variable.
It automagically adds the global template vars defined above, too.
It returns a string, usuall... | 5,356,245 |
def _SortableApprovalStatusValues(art, fd_list):
"""Return a list of approval statuses relevant to one UI table column."""
sortable_value_list = []
for fd in fd_list:
for av in art.approval_values:
if av.approval_id == fd.field_id:
# Order approval statuses by life cycle.
# NOT_SET == 8 ... | 5,356,246 |
def resolve_pointer(document, pointer: str):
"""
Resolve a JSON pointer ``pointer`` within the referenced ``document``.
:param document: the referent document
:param str pointer: a json pointer URI fragment to resolve within it
"""
root = document
# Do only split at single forward slashes w... | 5,356,247 |
def load_config_file(config_file):
""" Loads the given file into a list of lines
:param config_file: file name of the config file
:type config_file: str
:return: config file as a list (one item per line) as returned by open().readlines()
"""
with open(config_file, 'r') as f:
config_doc... | 5,356,248 |
def retrieve_analysis_report(accession, fields=None, file=None):
"""Retrieve analysis report from ENA
:param accession: accession id
:param fields: comma-separated list of fields to have in the report (accessible with get_returnable_fields with result=analysis)
:param file: filepath to save the content... | 5,356,249 |
def save_trajectory(file_name, trajectory):
""" Write trajectory as .csv file in the trajectories folder
Each line represents a single trajectory point with:
time from start (1 value), states (8 values), d/dt states (8 values), d^2/dt^2 states (8 values)
"""
# Create trajectory folder if non-existe... | 5,356,250 |
def _CommonChecks(input_api, output_api):
"""Checks for both upload and commit."""
results = []
results.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, project_name='Native Client',
excluded_paths=tuple(EXCLUDE_PROJECT_CHECKS)))
# The commit queue assumes PRESUBMIT.py is st... | 5,356,251 |
def profile_main_as_logs():
"""Main program for profiling. Profiling data logged.
"""
import cProfile
import pstats
import StringIO
prof = cProfile.Profile()
prof = prof.runctx("real_main()", globals(), locals())
stream = StringIO.StringIO()
stats = pstats.Stats(prof, stream=stream)
stats.sort_st... | 5,356,252 |
def visualize_cluster_entropy(
doc2vec, eval_kmeans, om_df, data_cols, ks, cmap_name="brg"
):
"""Visualize entropy of embedding space parition. Currently only supports doc2vec embedding.
Parameters
----------
doc2vec : Doc2Vec model instance
Instance of gensim.models.doc2vec.Doc2Vec
ev... | 5,356,253 |
def delete_student_meal_plan(
person_id: str = None,
academic_term_id: str = None):
"""
Removes a meal plan from a student.
:param person_id: The numeric ID of the person.
:param academic_term_id: The numeric ID of the academic term you're interested in.
:returns: String containing ... | 5,356,254 |
def colorize(x):
"""Converts a one-channel grayscale image to a color heatmap image. """
if x.dim() == 2:
torch.unsqueeze(x, 0, out=x)
return
if x.dim() == 3:
cl = torch.zeros([3, x.size(1), x.size(2)])
cl[0] = gauss(x, .5, .6, .2) + gauss(x, 1, .8, .3)
cl[1] = gauss(... | 5,356,255 |
def local_role_density(
annotated_hypergraph, include_focus=False, absolute_values=False, as_array=False
):
"""
Calculates the density of each role within a 1-step neighbourhood
of a node, for all nodes.
Input:
annotated_hypergraph [AnnotatedHypergraph]: An annotated hypergraph.
... | 5,356,256 |
def get(url: str) -> dict:
"""
author、audioName、audios
"""
data = {}
headers = {
"Accept": "application/json, text/plain, */*",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "zh-CN,zh;q=0.9",
"Connection": "keep-alive",
"Host": "www.kuwo.cn",
... | 5,356,257 |
def get_subject_mask(subject, run=1, rois=[1030,2030], path=DATADIR,
space=MRISPACE,
parcellation=PARCELLATION):
"""
Get subject mask by run and ROI key to apply to a dataset
(rois are in DATADIR/PARCELLATION.tsv)
inputs:
subject - sid00[0-9]{4}
... | 5,356,258 |
def _add_args(parser, args):
"""
Call subcommand.add_argument() based on args list.
:param parser: the parser being build
:param list args: a data structure representing the arguments to be added
"""
for name, arg in args:
parser.add_argument(name, **arg) | 5,356,259 |
def social_auth(user):
    """
    Return True if specified user has logged in with local account, False if user
    uses 3rd party account for sign-in.

    :param user: user object exposing a ``password`` attribute
    :return: bool
    """
    # Compare by value: the original `is not` tested object identity, so the
    # result depended on string interning rather than the password contents.
    # The redundant `True if ... else False` is also dropped.
    return user.password != settings.SOCIAL_AUTH_USER_PASSWORD
def update_post(post_id):
"""
The route used to update a post. It displays the create_post.html page with the original posts contents filled in,
and allows the user to change anything about the post. When the post has been successfully updated it redirects to
the post route.
"""
post = Post.que... | 5,356,261 |
def lrelu(x, leak=0.2, scope="lrelu"):
"""
leaky relu
if x > 0: return x
else: return leak * x
:param x: tensor
:param leak: float, leak factor alpha >= 0
:param scope: str, name of the operation
:return: tensor, leaky relu operation
"""
with tf.variable_scope(scope):
... | 5,356,262 |
def test_upload(mocker):
"""Sanity check that upload is ok."""
runner = CliRunner()
with runner.isolated_filesystem():
os.mkdir("cli_test_data")
with open("cli_test_data/test.fastq.gz", "w") as fastq_file:
fastq_file.write("AAABBB")
mocked_login = mocker.patch.object(
... | 5,356,263 |
def set_logger(debug_level="info", detail_level=2):
"""Initialises the logger.
Args:
Debug_level (str): Minimum logger level to display.
- debug
- info
Detail level (int): Level of detail for the formatter.
- 0: Only messages
- 1: Messages and tim... | 5,356,264 |
def enable_async(func: Callable) -> Callable:
"""
Overview:
Empower the function with async ability.
Arguments:
- func (:obj:`Callable`): The original function.
Returns:
- runtime_handler (:obj:`Callable`): The wrap function.
"""
@wraps(func)
def runtime_handler(task... | 5,356,265 |
def get_past_data_from_bucket_as_dataframe():
"""Read a blob"""
bucket_name = "deep_learning_model_bucket"
blob_name = "past_data.csv"
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
blob = bucket.blob(blob_name)
return_data = blob.download_as_text()
... | 5,356,266 |
def align_junction_LeftRight(viral_seq, bp_pos, ri_pos, align_to="L"):
"""If align_to="L", put all the ambiguous nucleotides in the
'body' part of the junction defined by bp_pos and ri_pos,
that is the junction point is moved as close to the 5'
end (of viral_seq) as possible. If align_to="R", do the op... | 5,356,267 |
def initialize_sqlite_tables_if_not_initialized() -> bool:
"""
Initialize the sqlite tables if they have not been
initialized yet.
Returns
-------
initialized : bool
If initialized, returns True.
"""
table_exists: bool = _table_exists(
table_name=TableName.EXP... | 5,356,268 |
def poisson2vpvs(poisson_ratio):
    """
    Convert Poisson's ratio to Vp/Vs ratio.

    Parameters
    ----------
    poisson_ratio : float
        Poisson's ratio.

    Returns
    -------
    vpvs_ratio : float
        Vp/Vs ratio.
    """
    numerator = 2 * (poisson_ratio - 1)
    denominator = 2 * poisson_ratio - 1
    return sqrt(numerator / denominator)
def run_test_problem1b():
""" Tests the problem1b function. """
# -------------------------------------------------------------------------
# TODO: 5. Implement this TEST function.
# It TESTS the problem1b function defined below.
# Include at least ** 4 ** tests. Use the usual form:
... | 5,356,270 |
def vs30_to_z1pt0_cy14(vs30, japan=False):
"""
Returns the estimate depth to the 1.0 km/s velocity layer based on Vs30
from Chiou & Youngs (2014) California model
:param numpy.ndarray vs30:
Input Vs30 values in m/s
:param bool japan:
If true returns the Japan model, otherwise the Ca... | 5,356,271 |
def empowerment(iface, priority=0):
"""
Class decorator for indicating a powerup's powerup interfaces.
The class will also be declared as implementing the interface.
@type iface: L{zope.interface.Interface}
@param iface: The powerup interface.
@type priority: int
@param priority: The prio... | 5,356,272 |
def multigauss_and_bgd_jacobian(x, *params):
"""Jacobien of the multiple Gaussian profile plus a polynomial background to data.
The degree of the polynomial background is fixed by parameters.CALIB_BGD_NPARAMS.
The order of the parameters is a first block CALIB_BGD_NPARAMS parameters (from low to high Legend... | 5,356,273 |
def test_pes_transform(Simulator, seed):
"""Test behaviour of PES when function and transform both defined."""
n = 200
# error must be with respect to transformed vector (conn.size_out)
T = np.asarray([[0.5], [-0.5]]) # transform to output
m = nengo.Network(seed=seed)
with m:
u = nengo... | 5,356,274 |
def get_empty_config():
"""
Return an empty Config object with no options set.
"""
empty_color_config = get_empty_color_config()
result = Config(
examples_dir=None,
custom_dir=None,
color_config=empty_color_config,
use_color=None,
pager_cmd=None,
edito... | 5,356,275 |
def add_user(vo, usercert):
"""Add the user identified by the given cert to the specified VO. Uses direct MySQL statements instead of voms-admin.
The CA cert that issued the user cert must already be in the database's 'ca' table - this happens automatically if
the CA cert is in /etc/grid-security/certificat... | 5,356,276 |
def parse(sql_string):
    """Given a string containing SQL, parse it and return the normalized result.

    The FROM and WHERE clauses are rewritten in place by the module's
    normalization helpers before the parse result is returned.
    """
    result = select_stmt.parseString(sql_string)
    result.from_clause = _normalize_from_clause(result.from_clause)
    result.where_clause = _normalize_where_clause(result.where_clause)
    return result
def check_overwrite(path: str, overwrite: bool = False) -> str:
"""
Check if a path exists, if so raising a RuntimeError if overwriting is disabled.
:param path: Path
:param overwrite: Whether to overwrite
:return: Path
"""
if Path(path).is_file() and not overwrite:
raise RuntimeErr... | 5,356,278 |
def process_amendments(notice, notice_xml):
""" Process the changes to the regulation that are expressed in the notice.
"""
amends = []
notice_changes = changes.NoticeChanges()
amdpars_by_parent = []
for par in notice_xml.xpath('//AMDPAR'):
parent = par.getparent()
exists = filt... | 5,356,279 |
def bytes_to_text(input):
    """Convert bytes laid out as (latin-1 char + padding byte) pairs to text.

    :param input: bytes of even length; every second byte is padding
    :return: decoded string with trailing NUL characters stripped
    """
    # One "sx" unit per pair: "s" reads the char byte, "x" skips the pad.
    # Floor division avoids the original's float round-trip int(len/2).
    pair_count = len(input) // 2
    content = struct.unpack(pair_count * "sx", input)
    return "".join(chunk.decode("latin-1") for chunk in content).rstrip("\x00")
def flatten_conv_capsule(inputs):
"""
:param inputs is output from a convolutional capsule layer
inputs.shape = [N,OH,OW,C,PH] C is channel number, PH is vector length
:return shape = [N,OH*OW*C,PH]
"""
inputs_shape = inputs.shape
l=[]
for i1 in range(inputs_shape[1]):
for i... | 5,356,281 |
def LookupValue(values, name, scope, kind):
"""Like LookupKind, but for constant values."""
# If the type is an enum, the value can be specified as a qualified name, in
# which case the form EnumName.ENUM_VALUE must be used. We use the presence
# of a '.' in the requested name to identify this. Otherwise, we pr... | 5,356,282 |
def SI1452(key,
Aleph=u'\u05d0', Tav=u'\u05ea'):
"""
Minimalist caps action
Make sure latin capital letters are produced in keys carrying them
(additionally, make Hebrew-letter keys go to level 2)
"""
if Aleph<=key.level_chars[1]<=Tav or u'A' <=key.level_chars[2]<=u'Z':
return... | 5,356,283 |
def _testClockwise():
"""
# get
>>> from defcon.test.testTools import getTestFontPath
>>> from defcon.objects.font import Font
>>> font = Font(getTestFontPath())
>>> contour = font['A'][0]
>>> contour.clockwise
False
>>> contour = font['A'][1]
>>> contour.clockwise
True
>... | 5,356,284 |
def integration_tests(ctx, install_deps=False, race=False, remote_docker=False):
"""
Run integration tests for cluster-agent
"""
if install_deps:
deps(ctx)
# We need docker for the kubeapiserver integration tests
tags = DEFAULT_BUILD_TAGS + ["docker"]
test_args = {
"go_buil... | 5,356,285 |
def new_topic(request):
"""添加新主题"""
if request.method != 'POST':
form = TopicForm() # 如果不是POST请求, 表示首次请求, 返回空表单
else:
# POST提交了数据, 对数据进行处理
form = TopicForm(request.POST) # 根据请求传入的数据创建一个表单对象
# is_valid()函数核实用户填写了所有必不可少的字段(表单字段默认都是必不可少的),
# 且输入的数据与要求的字段类型一致
i... | 5,356,286 |
def pkg_config(*packages, **kw):
"""Translate pkg-config data to compatible Extension parameters.
Example usage:
>>> from distutils.extension import Extension
>>> from pkgdist import pkg_config
>>>
>>> ext_kwargs = dict(
... include_dirs=['include'],
... extra_compile_args=['-s... | 5,356,287 |
async def get_xdg_data_dir(app=None):
"""Return a data directory for this app.
Create the directory if it does not exist.
"""
if app is None:
app = await get_package_name()
data_home = Path(await get_xdg_home('XDG_DATA_HOME'))
data_dir = data_home / app
if not await data_dir.exists... | 5,356,288 |
def patch_subscription(subscription, data):
    """Apply *data* as a partial update to the given subscription.

    Delegates to the stage-based messaging client using the
    subscription's own id.
    """
    subscription_id = subscription["id"]
    return stage_based_messaging_client.update_subscription(
        subscription_id, data)
def create_related_profile(instance, created, *args, **kwargs):
"""
checks if the save causing the received signal
is the one that creates a user instance
If the save that caused the signal is an update then,
a user already exists in the database
"""
if instance and created:
instance... | 5,356,290 |
def update_game(game_obj, size, center1, center2):
"""
Update game state
"""
new_game_obj = game_obj.copy()
if center1 is not None:
new_game_obj['rudder1_pos'] = center1
if center2 is not None:
new_game_obj['rudder2_pos'] = center2
# Check if hitting corner
init_vel = n... | 5,356,291 |
def get_background_pools(experiment: Experiment) -> ThreadPoolExecutor:
"""
Create a pool for background activities. The pool is as big as the number
of declared background activities. If none are declared, returned `None`.
"""
method = experiment.get("method")
rollbacks = experiment.get("rollba... | 5,356,292 |
def download_thumb(se: requests.Session, proxy: dict, addr: str) -> str:
"""下载缩略图
Args:
se: 会话对象
proxy: 代理字典
addr: 缩略图地址
Returns:
成功时返回缩略图的本地绝对路径,失败时返回空字符串
"""
header = {'User-Agent': USER_AGENT}
try:
with se.get(addr,
headers=header,... | 5,356,293 |
def iter_dir_completions(arg):
    """Generate an iterator that iterates through directory name completions.

    :param arg: The directory name fragment to match
    :type arg: str
    """
    # Delegate to the file completer with the directories-only flag set.
    directories_only = True
    return iter_file_completions(arg, directories_only)
def climate_eurotronic_spirit_z_fixture(client, climate_eurotronic_spirit_z_state):
    """Mock a climate radio danfoss LC-13 node."""
    spirit_node = Node(client, climate_eurotronic_spirit_z_state)
    controller_nodes = client.driver.controller.nodes
    controller_nodes[spirit_node.node_id] = spirit_node
    return spirit_node
def shuffles_from_transition_counts(transition_counts, initial_state, final_state):
"""
iterate over sequences having transition count N
starting in initial_state, ending in final_state
Args:
* N - int array of transition counts
* initial_state - int initial state
* final_st... | 5,356,296 |
def cli():
    """Command line interface for Mach O Peek"""
    # Entry-point shell: subcommands are attached elsewhere, so the body
    # intentionally does nothing.
    return None
def call_experiment(thunk, thunk_params_dict_list, args, cpu_num, **kwargs):
"""
:params_dict thunk:待启动的函数
:params_dict params_dict:批量参数名
:params kwargs: 其他的一些没考虑到的参数~用处不大,没事儿最好别写这个,容易造成混乱~
正常的函数,传入参数之后,就会直接执行。
但是通过这个神奇的lambda,就可以即把参数传进去,又不执行。返回出一个函数
再次调用的时候,只需要将返... | 5,356,298 |
def xi_eta_to_ab(ξ, η):
""" function to transform xi, eta coords to a, b
see Hesthaven function 'rstoab'
@param xi, eta vectors of xi, eta pts
"""
a, b = np.zeros_like(ξ), np.zeros_like(η)
singular = np.isclose(η, 1.0)
nonsingular = np.logical_not(singular)
a[nonsingular] = 2*(1. + ξ[non... | 5,356,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.