content stringlengths 35 762k | sha1 stringlengths 40 40 | id int64 0 3.66M |
|---|---|---|
import struct
def test_eap_proto_otp_errors(dev, apdev):
"""EAP-OTP local error cases"""
def otp_handler2(ctx, req):
logger.info("otp_handler2 - RX " + req.encode("hex"))
if 'num' not in ctx:
ctx['num'] = 0
ctx['num'] = ctx['num'] + 1
if 'id' not in ctx:
... | c32797121b695ad30f3cb3013a79c0e309d88715 | 3,654,500 |
def pivot_proportions(df, groups, responses, weights=1):
"""
Pivot data to show the breakdown of responses for each group.
Parameters:
df: a pandas DataFrame with data to be aggregated
groups: the name of the column containing the groups to partition by
respones: the name of the column th... | 7bf8cdc199fe800cb1bb280ceb2ffdb489f0d342 | 3,654,501 |
def row_stack(a1, a2):
"""
Stacks data from subsequent sweeps, while padding "empty" columns from
subsequent sweeps.
Inputs
------
a1: np.array
destination array
a2: np.array
array which is added onto the first array
Returns
-------
out: np.array
stacked d... | 4e8961351283a1702bc25349f2523c068cfb5424 | 3,654,502 |
def globalPrediction(vid, category_names, vid_probs, predicted_labels):
"""
Get a matrix of probabilities over the classes for the c3d features of
a video. Generate the top 3 predictions from the prob matrix
"""
anno_list = []
# Idea 1 : To form the hist over the categories, each bin ha... | 51676499cbf719874c49b89557d960ed8a136243 | 3,654,503 |
def GetApexServerStatus(api_key):
"""
get the status of Apex Legends servers.
:param api_key: The API key to use.
Warning
You must put either a clickable link to "https://apexlegendsstatus.com" OR have a message such as "Data from apexlegendsstatus.com" when displaying data coming from this API. You... | 362ca4e68ffbf395f56ccb6aad65cc9d13ab4545 | 3,654,504 |
def construct_mdx(cube_name, rows, columns, contexts=None, suppress=None):
""" Method to construct MDX Query from
:param cube_name: Name of the Cube
:param rows: Dictionary of Dimension Names and Selections
:param columns: Dictionary of Dimension Names and Selections (Dimension-MDX, List of Elementna... | 117d554b71fcb5c065664e51a9064b2edb504ed6 | 3,654,505 |
def mock_train_model(spark_context, testserver):
"""Pre-condition: worker.update_one is assumed to be working."""
inq = Queue()
outq = Queue()
job = get_job()
job['urls'] = [testserver.url]
db = get_fake_mongo_client().ophicleide
db.models.insert_one(job)
inq.put(job)
update_model... | eb862f8f600a6aa64cb65685f122dd577a6e51df | 3,654,506 |
def calc_number_of_children(*args):
"""
calc_number_of_children(loc, tif, dont_deref_ptr=False) -> int
Calculate max number of lines of a formatted c data, when expanded (
'PTV_EXPAND' ).
@param loc: location of the data ( ALOC_STATIC or ALOC_CUSTOM )
(C++: const argloc_t &)
@param ti... | cfc7427ec5ff4d0fc78d87d315460c62d130cd3d | 3,654,507 |
def _entity_namespace_key(entity, key):
"""Return an entry from an entity_namespace.
Raises :class:`_exc.InvalidRequestError` rather than attribute error
on not found.
"""
ns = entity.entity_namespace
try:
return getattr(ns, key)
except AttributeError as err:
util.raise_(... | ffd063523a8011a8ee2dd3700920a0523465d6cc | 3,654,508 |
def get_messages(mtype, read=False, uid=None):
""" Returns query for messages. If `read` is True it only queries for unread messages """
query = Message.select().where(Message.mtype << mtype)
query = query.where(Message.receivedby == current_user.uid if not uid else uid)
if read:
query = query.w... | 7959a0510d8f6794ff40d8467d09b0833279be10 | 3,654,509 |
import numpy
def coords_to_indices(coords, top, left, csx, csy, shape, preserve_out_of_bounds=False):
"""
Convert coordinates to array indices using the given specs.
Coordinates outside of the shape are not returned.
:param coords: Tuple of coordinates in the form ([x...], [y...])
:param top: T... | 89b99ffc159c56855792d0daeb8bdb5a5d04ad9f | 3,654,510 |
def sanitize_vcf_file(vcf_file, out_file, snp_log_file, sample_log_file, logging, min_count=1, max_missing=0.25,
max_alt_states=4, disruptive_threshold=1,window_size=30,max_snps=2):
"""
Filter a user provided vcf and write a filtered vcf file
Parameters
----------
vcf_file [str... | b5d96e9224b5eddad1dff8dcf2caf558522376bc | 3,654,511 |
from typing import Optional
from typing import Any
def geq(column: str, value: Optional[Any]) -> str:
"""
>>> geq("col", None)
'1'
>>> geq("col", 1)
'col >= 1'
>>> geq("col", "1")
"col >= '1'"
"""
if not value:
return "1"
if isinstance(value, str):
return f"{col... | 9216b8e2480232840ad37d8fe0e5c0f07b88873f | 3,654,512 |
from keras.layers import Conv2D, Dense
from palmnet.layers import Conv2DCustom
from palmnet.layers.sparse_facto_sparse_tensor_deprecated import SparseFactorisationDense
def count_model_param_and_flops(model, dct_layer_sparse_facto_op=None):
"""
Return the number of params and the number of flops of 2DConvolut... | 142b04ad327f662d315d7c92322df8aef2ae9871 | 3,654,513 |
def longest_match(list1, list2):
"""
Find the length of the longest substring match between list1 and list2.
>>> longest_match([], [])
0
>>> longest_match('test', 'test')
4
>>> longest_match('test', 'toast')
2
>>> longest_match('supercalifragilisticexpialidocious', 'mystical califor... | 4a84dacbb0d59fc7f9c4b59e87e55c72416b8c80 | 3,654,514 |
def deserialize_config(data, **kwargs):
"""Create instance of a JobConfiguration from a dict.
Parameters
----------
data : dict
Dictionary loaded from a serialized config file.
Returns
-------
JobConfiguration
"""
registry = Registry()
config_module = data["configurati... | eff887d4e676935742b8169c62a9a581b5f239ce | 3,654,515 |
import numpy
def pmat06(date1, date2):
"""
Wrapper for ERFA function ``eraPmat06``.
Parameters
----------
date1 : double array
date2 : double array
Returns
-------
rbp : double array
Notes
-----
The ERFA documentation is below.
- - - - - - - - - -
e r a P m... | 69b38637701d804ca83733d7f55fca1fd57a5b72 | 3,654,516 |
def _splitData(data):
"""Takes either a cursor or result set and returns result set and list of columns."""
if hasattr(data, 'fetchall'):
rows = data.fetchall()
cols = data.columns()
elif isinstance(data, list):
rows = data
if hasattr(rows[0], '_fields'):
cols = r... | 9953be08f29fb457782e5401c3dfded8f780924b | 3,654,517 |
import multiprocessing
def get_cpu_count():
"""
Try and estimate the number of CPU on the host. First using multiprocessing
native function, other using content of /proc/cpuinfo. If none of those
methods did work, 4 is returned.
"""
try:
cpucount = multiprocessing.cpu_count()
excep... | db58112537c4a111ec1ef24eeab70227678d6d1e | 3,654,518 |
def get_relation_data(collection, relation_paths):
"""Prepare relations for usage inside extend_relations."""
out = []
for path in relation_paths:
promote = path.get("promote", False)
numpy_path = []
for step in path["steps"]:
if isinstance(step, str):
ste... | 8b4cd9145995aee5e3c9b880073dfd10320b24e5 | 3,654,519 |
def generate_paddle_quads():
"""
This function builds a matrix of paddles, each row in the matrix
represents the paddle skin (four colors) and each column represents
the size.
"""
paddle_base_width = 32
paddle_height = 16
x = 0
y = paddle_height * 4
spritesheet = []
for _ ... | e82259d5e203257574c5ae91ad4a5c3a625e5b5a | 3,654,520 |
def cut(img):
"""
Applies central horizontal threshold in Fourier spectrum
"""
# Apply fourier transform and shift
img_fft = fftn(img)
img_fft_shift = fftshift(img_fft)
# Print spectrum before
plt.imshow(np.abs(img_fft_shift), cmap='gray', norm=LogNorm(vmin=5))
plt.show()
# Fi... | 74ce6db709aaa91fec2321dc6cc70fc6d5a8c552 | 3,654,521 |
def csrmm2(m, n, k, descrA, csrValA, csrRowPtrA, csrColIndA, B, handle=None,
C=None, nnz=None, transA=CUSPARSE_OPERATION_NON_TRANSPOSE,
transB=CUSPARSE_OPERATION_NON_TRANSPOSE, alpha=1.0, beta=0.0,
ldb=None, ldc=None, check_inputs=True):
""" multiply two sparse matrices: C = transA... | fffbecab90dfb831a4429aed759c0218b065aa4d | 3,654,522 |
def is_translated(path):
""" Checks if all files in the translation has at least one translation.
Arguments:
path (str): path to po-file
Returns: True if all files in translation has at least one translation,
otherwise False.
"""
po = polib.pofile(path)
files = []
for ... | eeacbbc8ff068684e56d79e1aaa65d564b2e33ec | 3,654,523 |
def pylm_component(name):
    """Decorator for registering a class to lightmetrica.

    Args:
        name: Identifier under which the decorated class is registered.

    Returns:
        A class decorator that registers the decorated class with its
        first base class via ``base.reg`` and returns the class unchanged.
    """
    def pylm_component_(cls):
        # `cls` instead of the original `object`, which shadowed the builtin.
        # The first base class is assumed to expose the registration hook `reg`.
        base = cls.__bases__[0]
        base.reg(cls, name)
        return cls
    return pylm_component_
import torch
def dice_score(input_mask, target_mask, eps=1e-5):
"""
input mask: (B * K, HW) #probabilities [0, 1]
target_mask: (B * K, HW) #binary
"""
dims = tuple(range(1, input_mask.ndimension()))
intersections = torch.sum(input_mask * target_mask, dims) #(B, N)
cardinalities = torch.su... | 8fbe4b7aaec4a45d7dec4705e4c3feb348250b64 | 3,654,525 |
def append_write(filename="", text=""):
    """Append ``text`` to a UTF-8 text file and report how much was written.

    Opens ``filename`` in append mode (creating it if necessary), writes
    ``text`` at the end, and returns the number of characters appended.
    """
    char_count = len(text)
    with open(filename, mode="a", encoding="utf-8") as out:
        out.write(text)
    return char_count
import torch
def psnr(img1, img2, max_val=1.0):
    """Compute the peak signal-to-noise ratio (PSNR) between two images.

    Args:
        img1, img2: Tensors of the same shape holding pixel intensities.
        max_val: Maximum possible pixel value (1.0 for normalized images).
            Backward-compatible generalization; the original hard-coded a
            peak value of ``1**2``.

    Returns:
        Scalar tensor with the PSNR in decibels. Identical images yield
        ``+inf`` (the MSE is zero).
    """
    mse = torch.mean((img1 - img2) ** 2)
    return 10 * torch.log10(max_val ** 2 / mse)
def is_scalar(dims):
    """
    Returns True if a dims specification is effectively
    a scalar (has dimension 1).

    NOTE(review): relies on an external ``flatten`` helper (presumably
    flattening a nested dims list) and on ``np`` being imported at module
    level -- confirm both are in scope where this is defined.
    """
    # A dims spec is scalar iff the product of all its entries is 1.
    return np.prod(flatten(dims)) == 1 | d2f2f1a1f2dd66ec01d9e653315d37b4ee4990e1 | 3,654,528 |
def applyMinv(obj, inputs, shape_cache):
"""Simple wrapper around a component's applyMinv where we can reshape the
arrays for each input and expand any needed array elements into full arrays.
"""
inputkeys = sorted(inputs.keys())
for key in inputkeys:
pre_process_dicts(obj, key, inputs, sha... | 9fd805408bea659f26eec93b430e450ea9228145 | 3,654,529 |
import os
def get_para_input(arg):
    """Get input directory parameter.

    Returns the absolute path of ``arg`` normalized to forward slashes,
    with a single trailing separator removed.

    Fixes vs. original: backslashes are normalized *before* the
    trailing-separator check (the original missed Windows-style trailing
    ``\\``), and the filesystem root is no longer reduced to ``""``.
    """
    input_dir = os.path.abspath(arg).replace('\\', '/')
    # Strip one trailing separator, but keep the bare root path intact.
    if input_dir.endswith('/') and len(input_dir) > 1:
        input_dir = input_dir[:-1]
    return input_dir
import json
import requests
import time
def get_county_data():
"""Get the raw data from coronavirus-tracker-api.herokuapp.com."""
url = ('https://coronavirus-tracker-api.herokuapp.com/v2/locations?source=csbs')
raw_data = None
while raw_data is None:
try:
raw_data = json.loads(requests.request('GET'... | 33404a65e6242b7416304f7194dc2a5c7f073d5d | 3,654,531 |
def r2lm(measured_y, estimated_y):
"""
r^2 based on the latest measured y-values (r2lm)
Calculate r^2 based on the latest measured y-values. Measured_y and estimated_y must be vectors.
Parameters
----------
measured_y: numpy.array or pandas.DataFrame
estimated_y: numpy.array or pandas.Data... | f75c89ca3f99659a3e2e12555a3968745fad1007 | 3,654,532 |
def G_to_NX_sparse(X, Y):
    """Convert sparse adjacency data to NetworkX directed graphs.

    ``X[n]`` is an iterable of ``(i, j, w)`` edges for sample ``n``;
    ``Y`` supplies one label per sample and is returned unchanged.
    """
    graphs = []
    for idx in range(len(Y)):
        graph = nx.DiGraph()
        for src, dst, weight in X[idx]:
            graph.add_edge(src, dst, weight=weight)
        graphs.append(graph)
    return graphs, Y
def pmf(k, n, a, b, loc=0):
    """JAX implementation of scipy.stats.betabinom.pmf."""
    # Exponentiate the log-pmf rather than computing the pmf directly.
    log_p = logpmf(k, n, a, b, loc)
    return lax.exp(log_p)
def discrete_bottleneck(x,
hidden_size,
z_size,
filter_size,
name,
mode=None,
startup_steps=50000,
bottleneck_kind='dvq',
num_bl... | ec1576b2b6a19a03995ec6dfb9a67592b925a28c | 3,654,535 |
def binarize_categorical(x, ids):
    """Replace each categorical feature column with multiple binary ones.

    Columns listed in ``ids`` are expanded via
    ``binarize_categorical_feature`` and appended (in ``ids`` order) after
    the remaining columns.
    """
    # Seed with a dummy zero column so hstack has a starting block;
    # it is sliced away again at the end (mirrors the dtype promotion
    # behavior of accumulating with np.hstack).
    expanded = np.zeros((x.shape[0], 1))
    for col in ids:
        one_hot = binarize_categorical_feature(x[:, col:col + 1])
        expanded = np.hstack((expanded, one_hot))
    kept = np.delete(x, ids, axis=1)
    return np.hstack((kept, expanded[:, 1:]))
def import_json_dataset(fileset):
    """Returns a list of imported raw JSON data for every file in the fileset.

    Args:
        fileset: Iterable of file identifiers accepted by ``import_json_data``.

    Returns:
        List with one parsed entry per file, in input order.
    """
    # Comprehension replaces the manual append loop (same calls, same order).
    return [import_json_data(f) for f in fileset]
import math
def angle_difference(angle1, angle2):
"""
Calculates the difference between the given angles in clockwise direction as radians.
:param angle1: float
:param angle2: float
:return: float; between 0 and 2*Pi
"""
if (angle1 > 0 and angle2 >= 0) and angle1 > angle2:
return ... | 377d1915e58a96b7f1526dceb31febf45c90567b | 3,654,538 |
def merge_nd(nd_cdp, nd_lldp):
""" Merge CDP and LLDP data into one structure """
neis = dict()
nd = list()
for n in nd_lldp:
neis[(n['local_device_id'], n['remote_device_id'], n['local_int'], n['remote_int'])] = n
for n in nd_cdp:
# Always prefer CDP, but grab description from L... | 90d55ffdabb6c28198ee4c59bc36fdcb6fa54e62 | 3,654,539 |
def combine_divisions(division):
"""Return the new pattern after the rules have been applied to every division"""
size = int(sqrt(len(division)))
matrix = []
for r in xrange(size):
matrix.append([])
for c in xrange(r * size, (r + 1) * size):
matrix[len(matrix) - 1].append(di... | a112449421603a227e4ee470330aa1a1ece47762 | 3,654,540 |
def is_repair(expr):
"""
判断赋值表达式是否出现过滤函数,如果已经过滤,停止污点回溯,判定漏洞已修复
:param expr: 赋值表达式
:return:
"""
is_re = False # 是否修复,默认值是未修复
global is_repair_functions
if expr in is_repair_functions:
logger.debug("[AST] function {} in is_repair_functions, The vulnerability does not exist ".form... | 4184cbedaa006b75d5f6171a5201f218f852820c | 3,654,541 |
import string
def modified_greedy(sentences,
tokenized,
model,
stopwords,
original_indices,
sent_representations,
objective_function,
min_sentence_length):
"""Implementation ... | b542c025fe870e1e7d41d33349de10a395a17eb3 | 3,654,542 |
def noiseless(rho, unitary):
    """Returns the noiseless predictions.

    Evolves ``rho`` under ``unitary`` and thresholds the resulting
    ground-state population at ~0.5 to choose the predicted label.
    Returns ``(label, population)``.
    """
    evolved = unitary @ rho @ unitary.conj().T
    ground_pop = evolved[0, 0]
    if ground_pop < 0.49999999:
        return 1, ground_pop
    return 0, ground_pop
def __get_service_info_from_thrift(root_path, idl_service, need_test_methods):
"""从指定IDL_Service和request_config配置表中,获取测试方法和Request的映射表"""
customized_request_config = yaml.load(
open(os.path.join(root_path, 'test_red', 'request_config.yaml')))
method_request = collections.OrderedDict()
idl_meth... | 0b736bb6b5411904bc28f887e6596c1242c324c9 | 3,654,544 |
def energy_calc(p, t):
"""
Calculates energy from power and time using the formula:
energy = power * time
Parameters
----------
p: Int or float
The power value of the equation.
t: Int or float
The time value of the equation (seconds).
Returns
-------
Int
... | 7df3180fdb56989e62a69305763455edbfa44ebc | 3,654,545 |
import base64
import uuid
import os
def copy_data_to_device(device, data, destination, filename=None):
""" Copies data into a device and creates a file to store that data.
Args:
data ('str'): The data to be copied
destination ('str'): Folder of where to store file
filen... | 73fad19637363a31c19e55c59e42479f2b9b0c84 | 3,654,546 |
import logging
def api_images_list_json(version):
"""
Return Docker Image listing https://docs.docker.com/engine/api/v1.41/#tag/Image
:param version: Docker API version
:return: string of fake images associated with honeypot.
"""
logging.info("images-list - %s, %s, %s, %s, %s" % (
ver... | 083911840c02ddc79af5ed457c42a29a19f1c57f | 3,654,547 |
def _handle_eval_return(self, result, col, as_pyranges, subset):
"""Handle return from eval.
If col is set, add/update cols. If subset is True, use return series to subset PyRanges.
Otherwise return PyRanges or dict of data."""
if as_pyranges:
if not result:
return pr.PyRanges()
... | 84698bcb3b1f1e961ac7f3c4e347d65ce0790066 | 3,654,548 |
def compute_sigma0_sparse(V, dX, W_sensors, W_points, W_observations, column_dict):
"""
Computes the resulting standard deviation of the residuals for the current state of the bundle network.
Parameters
----------
V : ndarray
An array of residuals of the difference between registered meas... | 05606efe21d61f67539eae627caea976a532f85f | 3,654,549 |
from typing import Any
from sys import version
def version_callback() -> Any:
"""Print the version of the package."""
print(f"version: {version}")
return version | 987643727d133dc09163cebd6c4293f78b0b7f6a | 3,654,550 |
def fill(bitdef, value):
"""
Fill undefined bits with a value.
For example ``1..0100.1`` becomes ``111010011`` when filled with 1s.
Args:
bitdef (str): The bitdef to fill.
value (str): The value to fill with, "0" or "1".
Returns:
str: The filled bitdef.
"""
output ... | eef3ac59a2a7c4d1a25851a2ca14b3ffed6d1463 | 3,654,551 |
import requests
import json
def get_cman_info(state):
"""
Will take a list of congressmen and return the relevant attributes
:param congress_list: list of divs that contain congress data
:param state: state you are scraping
:return: list of relevant scraped attributes
"""
cman_attrs = []
... | afe180c4bbd930cfbfe42e28a769d07f2c4378cd | 3,654,552 |
def concatenate_data(data, field='normalized_data'):
"""
Concatenate trial data in a list of dictionaries
:param data: nested dict, contains all trial infos
:param field: str, dict key in info dict in general data structure
:return:
"""
time_series = np.concatenate([info[field] for info in d... | 7f3dfb7aed2ffedf2124a9f57df0abf8491d1af6 | 3,654,553 |
def _find_weight_ops(op, graph, weights):
""" Find the vars come from operators with weight.
"""
pre_ops = graph.pre_ops(op)
for pre_op in pre_ops:
### if depthwise conv is one of elementwise's input,
### add it into this same search space
if _is_depthwise(pre_op):
f... | 04e4a21079a3857815e39be3fe00e15aeac2f3b3 | 3,654,554 |
def get_GUI_presets_dict():
"""Return a dictionary of all of the available potential functions."""
preset_dict = {'cosine_potential': np.array([3.14, -6.28, 12.57, 0.01, 0,
0, 0, 0]).astype(str),
'two_gaussian_potential': np.array([2.67, -4, 4,... | 0034ecdbde2f27e1b8db25a82231fca9bc79485c | 3,654,555 |
def _escapeEnds(original):
"""Comment, function end.
Escape comment end, because non-greedy becomes greedy in context. Example:
blockCommentNonGreedy = '(\s*/\*[\s\S]+?\*/\s*){0,1}?'
"""
original = _escapeWildCard(original)
commentEscaped = original \
.replace(commentEndEscape, commentE... | 5a0df98f42d2df2b424cd6bfa7c533e0016557fe | 3,654,556 |
def handle_bad_request(error: BadRequest) -> Response:
"""Render the base 400 error page."""
rendered = render_template("base/400.html", error=error,
pagetitle="400 Bad Request")
response: Response = make_response(rendered)
response.status_code = status.BAD_REQUEST
ret... | 70c6c835ef31839ff7b637443c414abbb549bcb0 | 3,654,557 |
import torch
def top_k_top_p_filtering(
logits: torch.FloatTensor,
top_k: int = 0,
top_p: float = 1.0,
filter_value: float = -float("Inf"),
min_tokens_to_keep: int = 1,
) -> torch.FloatTensor:
"""
Filter a distribution of logits using top-k and/or nucleus (top-p) filtering
Args:
... | 0c2f8392dcc6ada2afb1dc33575465e194a52199 | 3,654,558 |
def parseFimo(fimoFile, strand):
""" parse the fimo.txt file
Args:
the fimo.txt file
strand = single or double
Returns:
fimoDict: a dict between motif ID and a list of sequences it occurs in
"""
#dict to store for each motif list of seqs that it occurs in
fimoDict = {}
#read the fimo.txt file
with open... | ea6e0765c474e367653571e9a88e6449fc947ff5 | 3,654,559 |
def pad_batch_dimension_for_multiple_chains(
observed_time_series, model, chain_batch_shape):
""""Expand the observed time series with extra batch dimension(s)."""
# Running with multiple chains introduces an extra batch dimension. In
# general we also need to pad the observed time series with a matching batc... | ec072f3fa5318ee3f4c82dcc0d3697a5160b257f | 3,654,560 |
from typing import Union
import re
def get_bytes(size: Union[str, int]) -> int:
"""Converts string representation of bytes to a number of bytes.
If an integer is passed, it is returned as is (no conversion).
Args:
size (Union[str, int]): A string or integer representation of bytes to be converte... | 76cd67a0d581b79105a79bc84d66126d3201b07a | 3,654,561 |
def port_translation_func(req: AdvancedDataTypeRequest) -> AdvancedDataTypeResponse:
"""
Convert a passed in AdvancedDataTypeRequest to a AdvancedDataTypeResponse
"""
resp: AdvancedDataTypeResponse = {
"values": [],
"error_message": "",
"display_value": "",
"valid_filter_... | b8c41d8c3d3c2fa0a9e67b8ef9ff93422921e7e3 | 3,654,562 |
import subprocess
def stop(cli):
"""Wrapper function for the relevant RPC function call.
Args:
cli (str): Full path to cli binary associated with coin.
Returns:
String: String containing the command output.
"""
command = DAEMON_STOP_COMMAND.format(cli)
ret... | 4b39c08cab60017b22ceecb9d97b626e890731d0 | 3,654,563 |
import random
def get_two_diff_order_index(start=0, stop=1, order=True, diff=True):
"""
Returns two integers from a range, they can be:
put in order (default) or unordered
always different(default) or can be repeated
start - integer (default = 0)
stop - integer (default= 1)
order -... | 7bd0e17efb969ea59e7a30d8fdaae55d901a718e | 3,654,564 |
import os
def setup(args):
"""
Create configs and perform basic setups.
"""
cfg = config.get_cfg()
if args.gpus is not None:
gpus = args.gpus
else:
gpus = []
gpus_str = ""
for g in gpus:
gpus_str += str(g) + ","
gpus_str = gpus_str[:-1]
os.environ['CUDA... | de22f24da45bc87d22fa0dd0937b11647996c50b | 3,654,565 |
import math
def GriewankRosenbrock(arr: np.ndarray, seed: int = 0) -> float:
"""Implementation for BBOB GriewankRosenbrock function."""
dim = len(arr)
r_x = np.matmul(_R(dim, seed, b"R"), arr)
# Slightly off BBOB documentation in order to center optima at origin.
# Should be: max(1.0, (dim**0.5) / 8.0) * r_... | 9a9ca4f043e60fb971c5212de33379c29aaade58 | 3,654,566 |
def listCurrentAuctionsByKeyword(username, keyword):
"""Listar os leilões que estão a decorrer"""
try:
valid = utils.validateTypes([keyword], [str])
if not valid:
return jsonify({'erro': 404})
auctions = db.listAuctions(keyword)
if auctions == "noResults":
... | c02c58a294b3d65821f36872dcf23e4f7abff49b | 3,654,567 |
import os
def _get_relative_maddir(maddir, port):
""" Return a relative path version of maddir
GPDB and HAWQ installations have a symlink outside of GPHOME that
links to the current GPHOME. After a DB upgrade, this symlink is updated to
the new GPHOME.
'maddir_lib', which uses the absolute path ... | 7ad76b8d44f68ebd61813a851672b4f4aa18b77d | 3,654,568 |
from typing import Dict
def hash_dict(data: Dict) -> int:
"""
Hashes a Dictionary recursively.
List values are converted to Tuples.
WARNING: Hashing nested dictionaries is expensive.
"""
cleaned_dict: Dict = {}
def _clean_dict(data: Dict) -> Dict:
d: Dict = {}
for k, v in ... | 42b579151c90a42fadf2b53751978eec421ea03c | 3,654,569 |
import logging
import os
import time
import configparser
def sele():
"""身份验证的JSID获取.
Return:
若获取成功,则返回JSID字符串,
若获取失败,则返回空字符串""
"""
logger = logging.getLogger("sele.py")
logger.info("Start sele")
try:
# phantomjs请求头设置
dcap = dict(DesiredCapabilities.PHANTOMJS)
... | 71f89946b5811458450de54ef70354644a135866 | 3,654,570 |
def instrument_packages_ip_template(instrument, ip_version, template_name=None):
"""
Retrieves the specified instrument package template metadata
:param instrument: instrument used to make observation
:type instrument: str
:param ip_version: ip version description here
:type ip_version: float
... | 46d3cd57e05a64c03411c31d2b18ca47f670036d | 3,654,571 |
from typing import Literal
def add_feature_metadata(id, description, type):
"""Generate RDF metadata for a feature
:param id: if used to identify the feature
:param description: feature description
:param type: feature type
:return: rdflib graph after loading the feature
"""
g = Graph()
... | 0d4987807b3ed97baa50f8b14c588ef162b5c8ac | 3,654,572 |
import copy
def sink(input_flow_direction_raster):
"""
Creates a raster layer identifying all sinks or areas of internal drainage.
The value type for the Sink function output raster layer is floating point.
For more information, see
https://pro.arcgis.com/en/pro-app/help/data/imagery/sink-funct... | 6d1b22dacd48a0939b7822d62a4867b2b7574c42 | 3,654,573 |
def bad_multi_examples_per_input_estimator_out_of_range_input_refs(
export_path, eval_export_path):
"""Like the above (good) estimator, but the input_refs is out of range."""
estimator = tf.estimator.Estimator(model_fn=_model_fn)
estimator.train(input_fn=_train_input_fn, steps=1)
return util.export_model_a... | 539ec039451c53db72cb676881f48fbe45874dfa | 3,654,574 |
def vector_to_diagonal(v):
    """Build a square matrix whose diagonal holds the elements of ``v``
    and whose off-diagonal entries are all zero."""
    size = len(v)
    return [
        [v[row] if row == col else 0 for col in range(size)]
        for row in range(size)
    ]
from typing import Union
from pathlib import Path
from typing import List
def _get_filenames(path: Union[str, Path], media_type: MediaType) -> List[str]:
"""
Get filenames from a directory or a path to a file.
:param path: Path to the file or to the location that contains files.
:param media_type: Typ... | 953bcfce17c6db45772a8eac8890fa161c128322 | 3,654,576 |
from venusian import attach
def method(method_class):
"""Decorator to use to mark an API method.
When invoking L{Registry.scan} the classes marked with this decorator
will be added to the registry.
@param method_class: The L{Method} class to register.
"""
def callback(scanner, name, method_... | 4e40d265a4a5767686f0e37b4d1adf681ce36722 | 3,654,577 |
def generic_validator(check, error_message):
"""
Validator factory
>>> v = generic_validator(is_int, "invalid int")
>>> v(6)
6
>>> v("g")
Traceback (most recent call last):
...
ValidationError: [u'invalid int']
"""
# Validator closure
def inner_validator(value, *args, **... | 21134ecee1d8c23b10e94181c0c1aa602ce4b76e | 3,654,578 |
def get_molec_shape(mol, conf, confId, vdwScale=1.0,
boxMargin=2.0, spacing=0.2):
"""
Get the shape of a conformer of a molecule as a grid
representation.
"""
box = Chem.ComputeConfBox(conf)
sideLen = (box[1].x-box[0].x + 2*boxMargin,
box[1].y-box[0].y + 2*box... | 6a7b404224a116a52d70f7ab14d4301215c1700f | 3,654,579 |
import math
def autoencoder(dimensions=[784, 512, 256, 64]):
"""Build a deep denoising autoencoder w/ tied weights.
Parameters
----------
dimensions : list, optional
The number of neurons for each layer of the autoencoder.
Returns
-------
x : Tensor
Input placeholder to t... | d9cc8b6f2c8e7df0bc4fb580e1de20dc57f93c7a | 3,654,580 |
def _asymptotic_expansion_of_normalized_black_call(h, t):
"""
Asymptotic expansion of
b = Φ(h+t)·exp(x/2) - Φ(h-t)·exp(-x/2)
with
h = x/s and t = s/2
which makes
b = Φ(h+t)·exp(h·t) - Φ(h-t)·exp(-h·t)
exp(-(h²+t²)/2)
... | 9985b36e7f0dec1877d275a23ae747d9a57c1163 | 3,654,581 |
def date_read(date_string, *, convert_to_current_timezone: bool = False):
    """Read the given date (if possible), delegating to ``date_parse``."""
    parsed = date_parse(
        date_string,
        convert_to_current_timezone=convert_to_current_timezone,
    )
    return parsed
import urllib
def encode_name(name):
    """
    Encode a unicode as utf-8 and then url encode that
    string. Use for entity titles in URLs.

    NOTE(review): ``urllib.quote`` is Python 2 only; on Python 3 this
    must be ``urllib.parse.quote``. ``safe=''`` also escapes '/', so the
    whole name fits into a single URL path segment.
    """
    return urllib.quote(name.encode('utf-8'), safe='') | 6e9d34516613ecdf0ce94fb9cfc594de7e76b72f | 3,654,583 |
def cmp_str(element1, element2):
    """
    Compare numbers in str format correctly.

    Compares the two values numerically when both parse as integers,
    falling back to ordinary (lexicographic) comparison otherwise.
    Returns -1, 0, or 1 like Python 2's ``cmp``; unlike the original,
    this also runs on Python 3, where ``cmp`` no longer exists.
    """
    try:
        a, b = int(element1), int(element2)
    except ValueError:
        a, b = element1, element2
    # Portable replacement for cmp(): (a > b) - (a < b).
    return (a > b) - (a < b)
def parse_page_file(page_raw: str, type: str, file_name: str) -> Page:
    """Build a Page from raw markdown text and its source file name.

    The page id is derived from the file name; title and fields come
    from parsing the markdown body.
    """
    title, fields = parse_md(page_raw)
    return Page(
        id=extract_page_id(file_name),
        type=type,
        title=title,
        fields=fields,
    )
from sys import path
import joblib
def fetch(name):
"""
Fetches an appropriate model to perform the prediction.
:param name: model's name
:return: a trained model
"""
K.clear_session()
try:
full_weights_path = path.join(path_prefix, *load_weights()[name])
if name == 'svm'... | fcb82ec61b984e07ece0822c0bcf78dff451eafa | 3,654,586 |
def post(text, appid=2, touser=None, toparty=None):
"""
party
"""
#print '=========',type(text)
if type(text) is unicode:
text = text.encode('utf8')
if not touser:
touser = []
if not toparty:
toparty = ['2']
url = 'https://qyapi.weixin.qq.com/cgi-bin/message/send?... | b2a92a274007b0502431a856457a244c12b925a9 | 3,654,587 |
import six
import codecs
def hex_encrypt(msg):
"""Hex encrypts a message.
:param bytes msg: string message to be encrypted.
:return: string for encrypted version of msg in hex.
:rtype: bytes
"""
if not cipher:
return msg
if not isinstance(msg, six.binary_type):
raise Value... | c2d913d181b8ceb33b3e7d99fc5f21b025da58ea | 3,654,588 |
import requests
def http_request(source_id, endpoint_id, args, kwargs, # pylint: disable=too-many-arguments
service_addr, auth=None):
"""Call http endpoint"""
headers = {"content-type": "application/json"}
if auth is not None:
headers["Authorization"] = basic_auth_header(auth)
... | e259508e78aaa7bf9c663a538a2c6b6471938f5e | 3,654,589 |
import regex
async def filter_by_game_stats(opsdroid, string, room, action):
"""Match incoming messages against the current games stats."""
if room not in STAT_REGEXES.keys():
gamestats = await get_stat_names(opsdroid, room)
if not gamestats:
return []
STAT_REGEXES[room] = ... | 4971e5567c8a1b89aa47fdaab2e42e51620f475b | 3,654,590 |
def password_provider():
    """
    Provides the full password check
    """
    # zip over a single iterable yields 1-tuples: (0,), (1,), ..., (4,)
    return list(zip(range(5)))
def make_combiparameter(*args, **kwargs):
"""
Make a combined qcodes parameter.
Args:
*args : list of gates or parameters
(e.g. make_combiparameter("A1", "A3", station.gates.B1 ))
"""
station = qc.Station.default
parameters = []
for i in args:
if type(i) == str:
... | 6482187dc463c67e322a281181ba827eb39eb28d | 3,654,592 |
def get_delta_fmt(delta):
"""arbitrary colour formatting of rank delta
more red for bigger losses, more green for bigger gains
"""
col = (0, 0, 0, 255)
n = abs(delta)
s = delta
if delta < 0:
sat = min(n/200 + 0.2, 1)
r, g, b = hsv_to_rgb(0, sat, 1)
col = (r, g, b, 1)
... | a7860df4f19632c9623c39c38ac70a76f405ae56 | 3,654,593 |
import sys
def calculate_wtv(sample_values, epoch_time_interval=WTV_EPOCH_TIME, relative_to_time=None):
"""
Calculate the Wear-Time Validation (30-minute epochs) for a given sample ndarray [[time_seconds, accel_x, accel_y, accel_z]].
Based on the method by van Hees et al in PLos ONE 2011 6(7),
"Es... | c829825e9875d57cbce94a704ad162349a2143c7 | 3,654,594 |
import inner_imports
from re import X
def function(default=None):
"""Docstring comes first.
Possibly many lines.
"""
# FIXME: Some comment about why this function is crap but still in production.
if inner_imports.are_evil():
# Explains why we have this if.
# In great detail indee... | 180e412bdeb275a3d7ca56fd0588a565b64778fd | 3,654,595 |
def fit_pk_parms_1d(p0, x, f, pktype='pvoigt'):
"""
Performs least squares fit to find parameters for 1d analytic functions fit
to diffraction data
Required Arguments:
p0 -- (m) ndarray containing initial guesses for parameters
for the input peaktype
x -- (n) ndarray of coordinate... | 52dbff47fd8ad6f7727b0241bba48d2b10393a18 | 3,654,596 |
from meerschaum.utils.debug import dprint
from typing import Union
def is_pipe_registered(
pipe : Union['meerschaum.Pipe', 'meerschaum.Pipe.MetaPipe'],
pipes : dict,
debug : bool = False
):
"""
Check if a Pipe or MetaPipe is inside the pipes dictionary.
"""
ck, mk, lk = pip... | b3630de8316858afe9272698593a7dec7f984762 | 3,654,597 |
import jinja2
def truncate(s, length=255, killwords=True, end='...'):
"""
Wrapper for jinja's truncate that checks if the object has a
__truncate__ attribute first.
Altering the jinja2 default of killwords=False because of
https://bugzilla.mozilla.org/show_bug.cgi?id=624642, which could occur
... | 70c154fbfa344bd24f685f5209e2121d8aac0057 | 3,654,598 |
def tract_segmentation_single_example_lap (kdt_T_A, prototypes_T_A,sid, num_NN,T_A ):
""" step 1: tract segmentation from a single example using Jonker-Volgenant algorithm (LAPJV)
"""
E_t_filename= 'data/example/'+ str(sid) +'_'+str(tract_name)+'.trk'
print("Loa... | cc14e598f359fc9b92995bdc3a6a98192333b800 | 3,654,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.