content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def clean_data(df):
"""
Clean Data :
1. Clean and Transform Category Columns from categories csv
2.Drop Duplicates
3.Remove any missing values
Args:
INPUT - df - merged Dataframe from load_data function
OUTPUT - Returns df - cleaned Dataframe
"""
# Split categories into... | 5,355,300 |
def test_determine_cum_stored_energy_series_simple_up_down():
"""
/\
:return:
"""
gamma = np.array([0., 1., 0.5])
tau = np.array([0., 1., 0])
expected_delta_e = 0.75 # two triangles (1x1x0.5 + 1x0.5x0.5)
et = ShearTest(tau, gamma)
energy = assess.calc_case_et(et)
assert energy[-... | 5,355,301 |
def commitFile(file: str = None, message: str = None, debug: bool = False) -> bool:
"""Commit a file when it is changed.
:param file: The name of the file we want to commit.
:type file: str
:param message: The commit message we want to use.
:type message: str
:param debug: If we want debug logg... | 5,355,302 |
def all_pairs_shortest_path_length(G,cutoff=None):
""" Compute the shortest path lengths between all nodes in G.
Parameters
----------
G : NetworkX graph
cutoff : integer, optional
depth to stop the search. Only paths of length <= cutoff are returned.
Returns
-------
lengths :... | 5,355,303 |
def get_cols_to_keep(gctoo, cid=None, col_bool=None, cidx=None, exclude_cid=None):
""" Figure out based on the possible columns inputs which columns to keep.
Args:
gctoo (GCToo object):
cid (list of strings):
col_bool (boolean array):
cidx (list of integers):
exclude_cid... | 5,355,304 |
def check_zenity():
""" Check if zenity is installed """
warning = '''zenity was not found in your $PATH
Installation is recommended because zenity is used to
indicate that protonfixes is doing work while waiting
for a game to launch. To install zenity use your system's
package manager.
'''... | 5,355,305 |
def get_auth_token():
    """Read and return the zerotier API auth token from its snap install location."""
    token_path = "/var/snap/zerotier-one/common/authtoken.secret"
    with open(token_path, "r") as handle:
        token = handle.read()
    return token.strip()
def sph_harm_transform(f, mode='DH', harmonics=None):
""" Project spherical function into the spherical harmonics basis. """
assert f.shape[0] == f.shape[1]
if isinstance(f, tf.Tensor):
sumfun = tf.reduce_sum
def conjfun(x): return tf.conj(x)
n = f.shape[0].value
else:
... | 5,355,307 |
def create_model(experiment_settings:ExperimentSettings) -> OuterModel:
"""
function creates an OuterModel with provided settings.
Args:
inner_settings: an instannce of InnerModelSettings
outer_settings: an instannce of OuterModelSettings
"""
model = OuterModel(experiment_settings.ou... | 5,355,308 |
def LoadTrainingTime(stateNum):
"""
Load the number of seconds spent training
"""
filename = 'time_' + str(stateNum) + '.pth'
try:
timeVals = pickle.load( open(GetModelPath() + filename, "rb"))
return timeVals["trainingTime"]
except:
print("ERROR: Failed to load traini... | 5,355,309 |
def adjust_learning_rate(optimizer, epoch):
"""Sets the learning rate to the initial LR decayed by 10 every 15 epochs"""
lr = args.lr * (0.1 ** (epoch // args.lr_epochs))
print('Learning rate:', lr)
for param_group in optimizer.param_groups:
if args.retrain and ('mask' in param_group['key']): # ... | 5,355,310 |
def check_one_file(test_info, ref_file, output_file, eq_args):
"""Check a single output file produced by a test run against a reference file
Parameters
----------
test_info : dict
Dictionary containing miscellaneous test information
ref_file : str
Name of reference file, e... | 5,355,311 |
def disk_status(hardware, disk, dgtype):
"""
Status disk
"""
value = int(float(disk['used']) / float(disk['total']) * 100.0)
if value >= 90:
level = DiagnosticStatus.ERROR
elif value >= 70:
level = DiagnosticStatus.WARN
else:
level = DiagnosticStatus.OK
# Make boa... | 5,355,312 |
def hr_admin(request):
""" Views for HR2 Admin page """
user = request.user
# extra_info = ExtraInfo.objects.select_related().get(user=user)
designat = HoldsDesignation.objects.select_related().get(user=user)
if designat.designation.name =='hradmin':
template = 'hr2Module/hradmin.h... | 5,355,313 |
def test_extra():
    """Returns dict of extrapolation testing modules."""
    modules = {}
    for name, module in six.iteritems(all_):
        modules[name] = module.test_extra()
    return modules
def add_copy_elf_task(self):
"""creates a task to copy the elf file into the output root
(task :py:class:`f_ti_arm_cgt.copy_elf`)"""
if self.bld.variant_dir == self.link_task.outputs[0].parent.abspath():
return
if not hasattr(self, "link_task"):
return
if self.bld.variant_dir:
... | 5,355,315 |
def determine_family(reaction: 'ARCReaction',
db: Optional[RMGDatabase] = None,
):
"""
Determine the RMG reaction family for an ARC reaction.
A wrapper for ARCReaction.determine_family().
This wrapper is useful because it makes a new instance of the rmgdb if nee... | 5,355,316 |
def _switch_component(
x: torch.Tensor, ones: torch.Tensor, zeros: torch.Tensor
) -> torch.Tensor:
"""
Basic component of switching functions.
Args:
x (torch.Tensor): Switch functions.
ones (torch.Tensor): Tensor with ones.
zeros (torch.Tensor): Zero tensor
Returns:
... | 5,355,317 |
def get_vector(x_array, y_array, pair):
    """Compute the bone vector between two openpose skeleton joints.

    ``pair`` holds the (start, end) joint column indices; the result is the
    per-frame [dx, dy] difference between the two joint coordinate columns.
    """
    start, end = pair[0], pair[1]
    dx = x_array[:, start] - x_array[:, end]
    dy = y_array[:, start] - y_array[:, end]
    return [dx, dy]
def update_bar(tweets_json, handle):
"""
Pull data from signal and updates aggregate bar graph
This is using thresholds that combine toxicity and severe toxicity models
suggested by Lucas.
"""
if not tweets_json:
raise PreventUpdate('no data yet!')
tweets_df = pd.read_json(tweets_j... | 5,355,319 |
def defineConsole():
"""
defines the program console line commands
"""
parser = argparse.ArgumentParser(description="SBML to BNGL translator")
parser.add_argument(
"-f1", "--file1", type=str, help="reference file", required=True
)
parser.add_argument(
"-f2", "--file2", type=s... | 5,355,320 |
def query_command():
"""
Interactive querying of code snippets
"""
keyword = input("What do you want to query: ").lower().strip()
query = StackoverflowQuery(keyword, ['python', 'js', 'ruby'])
for snippet in query.code_snippets():
print("=======================")
print("Language: ... | 5,355,321 |
def device_boot():
"""
Starts timer to activate pump every PUMP_ACTIVATION_DELTA hours.
If for some reason the device gets rebooted during a water release
it should turn off the pump after being restarted.
"""
logger.info('Turning pump off after device start')
pump = WaterPumpControl()... | 5,355,322 |
def render_foreign_derivation(tpl: str, parts: List[str], data: Dict[str, str]) -> str:
"""
>>> render_foreign_derivation("bor", ["en", "ar", "الْعِرَاق", "", "Iraq"], defaultdict(str))
'Arabic <i>الْعِرَاق</i> (<i>ālʿrāq</i>, “Iraq”)'
>>> render_foreign_derivation("der", ["en", "fro", "-"], defaultdict... | 5,355,323 |
def create_from_ray(ray):
"""Converts a ray to a line.
The line will extend from 'ray origin -> ray origin + ray direction'.
:param numpy.array ray: The ray to convert.
:rtype: numpy.array
:return: A line beginning at the ray start and extending for 1 unit
in the direction of the ray.
... | 5,355,324 |
def cluster_sampling(sents: List[Sentence], tag_type: str, **kwargs) -> List[int]:
"""Cluster sampling.
We create cluster sampling as a kind of diversity sampling method.
Different with most of sampling methods that are based on sentence level,
Cluster sampling method is implemented on entity level.
... | 5,355,325 |
def paths_to_dirs(paths): # type: (t.List[str]) -> t.List[str]
"""Returns a list of directories extracted from the given list of paths."""
dir_names = set()
for path in paths:
while True:
path = os.path.dirname(path)
if not path or path == os.path.sep:
brea... | 5,355,326 |
def opts2dict(opts):
    """Converts options returned from an OptionParser into a dict"""
    # Keep every public, non-callable attribute; dunders and parser
    # helper methods are filtered out by the two conditions.
    return {
        name: getattr(opts, name)
        for name in dir(opts)
        if not name.startswith('_') and not callable(getattr(opts, name))
    }
def alignment_scan_timing_system_start_images(image_numbers):
"""Configure timing system
image_numbers: list of 1-based integers
e.g. image_numbers = alignment_pass(1)"""
nimages = len(image_numbers)
# The detector trigger pulse at the beginning of the first image is to
# dump zingers that may h... | 5,355,328 |
def combined_directions(a_list, b_list):
"""
Takes two NoteList objects.
Returns a list of (3)tuples each of the form:
(
int: a dir,
int: b dir,
(int: bar #, float: beat #)
)
"""
onsets = note_onsets(a_list, b_list)
a_dirs = directions(a_list)
b_dirs = directi... | 5,355,329 |
def _naive_csh_seismology(l, m, theta, phi):
"""
Compute the spherical harmonics according to the seismology convention, in a naive way.
This appears to be equal to the sph_harm function in scipy.special.
"""
return (lpmv(m, l, np.cos(theta)) * np.exp(1j * m * phi) *
np.sqrt(((2 * l + 1)... | 5,355,330 |
def is_room_valid(room):
"""Check if room is valid."""
_, names, checksum = room
letters = defaultdict(int)
complete_name = ''.join(names)
for letter in complete_name:
letters[letter] += 1
sorted_alphabetic = sorted(letters)
sorted_by_occurrences = sorted(
sorted_alphabetic, ... | 5,355,331 |
def _get_attributes_entropy(dataset: FingerprintDataset,
attributes: AttributeSet
) -> Dict[Attribute, float]:
"""Give a dictionary with the entropy of each attribute.
Args:
dataset: The fingerprint dataset used to compute the entropy.
att... | 5,355,332 |
def _mag_shrink_hard(x, r, t):
""" x is the input, r is the magnitude and t is the threshold
"""
gain = (r >= t).float()
return x * gain | 5,355,333 |
def plot_along(a, title=''):
"""Plot infos from a DataFrame created by run.along
Arguments
---------
a : Pandas DataFrame
run.along output
"""
f, ax = plt.subplots(2, figsize=(16, 16), dpi= 80, )#wspace=0, hspace=0)
x = a['xo'].values
pc = a['pc'].values
pn = a['pn'].values... | 5,355,334 |
def is_macports_env():
"""
Check if Python interpreter was installed via Macports command 'port'.
:return: True if Macports else otherwise.
"""
# Python path prefix should start with Macports prefix.
env_prefix = get_macports_prefix()
if env_prefix and base_prefix.startswith(env_prefix):
... | 5,355,335 |
async def test_async_remove_no_platform(hass):
"""Test async_remove method when no platform set."""
ent = entity.Entity()
ent.hass = hass
ent.entity_id = "test.test"
await ent.async_update_ha_state()
assert len(hass.states.async_entity_ids()) == 1
await ent.async_remove()
assert len(hass... | 5,355,336 |
def wiki_data(request, pro_id):
    """Return a project's wiki article titles as JSON, ordered by depth."""
    titles = (
        models.Wiki.objects.filter(project_id=pro_id)
        .values('id', 'title', 'parent_id')
        .order_by('deepth')
    )
    return JsonResponse({'status': True, 'data': list(titles)})
def chain(*tasks):
"""
Given a number of tasks, builds a dependency chain.
chain(task_1, task_2, task_3, task_4)
is equivalent to
task_1.set_downstream(task_2)
task_2.set_downstream(task_3)
task_3.set_downstream(task_4)
"""
for up_task, down_task in zip(tasks[:-1], tasks[1:]):
... | 5,355,338 |
def print_epoch_progress(train_loss, val_loss, time_duration, train_metric,
val_metric):
"""Print all the information after each epoch.
:train_loss: average training loss
:val_loss: average validation loss
:time_duration: time duration for current epoch
:train_metric_collec... | 5,355,339 |
def create_task_spec_def():
"""Returns the a :class:`TaskSpecDef` based on the environment variables for distributed training.
References
----------
- `ML-engine trainer considerations <https://cloud.google.com/ml-engine/docs/trainer-considerations#use_tf_config>`__
- `TensorPort Distributed Comput... | 5,355,340 |
def clkdirpwm_main():
"""
Main routine for clkdirpwm commandline function.
"""
parser = optparse.OptionParser(usage=CLKDIRPWM_USAGE_STR)
parser.add_option('-v', '--verbose',
action='store_true',
dest='verbose',
help='verbose mode - ... | 5,355,341 |
def test_baked_query(n):
    """test a baked query of the full entity."""
    bakery = baked.bakery()
    session = Session(bind=engine)
    for customer_id in random.sample(ids, n):
        baked_query = bakery(lambda s: s.query(Customer))
        baked_query += lambda q: q.filter(Customer.id == bindparam("id"))
        baked_query(session).params(id=customer_id).one()
def many_capitalized_words(s):
    """Return 1 if more than 66% of the words in ``s`` are capitalized, else 0.

    The original docstring claimed this "returns a function" and used a 65%
    threshold; the code actually returns an int flag and tests > 66.

    Relies on the module-level ``capitalized_words_percent`` helper to
    compute the percentage.
    """
    return 1 if capitalized_words_percent(s) > 66 else 0
def cmd_te_solution_build(abs_filename,wait=False,print_output=False,clear_output=False):
"""ソリューションをビルドする(テキストエディタ向け)
ファイルが含まれるVisual Studioを探し出してソリューションをビルドする。
VisualStudioの「メニュー -> ビルド -> ソリューションのビルド」と同じ動作。
abs_filename- ファイル名の絶対パス
(Ex.) c:/project/my_app/src/main.cpp... | 5,355,344 |
def main(paths, verbose, dry_run):
"""Rename filesystem entries to ASCII equivalent transliterations."""
for start_path in (os.path.expanduser(decode_filesystem_name(p)) for p in paths):
if os.path.isdir(start_path):
for root, dirs, files in os.walk(start_path):
files = [deco... | 5,355,345 |
def log_ratio_measure(
segmented_topics, accumulator, normalize=False, with_std=False, with_support=False):
"""
If normalize=False:
Popularly known as PMI.
This function calculates the log-ratio-measure which is used by
coherence measures such as c_v.
This is defined as: ... | 5,355,346 |
def _calculate_monthly_anomaly(data, apply_filter=False, base_period=None,
lat_name=None, lon_name=None, time_name=None):
"""Calculate monthly anomalies at each grid point."""
# Ensure that the data provided is a data array
data = rdu.ensure_data_array(data)
# Get coordi... | 5,355,347 |
def process_song(song_id):
"""
歌曲id、歌曲名、歌手id、所属专辑id、歌词、评论数
process song information
:param song_id: 歌曲id
:return: 处理状态(True or False)
"""
log("正在处理歌曲:{}".format(song_id))
if db.hexists("song:" + song_id, "id"):
log("有缓存(已做过处理),歌曲id:{}".format(song_id))
return True
els... | 5,355,348 |
def pip(requirements_file='requirements.txt'):
    """Run pip install."""
    require('site_path')
    with cd(env.site_path):
        # Only install when the requirements file is present on the remote.
        if not exists(requirements_file):
            return
        run('./bin/pip install -r {0}'.format(requirements_file))
def expand_path(path):
"""
Convert a path to an absolute path. This does home directory expansion,
meaning a leading ~ or ~user is translated to the current or given user's
home directory. Relative paths are relative to the current working
directory.
:param path: Relative or absolute path of fi... | 5,355,350 |
def oxe_system_alaw_to_mulaw(host, token, mode):
"""Summary
Args:
host (TYPE): Description
token (TYPE): Description
mode (TYPE): Description
Returns:
TYPE: Description
"""
payload = {
'T0_Mu_Law': mode
}
packages.urllib3.disable_warnings(pac... | 5,355,351 |
def test_makepipebranch():
"""py.test for makepipebranch"""
tdata = (
(
"p_branch",
[
"BRANCH",
"p_branch",
0.0,
"",
"Pipe:Adiabatic",
"p_branch_pipe",
"p_branch_pipe_i... | 5,355,352 |
def update_weights(comment_weights, comment_usage):
"""Updates the weights used to upvote comments so that the actual voting
power usage is equal to the estimated usage.
"""
desired_usage = 1.0 - VP_COMMENTS / 100.0
actual_usage = 1.0 - comment_usage / 100.0
scaler = np.log(desired_usage) / np.l... | 5,355,353 |
def smaller2k(n):
    """Return a power of two not exceeding ``n``; handles negative numbers.

    For positive ``n`` this is the largest power of 2 <= n; for negative
    ``n`` it is minus the smallest power of 2 >= -n; zero maps to zero.
    """
    if n == 0:
        return 0
    if n > 0:
        return 2 ** math.floor(math.log2(n))
    return -(2 ** math.ceil(math.log2(-n)))
def create_anime_image_data(anime):
"""Create (or load) a dict for each anime that has a high level CNN
representation of the associated MAL image.
Parameters:
-----------
anime : Pandas dataframe
the dataframe corresponding to the list of all anime in the dataset.
Returns:
--------
image_data : dict
A... | 5,355,355 |
def add_template_to_graph(graph, template):
"""Add a template object and its edges to the graph
Iterates through any tiles on the template and
adds the relevant link as an edge. Also looks for
additional references in the dependencies
"""
def analyse_images():
"""Find all the image refe... | 5,355,356 |
def deferred_bots_for_alias(alias):
"""Returns a dict where the keys are bot names whose commands have an alias
that conflicts with the provided alias, and the values are a list of
prefixes that would cause that conflict."""
return {
# TODO Support more prefixes than one
config['name']: ... | 5,355,357 |
def test():
    """
    This is not usually called, other than for testing the API.
    """
    params = {"request": "test/api/{}".format(API_KEY), "api_key": API_KEY}
    response = requests.get(url, params=params)
    print(response.text)
    print(response.json())
    print(response.json()['message'])
def del_rel_path(paths: Set[str]) -> None:
    """Delete all relative :param:`paths` from current root/docs directory."""
    for rel in paths:
        log.debug("Deleting file in the path %s", rel)
        target = root_dir.joinpath(rel.lstrip("/"))
        target.unlink()
def modeify(intcode, i):
"""Apply a mode to a parameter"""
j = i + 1
_opcode = opcode(intcode[i])
params = intcode[j: j + _opcode['param_count']]
modes = _opcode['modes']
mode_covert = {
0: lambda x: intcode[x], # position mode
1: lambda x: x # immediate mode... | 5,355,360 |
def unwind(g, num):
    """Return <num> first elements from iterator <g> as array."""
    taken = []
    for _ in range(num):
        taken.append(next(g))
    return taken
def find_best_control(db, input_features, max_distance=200.0, debug=False, control_cache=None):
"""
Search all controls with AST vector magnitudes within max_distance and find the best hit (lowest product of AST*call distance)
against suitable controls. Does not currently use literal distance for the calculati... | 5,355,362 |
def test_delete_multiple_objects():
"""批量删除文件"""
file_id = str(random.randint(0, 1000)) + str(random.randint(0, 1000))
file_name1 = "tmp" + file_id + "_delete1"
file_name2 = "tmp" + file_id + "_delete2"
response1 = client.put_object(
Bucket=test_bucket,
Key=file_name1,
Body='... | 5,355,363 |
def CPPComments(text):
"""Remove all C-comments and replace with C++ comments."""
# Keep the copyright header style.
line_list = text.splitlines(True)
copyright_list = line_list[0:10]
code_list = line_list[10:]
copy_text = ''.join(copyright_list)
code_text = ''.join(code_list)
# Remove */ for C-commen... | 5,355,364 |
def get_bulk_statement(
stmt_type, table_name, column_names, dicts=True, value_string="%s", odku=False
):
"""Get a SQL statement suitable for use with bulk execute functions
Parameters
----------
stmt_type : str
One of REPLACE, INSERT, or INSERT IGNORE. **Note:** Backend support for
... | 5,355,365 |
def test_interfacegroup_construction_item():
"""Check that we construct address groups when sub-props are not a list."""
interface = Interface(hostname="h1", interface="i1")
interface_group = InterfaceGroup("g1", interfaces=[interface])
assert InterfaceGroup("g1", interfaces=interface) == interface_grou... | 5,355,366 |
def map_remove_by_value_range(bin_name, value_start, value_end, return_type, inverted=False):
"""Creates a map_remove_by_value_range operation to be used with operate or operate_ordered
The operation removes items, with values between value_start(inclusive) and
value_end(exclusive) from the map
Args:
... | 5,355,367 |
def insertions_sort(A):
"""Sort list of comparable elements into nondecreasing order"""
for i in range(1, len(A)): # from 1 to n-1
curr = A[i] # current element to be possibly moved
j = i # variable used to find correct index for current
while (
j > 0 and A[j - 1] > curr
... | 5,355,368 |
def make_access_shp(access_shp_path):
"""Create a 100x100 accessibility polygon shapefile with two access values.
Args:
access_shp_path (str): the path for the shapefile.
Returns:
None.
"""
srs = osr.SpatialReference()
srs.ImportFromEPSG(26910)
projection_wkt = srs.ExportTo... | 5,355,369 |
async def async_setup_racelandshop_websockt_api():
"""Set up WS API handlers."""
racelandshop = get_racelandshop()
racelandshop.log.info("Setup task %s", RacelandshopSetupTask.WEBSOCKET)
websocket_api.async_register_command(racelandshop.hass, racelandshop_settings)
websocket_api.async_register_comma... | 5,355,370 |
def rationalApproximation(points, N, tol=1e-3, lowest_order_only=True):
"""
Return rational approximations for a set of 2D points.
For a set of points :math:`(x,y)` where :math:`0 < x,y \\leq1`, return all
possible rational approximations :math:`(a,b,c) \\; a,b,c \\in \\mathbb{Z}`
such that :math:`... | 5,355,371 |
def delete_group(group_id: int) -> None:
    """
    Remove every subscription held by a group.

    :param group_id: the group number whose subscriptions are removed
    """
    subscriptions = get_group_sub(group_id)
    for live_id in subscriptions.keys():
        delete_sub(live_id, group_id)
def get_notebook_server_instance(try_use_existing=False):
"""Create a notebook server instance to use. Optionally attempting to re-use existing
instances.
"""
pid = get_cache_pid()
servers = list_running_servers()
# If we already have a server, use that
for server in servers:
if s... | 5,355,373 |
def to_dict(doc, fields):
"""Warning: Using this convenience fn is probably not as efficient as the
plain old manually building up a dict.
"""
def map_field(prop):
val = getattr(doc, prop)
if isinstance(val, list):
return [(e.to_dict() if hasattr(e, 'to_dict') else e) for e i... | 5,355,374 |
def _auto_wrap_external(real_env_creator):
"""Wrap an environment in the ExternalEnv interface if needed.
Args:
real_env_creator (fn): Create an env given the env_config.
"""
def wrapped_creator(env_config):
real_env = real_env_creator(env_config)
if not isinstance(real_env, (E... | 5,355,375 |
def _get_binary_link_deps(
base_path,
name,
linker_flags = (),
allocator = "malloc",
default_deps = True):
"""
Return a list of dependencies that should apply to *all* binary rules that link C/C++ code.
This also creates a sanitizer configuration rule if necessary, s... | 5,355,376 |
def create_dummy_ligand(ligand, cut_idx=None):
"""
Takes mol object and splits it based on a primary amine such that the frags can connect to
the tertiary amine on the Mo core.
Args:
cut_idx tuple(int):
ligand (mol):
Returns:
ligands List(mol) :
"""
# TODO AllChem.Rep... | 5,355,377 |
def double2pointerToArray(ptr, n, m_sizes):
""" Converts ctypes 2D array into a 2D numpy array.
Arguments:
ptr: [ctypes double pointer]
n: [int] number of cameras
m_sizes: [list] number of measurements for each camera
Return:
arr_list: [list of ndarrays] list of numpy ... | 5,355,378 |
def test_loading_cached_properties():
"""Load cached properties."""
loader = Loader(new_path_syntax=True)
obj = loader.get_object_documentation("tests.fixtures.cached_properties:C")
assert len(obj.children) == 1
assert obj.children[0].name == obj.children[0].docstring == "aaa"
assert "cached" in... | 5,355,379 |
def read_varint(stream: bytes):
"""
读取 varint。
Args:
stream (bytes): 字节流。
Returns:
tuple[int, int],真实值和占用长度。
"""
value = 0
position = 0
shift = 0
while True:
if position >= len(stream):
break
byte = stream[position]
... | 5,355,380 |
def plot_mass_hist(data, popup=False):
"""
Plots a histogram of the Natural Log value for mass
of the Meteorite Landings dataset. Shows the figure if
popup = true, and saves it if popup is false
"""
plt.close()
strikes = data['strikes']
# Plots histogram with ln_mass column
strikes[... | 5,355,381 |
def check_fragment_count(blob_list, fragmentId, log_path, _fragment_count_flag):
"""Count fragment images || フラグメント画像の枚数確認
Args:
blob_list(list): list of file name registered in blob || azure.storage.blobのコンテナーへ登録されたファイル名のリスト
fragmentId (list): list of fragment images (GUID) || フラグメント画像につけられたGU... | 5,355,382 |
def get_arguments():
"""
get commandline arguments
"""
# Parse command line arguments
parser = argparse.ArgumentParser(description="P1 reader interface")
parser.add_argument("--config-file",
default=__file__.replace('.py', '.yml').replace('/bin/', '/etc/'),
... | 5,355,383 |
def any(array, mapFunc):
"""
Checks if any of the elements of array returns true, when applied on a function that returns a boolean.
:param array: The array that will be checked, for if any of the elements returns true, when applied on the function. \t
:type array: [mixed] \n
:param mapFunc: The fun... | 5,355,384 |
def qt_point_to_point(qt_point, unit=None):
"""Create a Point from a QPoint or QPointF
Args:
qt_point (QPoint or QPointF): The source point
unit (Unit): An optional unit to convert
values to in the output `Point`. If omitted, values
in the output `Point` will be plain `i... | 5,355,385 |
def test_s3_bucket_policy():
"""
To test that bucket policy is applied if passed in
"""
template = Template()
policy = {
"Version": "2012-10-17",
"Statement": [
{
"Sid": "AWSCloudTrailAclCheck20150319",
"Effect": "Allow",
"... | 5,355,386 |
def apiRequest(method, payload=None):
"""
Get request from vk server
:param get: method for vkApi
:param payload: parameters for vkApi
:return: answer from vkApi
"""
if payload is None:
payload = {}
if not ('access_token' in payload):
payload.update({'access_token': G... | 5,355,387 |
def page_required_no_auth(f):
    """Full page, requires user to be logged out to access, otherwise redirects to main page."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        # A logged-in user (session has a username) is bounced to the main page.
        if "username" not in session:
            return f(*args, **kwargs)
        return redirect("/")
    return wrapper
def parse_arguments(args):
"""
Parse all given arguments.
:param args: list
:return: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description=__description__,
epilog="Example-usage in apache-config:\n"
'CustomLog "| /path/to/anonip.py '
'[OPTIONS] --o... | 5,355,389 |
def p_xmlkv_opt_list(p):
    """wc_stringlist : wc_string"""
    # NOTE: the docstring above is the PLY grammar production for this rule;
    # the parser generator reads it, so it must not be reworded.
    # Reduce the single wc_string child into an 'EQ' assignment node.
    p[0] = ParseTreeNode('EQ', raw='assign')
    p[0].add_child(p[1])
def generate_forward():
"""
Generate dataset with forward method
It tries to integrate random function.
The integral may not be symbolically possible, or may contains invalid operators.
In those cases, it returns None.
"""
formula = symbolic.fixed_init(15)
integrated = sympy.integrate(formula, symbolic... | 5,355,391 |
def exit_prompt(message=''):
    """Print an optional message, wait for the user to press Enter, then exit."""
    if message != '':
        print(str(message))
    input('\nPress [Enter] to exit...')
    sys.exit()
def load_agent(agent_args, domain_settings, experiment_settings):
"""
This function loads the agent from the results directory results/env_name/method_name/filename
Args:
experiment_settings
Return:
sarsa_lambda agent
"""
with open('results/' + experiment_... | 5,355,393 |
def unpickle_robust(bytestr):
    """ robust unpickle of one byte string """
    buffer = BytesIO(bytestr)
    return robust_unpickler(buffer).load()
def test_POST_log_entry_admin_user(test_client,test_login,):
""" Ensure that when the user issues a POST request
to the new_request page a log entry is inserted into the user action log table
with the correct log event """
# Simulate pressing the "Setup samples" button
data = dict(
labname="Wang",correspondenc... | 5,355,395 |
def build_ins_embed_branch(cfg, input_shape):
    """
    Build a instance embedding branch from `cfg.MODEL.INS_EMBED_HEAD.NAME`.
    """
    head_name = cfg.MODEL.INS_EMBED_HEAD.NAME
    branch_cls = INS_EMBED_BRANCHES_REGISTRY.get(head_name)
    return branch_cls(cfg, input_shape)
def atl03sp(ipx_region, parm, asset=icesat2.DEFAULT_ASSET):
"""
Performs ATL03 subsetting in parallel on ATL03 data and returns photon segment data.
See the `atl03sp <../api_reference/icesat2.html#atl03sp>`_ function for more details.
Parameters
----------
ipx_region: Query
... | 5,355,397 |
def get_sample_activity_from_batch(activity_batch, idx=0):
    """Return layer activity for sample ``idx`` of an ``activity_batch``.

    Each entry of ``activity_batch`` is a (batch_activity, layer_info) pair;
    the result keeps the pairs but selects row ``idx`` of each activity.
    """
    selected = []
    for batch_act, layer_info in activity_batch:
        selected.append((batch_act[idx], layer_info))
    return selected
def dump(location):
"""Run pg_dump."""
os.environ['PGPASSWORD'] = current_app.config['PG_PASSWORD']
pg_dump = current_app.config.get('PG_BIN_DIR') + 'pg_dump'
subprocess.call((
pg_dump,
'--host={}'.format(current_app.config['PG_HOST']),
'--username={}'.format(current_app.config['PG_USERNAME']),
'--format=c'... | 5,355,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.