content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test_sqlite_run_sql_with_parameters():
    """Verify that a parameterized statement binds values via the templating engine."""
    query = "SELECT 1 + :value;"
    db = SqliteDatabase()
    result = db.run_sql(query, parameters={"value": 1})
    # 1 + bound value 1
    assert result.first()[0] == 2
def print_unicodeinfo(val: str, key: str) -> str:
"""
Prints the occurrence, unicode character or guideline rules and additional information
:param args: arguments instance
:param val: count of the occurrences of key
:param key: key (glyph or guideline rules)
:return:
"""
return f"{val:-... | 5,355,101 |
def ramp_up_coulomb(
lamda: float,
simulation: app.Simulation,
ligand_indices: List[int],
original_parameters: List[unit.Quantity],
) -> None:
"""
Helper function for the ghost_busters_ligand function. It updates the charge parameter in the nonbonded force of your simulation context.
Args:
... | 5,355,102 |
def test_column_values_not_in_set():
    """Parsing a columnValuesToBeNotInSet payload yields a ColumnValuesToBeNotInSet config."""
    payload = {
        "config": {"forbiddenValues": ["random"]},
        "columnTestType": "columnValuesToBeNotInSet",
    }
    parsed = ColumnTestCase.parse_obj(payload)
    assert isinstance(parsed.config, ColumnValuesToBeNotInSet)
def plot_upset_indicators(
intersections,
ax=None,
facecolor="black",
element_size=None,
with_lines=True,
horizontal=True,
height_pad=0.7,
):
# REF: https://github.com/jnothman/UpSetPlot/blob/e6f66883e980332452041cd1a6ba986d6d8d2ae5/upsetplot/plotting.py#L428
"""Plot the matrix of in... | 5,355,104 |
def qx_to_npx(df):
    """Convert a table of mortality rates qx into cumulative survival npx.

    Each entry is turned into a survival probability (1 - qx), the
    cumulative product is taken down the rows and shifted one row, and
    the diagonal is set to 1 (probability of surviving zero years).
    """
    survival = (1 - df).cumprod().shift()
    for idx in df.index:
        survival.loc[idx, idx] = 1
    return survival
def homepage(selenium, config):
    """Open the configured base URL, size the window and dismiss the cookie bar."""
    driver = selenium
    driver.get(config.BASE_URL)
    driver.set_window_size(config.WINDOW_WIDTH, config.WINDOW_HEIGHT)
    custom_click_cookie_rollbar(driver, config.MAX_WAIT_TIME)
    return driver
def _if_scalar_type_as(g, self, tensor):
"""
Convert self into the same type of tensor, as necessary.
We only support implicit casting for scalars, so we never
actually need to insert an ONNX cast operator here; just
fix up the scalar.
"""
if isinstance(self, torch._C.Value):
return... | 5,355,107 |
def read_images_text(path):
"""
see: src/base/reconstruction.cc
void Reconstruction::ReadImagesText(const std::string& path)
void Reconstruction::WriteImagesText(const std::string& path)
"""
images = {}
with open(path, "r") as fid:
while True:
line = fid.... | 5,355,108 |
def emailIsValid(email):
    """Return True when *email* matches the module-level EMAIL_RE pattern."""
    match = EMAIL_RE.match(email)
    return match is not None
def calculate_line_number(text):
    """Count the non-blank lines in *text*."""
    return sum(1 for line in text.split("\n") if line.strip())
def remove_user_from_group(user_openid, group_id):
"""Remove specified user from specified group."""
session = get_session()
with session.begin():
(session.query(models.UserToGroup).
filter_by(user_openid=user_openid).
filter_by(group_id=group_id).
delete(synchronize_sessi... | 5,355,111 |
def links_at_node(shape):
"""Get link ids for each node.
Parameters
----------
shape : tuple of int
Shape of grid of nodes.
Returns
-------
(N, 4) ndarray of int
Array of link ids.
Examples
--------
>>> from landlab.grid.structured_quad.links import links_at_no... | 5,355,112 |
def manage_addFancyContent(self, id, REQUEST=None):
    """Add the fancy fancy content."""
    # NOTE(review): Zope publishes methods with docstrings; kept present.
    content = FancyContent(id)
    id = self._setObject(id, content)
    return ''
def convert_sentences(sentences, tokenizer):
"""
Truncate each sentence to 512 bpes in order to fit on BERT and convert it to bpes.
:param tokenizer: The BERT tokenizer we used in order convert each sentence to ids.
:param sentences: The tokenized sentences of the summary we are processing.
:return:... | 5,355,114 |
def imscale(image: Imagelike, scale: Union[float, Tuple[float, float]],
**kwargs) -> np.ndarray:
"""Scale the given image. The result will be a new image
scaled by the specified scale.
"""
global _resizer
if _resizer is None:
_resizer = ImageResizer()
return _resizer.scale(i... | 5,355,115 |
def parse_tooltip(spell: Union[ChampionSpell, SummonerSpell], tooltip: str) -> str:
"""
Improved tooltip parser based on the built-in Cassiopeia `Spell.__replace_variables`
"""
for dto in spell._data.values():
try:
costs_burn = dto.costBurn
effects_burn = dto.effectBurn
... | 5,355,116 |
def _packages_info() -> dict:
    """Collect installed package versions as a name-to-version mapping."""
    packages = Dependencies.installed_packages()
    return packages
def generate(package_path: Optional[Path]) -> None:
"""Generate Poetry package manifests"""
PROJECT_CONFIG.load_requirements()
processor = PackageProcessor()
processor.register_packages()
processor.ensure_no_circular_imports()
if package_path:
processor.generate_package_manifest(package_... | 5,355,118 |
def _save_mnist_recreation_indices():
"""Code to find MNIST train, validation and test indices for recreation of
MNIST MAF dataset.
Note this should not be called directly. This is only here for reproducibility."""
warnings.warn('This function should generally not be called because it '
... | 5,355,119 |
def main():
"""Run training process."""
parser = argparse.ArgumentParser(
description="Train Hifigan (See detail in examples/hifigan/train_hifigan.py)"
)
parser.add_argument(
"--train-dir",
default=None,
type=str,
help="directory including training data. ",
)
... | 5,355,120 |
def _first(root: TreeNode) -> TreeNode:
    """Return the first node of the `root` subtree in inorder order.

    Args:
        root (TreeNode): root of subtree

    Returns:
        TreeNode: leftmost (inorder-first) node of the subtree
    """
    node = root
    while node.left is not None:
        node = node.left
    return node
def mock_plot(mocker):
    """Stub out matplotlib's gca/show so test code never opens figures."""
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        # matplotlib absent: nothing to patch.
        return
    mocker.patch.object(plt, "gca")
    mocker.patch.object(plt, "show")
def get_library_version() -> str:
    """Return the minecraft-launcher-lib version string."""
    return __version__
def download_voc_pascal(data_dir='../data'):
"""Download the Pascal VOC2012 Dataset."""
voc_dir = os.path.join(data_dir, 'VOCdevkit/VOC2012')
url = "http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar"
sha1 = '4e443f8a2eca6b1dac8a6c57641b67dd40621a49'
fname = gutils.download(u... | 5,355,124 |
def load_image(image):
"""reshape and convert image to fit the model"""
img = cv2.imread(image) # Load images
img = cv2.resize(img, (257, 257), interpolation=cv2.INTER_LINEAR) # resize
img = (np.float32(img) - 127.5) / 127.5 # change image to float and normalize
img = img.reshape((1, 257, 25... | 5,355,125 |
def hist_trigger_time_diff(df_dev):
    """Histogram of log-scaled gaps (minutes) between device trigger events."""
    diffs = devices_trigger_time_diff(df_dev.copy())
    minutes = diffs['row_duration'].dt.total_seconds() / 60
    figure = go.Figure()
    figure.add_trace(go.Histogram(x=np.log(minutes), nbinsx=200))
    return figure
def verify_spec(spec_utid, proxy_utid):
    """Compare a unit test's spec values against the proxy's observed values.

    :param spec_utid: mapping of expected key/value pairs for one unit test id
    :param proxy_utid: mapping of observed key/value pairs for the same utid
    :return: one string listing, for every spec key, the spec value, the
             proxy value and whether they match
    """
    results = ''
    for key in spec_utid:
        spec_val = spec_utid[key]
        # Use .get() so a key missing from the proxy reports as a mismatch
        # (None) instead of raising KeyError, matching the comparison below.
        proxy_val = proxy_utid.get(key)
        results += '%s: spec=%s, proxy=%s (%s) *** ' % (key, spec_val, proxy_val, spec_val == proxy_val)
    return results
def test_Local_dir():
"""
Test Local filesystem directory utilities
"""
root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
with TemporaryDirectory() as td:
src = os.path.join(root, 'cloud_fs')
dst = os.path.join(td, 'cloud_fs')
fs = FileSystem(src)
fs... | 5,355,128 |
def diff_plot(song_1, song_2, filename=None, folder=None):
"""
Plot the difference between two series.
:param dict song_1:
:param dict song_2:
:param str filename:
:param str folder:
:return:
"""
x_1 = list(song_1.keys())
y_1 = list(song_1.values())
x_2 = list(song_2.keys())... | 5,355,129 |
def lammps_created_gsd(job):
    """Report whether the job directory already holds the npt gsd trajectory."""
    gsd_name = "trajectory-npt.gsd"
    return job.isfile(gsd_name)
def neo4j_data_age(data, max_data_age=None):
"""
Checks the noclook_last_seen property against datetime.datetime.now() and
if the difference is greater than max_data_age (hours)
(django_settings.NEO4J_MAX_DATA_AGE will be used if max_data_age is not specified)
and the noclook_auto_manage is true the... | 5,355,131 |
def ProfileOptions(parser):
"""Build option group for profiling chrome.
Args:
parser: OptionParser object for parsing the command-line.
Returns:
Option group that contains profiling chrome options.
"""
profile_options = optparse.OptionGroup(parser, 'Profile Chrome Options')
browsers = sorted(util.... | 5,355,132 |
def _clean_unicode(value):
    """Return the value as a unicode."""
    # NOTE(review): Python 2-only helper — relies on str.decode and the
    # builtin `unicode`, neither of which exists on Python 3.
    if isinstance(value, str):
        # On py2, str is a byte string; assumed to be UTF-8 encoded.
        return value.decode('utf-8')
    else:
        # Anything else (numbers, None, objects) is coerced via unicode().
        return unicode(value)
def deslugify_province(prov):
    """Turn a province slug into its display name.

    Dashes become spaces and each word is title-cased; KwaZulu-Natal keeps
    its special capitalisation.
    """
    if prov == 'kwazulu-natal':
        return 'KwaZulu-Natal'
    words = prov.split('-')
    return ' '.join(words).title()
def color_debug():
    """Color used for debug-level output (configurable, defaults to grey)."""
    default = "grey"
    return read_config_color("COLOR", "debug", default)
def replace_sym(data: str) -> str:
"""
Converts currency strings such as ``£5.00`` to ``5.00 GBP`` - or ``10 kr`` to ``10 SEK``
"""
origdata = data
data = data.strip()
for s, r in settings.CUR_SYMBOLS.items():
if data.startswith(s) or data.endswith(s):
log.debug(f"Replacing s... | 5,355,136 |
def parse_vectors(vectors):
""" Basic cleanup of vector or vectors
Strip out V from V#s. Similar to parse tables, this by no means guarantees
a valid entry, just helps with some standard input formats
Parameters
----------
vectors : list of str or str
A string or list of strings of vec... | 5,355,137 |
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar', best_filename='model_best.pth.tar'):
    """Persist a training checkpoint, copying it as the best model when flagged.

    :param state: serializable checkpoint payload (e.g. model/optimizer state)
    :param is_best: when True, also copy the checkpoint to *best_filename*
    :param filename: destination path for the checkpoint
    :param best_filename: destination path for the best-model copy
    :return: None
    """
    torch.save(state, filename)
    if not is_best:
        return
    shutil.copyfile(filename, best_filename)
def sort_slopes(sds):
    """Order slope segments bottom to top, then right to left.

    The key weights the second endpoint's y by 1e6 so vertical position
    dominates the ordering.
    """
    slopes = np.asarray(sds)
    key = slopes[:, 0, 1] + 1e6 * slopes[:, 1, 1]
    order = np.argsort(key)
    return slopes[order]
def test_horodecki_invalid_a_param():
    """Out-of-range a_param values must raise ValueError."""
    for bad_value in (-5, 5):
        with np.testing.assert_raises(ValueError):
            horodecki(bad_value)
def test_success(database):
""" Test if the DisasterEmergencyFundCode element has a valid COVID-19 related code and TOA is blank, then
GrossOutlayByAward_CPE cannot be blank.
"""
# gross_outlay_amount_by_awa_cpe populated
op1 = AwardFinancialFactory(disaster_emergency_fund_code='l', transaction_... | 5,355,141 |
def test_researcher_invitation(client, mocker):
"""Test full researcher invitation flow."""
exception = mocker.patch.object(client.application.logger, "exception")
mocker.patch("sentry_sdk.transport.HttpTransport.capture_event")
mocker.patch(
"orcid_hub.views.send_user_invitation.queue",
... | 5,355,142 |
def _get_mock_dataset(root_dir, base_dir_name):
"""
root_dir: directory to the mocked dataset
"""
base_dir = os.path.join(root_dir, base_dir_name)
os.makedirs(base_dir, exist_ok=True)
if base_dir_name == SQuAD1.__name__:
file_names = ("train-v1.1.json", "dev-v1.1.json")
else:
... | 5,355,143 |
def serialize_skycoord(o):
"""
Serializes an :obj:`astropy.coordinates.SkyCoord`, for JSONification.
Args:
o (:obj:`astropy.coordinates.SkyCoord`): :obj:`SkyCoord` to be serialized.
Returns:
A dictionary that can be passed to :obj:`json.dumps`.
"""
representation = o.representa... | 5,355,144 |
def redis_sentinel(create_sentinel, sentinel, loop):
    """Build a Redis Sentinel client and verify it responds to PING."""
    client = loop.run_until_complete(
        create_sentinel([sentinel.tcp_address], timeout=2, loop=loop))
    assert loop.run_until_complete(client.ping()) == b'PONG'
    return client
def compute_features(
seq_path: str,
map_features_utils_instance: MapFeaturesUtils,
social_features_utils_instance: SocialFeaturesUtils,
) -> Tuple[np.ndarray, Dict[str, np.ndarray]]:
"""Compute social and map features for the sequence.
Args:
seq_path (str): file path for the se... | 5,355,146 |
def summarizeTitlesByLength(titlesAlignments, limit=None):
"""
Sort match titles by sequence length.
@param titlesAlignments: A L{dark.titles.TitlesAlignments} instance.
@param limit: An C{int} limit on the number of results to show.
@return: An C{IPython.display.HTML} instance with match titles so... | 5,355,147 |
def _diff_tail(msg):
"""`msg` is an arbitrary length difference "path", which could
be coming from any part of the mapping hierarchy and ending in any kind of
selector tree. The last item is always the change message: add, replace,
delete <blah>. The next to last should always be a selector key of s... | 5,355,148 |
def select_standard_name(session, cluster, importance_table_name):
"""
Use cluster members for a WHERE ... IN (...) query
Use SQLAlchemy to handle the escaping
"""
stmt = session.query('name from %s' % importance_table_name) \
.filter(column('name').in_(list(cluster))) \
.order_by('"... | 5,355,149 |
def test_squeeze_sumup():
    """sumup=True must not change the (unsqueezed) output shape."""
    sensor = magpy.Sensor(pixel=(1, 2, 3))
    sphere = magpy.magnet.Sphere((1, 2, 3), 1)
    shape_plain = magpy.getB(sphere, sensor, squeeze=False).shape
    shape_sumup = magpy.getB(sphere, sensor, squeeze=False, sumup=True).shape
    assert shape_plain == shape_sumup
def calculate_depth(experiment):
""" Calculate the minor, major, total depth
Args:
experiment (remixt.Experiment): experiment object
Returns:
pandas.DataFrame: read depth table with columns, 'major', 'minor', 'total', 'length'
"""
data = remixt.analysis.experiment.create_segment_... | 5,355,151 |
def checkerboard(key, nsq, size, dtype=np.float32):
"""Create a checkerboard background image with random colors.
NOTE: only supports a single value for nsq (number squares).
Args:
key: JAX PRNGkey.
nsq (int): number of squares per side of the checkerboard.
size (int): size of one side of the checke... | 5,355,152 |
def choice(x, a):
    """Draw a random sample of size *a* from a tensor (non-tensors pass through)."""
    if not torch.is_tensor(x):
        return x
    idx = torch.randint(len(x), (a,))
    return x[idx]
def correct_gene_names(df):
""" Fix datetime entries in Gene names
"""
update_symbols = []
for i, gs in enumerate(df.Gene_Symbol):
if (not (isinstance(gs, str))) or (':' in gs):
update_symbols.append(mapping.get_name_from_uniprot(df.Uniprot_Id.iloc[i]))
else:
upda... | 5,355,154 |
def process_repl_args(args):
""" Process PANDA replay-related arguments.
"""
assert False, 'Not implemented yet.'
cmd = []
cmd.extend(['-display', 'none'])
return cmd
# p_test "${panda_rr}-rr-snp" f "trace memory snapshot"
# p_test "${panda_rr}-rr-nondet.log" f "trace nondet log"
# -... | 5,355,155 |
def test_RotationPlot_methods():
"""This code is lifted from demo-3-v0.1."""
misori = Misorientation([1, 1, 1, 1]) # any will do
ori = Orientation.random()
fig = plt.figure()
ax = fig.add_subplot(projection="axangle", proj_type="ortho", **_SUBPLOT_KWARGS)
ax.scatter(misori)
ax.scatter(ori)
... | 5,355,156 |
def choose(n, k):
"""
A fast way to calculate binomial coefficients by Andrew Dalke (contrib).
"""
if 0 <= k <= n:
ntok = 1
ktok = 1
for t in range(1, min(k, n - k) + 1): # changed from xrange
ntok *= n
ktok *= t
n -= 1
return ntok // ... | 5,355,157 |
def _get_ReaLiSe_dataset(which="15"):
"""
For its
"""
print("Loading ReaLiSe Dataset !")
print("Hint: The Data You loading now is the preprocessed sighan from ReaLise, ")
ddp_exec("os.system('date')")
path = "../SE_tmp_back/milestone/ReaLiSe/data/"
import pickle
train_dataset = pic... | 5,355,158 |
def generate_check_phrase() -> bytes:
    """Produce the random check-phrase used when pairing the auxiliary socket.

    :return: ATOM_LENGTH random bytes.
    """
    phrase = get_random_bytes(ATOM_LENGTH)
    return phrase
def load_pickle(filename: str):
    """Load and return a pickled object from disk.

    Parameters
    ----------
    filename: str
        Path of the pickle file to read.

    Returns
    -------
    The unpickled object.
    """
    # Context manager closes the handle even if pickle.load raises;
    # the original opened the file without ever closing it.
    # NOTE: pickle is unsafe on untrusted input — only load trusted files.
    with open(filename, 'rb') as handle:
        return pickle.load(handle)
def find_pssm_missing_proteins(fasta_dict, pssm_dir):
"""find_pssm_missing_proteins function finds the missing pssm files of the proteins in fasta file.
Args:
fasta_dict (dict): This is a dict of fasta file. The keys of fasta_dict are protein ids and
values are protein sequences.
pssm_d... | 5,355,161 |
def _format_echo(text):
"""Compose system echo command outputs text"""
quote = '' if os.name == 'nt' else '"'
return 'echo {}{}{}'.format(quote, text, quote) | 5,355,162 |
def crop_to_reference(dataset: xr.Dataset, ref_dataset: xr.Dataset) -> xr.Dataset:
""" Crops horizontal coordinates to match reference dataset """
if "longitude" not in dataset.coords.keys():
raise ValueError("Longitude is not a coordinate of dataset.")
if "longitude" not in ref_dataset.coords... | 5,355,163 |
def add_frontend(fe_role, params):
"""
add_frontend
:return:
"""
# TODO the function is not appropriate
# Before add backend change root password, This is not appropriate.
change_root_passowrd(params)
# add doris fe
doris_fe_hostname = params.doris_fe_hostname[0]
doris_fe_obser... | 5,355,164 |
def split_tasks(lst, n):
    """Yield *lst* split into at most *n* contiguous, near-equal chunks.

    :param lst: sequence of tasks to divide
    :param n: desired number of chunks (must be > 0)
    """
    # Guard the empty case: math.ceil(0 / n) == 0 would make range()'s
    # step zero and raise ValueError in the original implementation.
    if not lst:
        return
    chunk_size = math.ceil(len(lst) / n)
    for start in range(0, len(lst), chunk_size):
        yield lst[start:start + chunk_size]
def parse_line(description, inline_comments=_INLINE_COMMENT_PREFIXES):
"""
Parse a line and correctly add the description(s) to a collection
"""
# manually strip out the comments
# py2 cannot ignore comments on a continuation line
# https://stackoverflow.com/q/9110428/1177288
#
# PY3 ca... | 5,355,166 |
def target_frame():
    """Name of the input reference frame."""
    return 'IAU_ENCELADUS'
def cards(cs):
    """Parse a space-separated card string into a value-by-color indicator matrix."""
    result = np.zeros([len(valueL), len(colorL)], int)
    for card in cs.split(' '):
        row = np.where(valueL == card[0])[0][0]
        col = np.where(colorL == card[1])[0][0]
        result[row, col] = 1
    return result
def _get_max_diag_idx(m, n_A, n_B, diags, start, percentage):
"""
Determine the diag index for when the desired percentage of distances is computed
Parameters
----------
m : int
Window size
n_A : int
The length of the time series or sequence for which to compute the matrix
... | 5,355,169 |
def get_argument_values(arg_defs, arg_asts, variables):
"""Prepares an object map of argument values given a list of argument
definitions and list of argument AST nodes."""
if arg_asts:
arg_ast_map = {arg.name.value: arg for arg in arg_asts}
else:
arg_ast_map = {}
result = {}
f... | 5,355,170 |
def get_identifier(positioner_id, command_id, uid=0, response_code=0):
"""Returns a 29 bits identifier with the correct format.
The CAN identifier format for the positioners uses an extended frame with
29-bit encoding so that the 11 higher bits correspond to the positioner
ID, the 8 middle bits are the... | 5,355,171 |
def standardized(array):
    """Shift and scale *array* to zero mean and (near) unit standard deviation.

    Arguments:
        array (np.ndarray): values to normalize.

    Returns:
        The normalized array; the divisor is floored at 1e-4 so constant
        arrays do not divide by zero.
    """
    centered = array - array.mean()
    return centered / max(1e-4, array.std())
def newaddress(fn,passphrase,addr_type=0):
"""
getnetaddress
"""
wallet = Wallet(fn).fromFile(passphrase)
# Address Types
# addr_type == 0, deposit
# addr_type == 1, change
# addr_type == 2, staking
# addr_type == 3, Dealer
# Address types aren't programmatically important, but help to organize
if addr_type ... | 5,355,173 |
def index(args):
"""Handles the index step of the program."""
if not args.index: # build index
logging.info(" Building index...")
index_list = generate_index(args.input_dir)
if not index_list: # list is empty
logging.error(" Empty index. Exiting...")
return
... | 5,355,174 |
def setup_harness(bsize=16, workers=0):
"""
CommandLine:
python ~/code/netharn/netharn/examples/yolo_voc.py setup_harness
Example:
>>> # DISABLE_DOCTSET
>>> harn = setup_harness()
>>> harn.initialize()
"""
xpu = nh.XPU.cast('argv')
nice = ub.argval('--nice', de... | 5,355,175 |
def _output_object_or_file_map_configurator(prerequisites, args):
    """Adds the output file map or single object file to the command line."""
    return _output_or_file_map(
        args = args,
        output_file_map = prerequisites.output_file_map,
        outputs = prerequisites.object_files,
    )
def get_file_name():
    """Prompt the user for a file name and return the response."""
    return input('Input your file name: ')
def GetMinikubeVersion():
    """Returns the current version of minikube."""
    output = subprocess.check_output([_FindMinikube(), 'version'])
    return six.ensure_text(output)
def minimum_distance(geo1, geo2):
    """Smallest pairwise atom-atom distance between two geometries."""
    pairs = itertools.product(coordinates(geo1), coordinates(geo2))
    return min(cart.vec.distance(p1, p2) for p1, p2 in pairs)
def promptbrowse(tree):
"""A simple prompt with faux commands to browse the filetree"""
tree_pref = tree
def change_dir(dirnm):
"""Change directory"""
nonlocal tree_pref
# Move on up
if ".." in dirnm:
dotlst = dirnm.split("/")
upcnt = 0
f... | 5,355,180 |
def cli():
    """Entry point: configure root logging at DEBUG level."""
    logging.basicConfig(level=logging.DEBUG)
def test_tcp_telemetry_client_fn():
"""
Create a client from a telemetry daemon, reboot the client a number of
times and verify that it decodes telemetry on each boot.
"""
writer, env, daemon = tcp_env()
with writer.booted(), daemon.booted():
time.sleep(0.5)
client, out_queue = ... | 5,355,182 |
async def test_switch_context(
opp, entities, opp_admin_user, enable_custom_integrations
):
"""Test that switch context works."""
assert await async_setup_component(opp, "switch", {"switch": {"platform": "test"}})
await opp.async_block_till_done()
state = opp.states.get("switch.ac")
assert sta... | 5,355,183 |
def mkdir_p(path):
    """Create *path* (and any missing parents) if it doesn't exist already."""
    try:
        os.makedirs(path)
    except OSError as err:
        # Ignore "already exists"; re-raise anything else (permissions, etc.).
        if err.errno != errno.EEXIST:
            raise
def test_string_ilike(feature_list, field_list):
"""
Assertions for 'like' operations with no case sensitivity
:param feature_list: feature collection list
:param field_list: feature field names
"""
cql_ast = get_ast('name ILIKE "lake%"')
assert cql_ast == LikePredicateNode(
Attrib... | 5,355,185 |
def prepare():
"""
Get the list of filtered tweets by target entity where each item contains the tweet
with its original attributes when downloaded from Twitter
:return:
"""
path = '../../Data.json'
List = loadData(path) # load data
tweets = [List[i]['text'] for i in range(len(List))] ... | 5,355,186 |
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
"""
Given the mapping from entrypoint name to callable, return the relevant
console script specs.
"""
# Don't mutate caller's version
console = console.copy()
scripts_to_generate = []
# Special case pip and setuptools ... | 5,355,187 |
def local_action_StillOnAll(arg=None):
    """{"group": "Playback - All"}"""
    # Queue the "still on" query to every unit and route the reply.
    cmd = '*ST\r'
    queue.request(lambda: udp.send(cmd),
                  lambda resp: handleReqResp('StillOnAll', resp))
def deletecall(bam_url,api_call,call_parameters,delete_entity,header):
"""API request to delete and return values"""
call_url = "http://"+bam_url+"/Services/REST/v1/"+api_call+"?"
print("You are requesting to delete:")
print(delete_entity)
answer = input("Do you want to proceed (y (yes) or n (no))? ... | 5,355,189 |
def haDecFromAzAlt (azAlt, lat):
"""Converts alt/az position to ha/dec position.
Inputs:
- azAlt (az, alt) (deg)
- lat latitude (degrees);
>0 is north of the equator, <0 is south
Returns a tuple containing:
- haDec (HA, Dec) (deg), a tuple;
HA is i... | 5,355,190 |
def get_description(expression, options=None):
"""Generates a human readable string for the Cron Expression
Args:
expression: The cron expression string
options: Options to control the output description
Returns:
The cron expression description
"""
descripter = ExpressionDes... | 5,355,191 |
def check_for_firefox():
    """Determine whether a Firefox binary is available on this machine."""
    mac_path = '/Applications/Firefox.app/Contents/MacOS/firefox'
    if os.path.exists(mac_path):
        return True
    # Fall back to searching the PATH for known executable names.
    return any(find_executable(exe) for exe in ('firefox',))
def midpoint(close, length=None, offset=None, **kwargs):
"""Indicator: Midpoint"""
# Validate arguments
close = verify_series(close)
length = int(length) if length and length > 0 else 1
min_periods = int(kwargs['min_periods']) if 'min_periods' in kwargs and kwargs['min_periods'] is not None else len... | 5,355,193 |
def burn_in(task_id: str, build_variant: str, generate_config: GenerateConfig,
repeat_config: RepeatConfig, evg_api: EvergreenApi, evg_conf: EvergreenProjectConfig,
repos: List[Repo], generate_tasks_file: str, install_dir: str) -> None:
"""
Run burn_in_tests.
:param task_id: Id of t... | 5,355,194 |
def quad_fejer(order, domain=(0, 1), growth=False, segments=1):
"""
Generate the quadrature abscissas and weights in Fejer quadrature.
Args:
order (int, numpy.ndarray):
Quadrature order.
domain (chaospy.distributions.baseclass.Dist, numpy.ndarray):
Either distributio... | 5,355,195 |
def probe(app: FastFlixApp, file: Path) -> Box:
""" Run FFprobe on a file """
command = [
f"{app.fastflix.config.ffprobe}",
"-v",
"quiet",
"-loglevel",
"panic",
"-print_format",
"json",
"-show_format",
"-show_streams",
f"{file}",
... | 5,355,196 |
def is_standard_time_series(time_series, window=180):
"""
Check the length of time_series. If window = 180, then the length of time_series should be 903.
The mean value of last window should be larger than 0.
:param time_series: the time series to check, like [data_c, data_b, data_a]
:type time_ser... | 5,355,197 |
def get_comment_type(token, comment_syntax):
    """Return the SQL-engine comment type for *token*.

    Block comments and line comments are resolved through *comment_syntax*;
    any other token yields None.
    """
    if is_block_comment(token):
        return comment_syntax.get_block_comment_type(token)
    if is_line_comment(token):
        return comment_syntax.get_line_comment_type(token)
    return None
def utils_short_term_train(speaker_model, listener_model, candidates, policy4shortgame, stop=0.1, maxrounds=1000, trainspeaker=True, trainlistener=True):
"""
All inputs: Just one instance. No bs dimensize.
"""
new_candidates = copy.deepcopy(candidates)
rr = 0
rewards = []
while rr < maxrounds:
np.rando... | 5,355,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.