content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def parseParams(opt):
"""Parse a set of name=value parameters in the input value.
Return list of (name,value) pairs.
Raise ValueError if a parameter is badly formatted.
"""
params = []
for nameval in opt:
try:
name, val = nameval.split("=")
except ValueError:
... | 5,356,800 |
def test_list_short_enumeration_2_nistxml_sv_iv_list_short_enumeration_3_5(mode, save_output, output_format):
"""
Type list/short is restricted by facet enumeration.
"""
assert_bindings(
schema="nistData/list/short/Schema+Instance/NISTSchema-SV-IV-list-short-enumeration-3.xsd",
instance=... | 5,356,801 |
def alert_remove(alert_id):
    """Delete the network alert identified by ``alert_id`` via the Shodan API."""
    api_key = get_api_key()
    client = shodan.Shodan(api_key)
    try:
        client.delete_alert(alert_id)
    except shodan.APIError as e:
        # Surface the API failure through the CLI's error type.
        raise quo.Outliers(e.value)
    quo.echo("Alert deleted")
def asnumpy(a, dtype=None, order=None):
"""Returns a dense numpy array from an arbitrary source array.
Args:
a: Arbitrary object that can be converted to :class:`numpy.ndarray`.
order ({'C', 'F', 'A'}): The desired memory layout of the output
array. When ``order`` is 'A', it uses 'F... | 5,356,803 |
def render_array_items(
item_renderer: ITEM_RENDERER_TYPE,
summary_renderer: ITEM_RENDERER_TYPE,
index: INDEX_TYPE,
array: np.ndarray,
edge_items: int,
) -> ITEM_GENERATOR_TYPE:
"""Render array, dispatching to `render_array_summarised` if required.
:param item_renderer: item renderer
:p... | 5,356,804 |
def get_next_cpi_date():
"""
Get next CPI release date
"""
df = pd.read_html(r"https://www.bls.gov/schedule/news_release/cpi.htm")[0][:-1]
df["Release Date"] = pd.to_datetime(df["Release Date"], errors='coerce')
df = df[df["Release Date"] >= current_date].iloc[0]
df['Release Date'] = df['Rel... | 5,356,805 |
async def on_ready():
    """Announce on stdout that the bot has finished logging in."""
    status_message = "Bot is ready."
    print(status_message)
def date_arithmetic() -> Tuple[datetime, datetime, int]:
""" This function is used to calculate
what is the date after 3 days is given
and the differences between two given dates """
date1: str = "Feb 27, 2020"
date_2020: datetime = datetime.strptime(
date1, "%b %d, %Y") + timedelt... | 5,356,807 |
def update_investment_projects_for_gva_multiplier_task(gva_multiplier_id):
"""
Updates the normalised gross_value_added for all investment projects
with the associated GVA Multiplier.
"""
try:
instance = GVAMultiplier.objects.get(id=gva_multiplier_id)
except GVAMultiplier.DoesNotExist:
... | 5,356,808 |
def get_words_from_line_list(text):
    """
    Applies Translations and returns the list of words from the text document.

    :param text: raw document text (str)
    :return: list of words with English stopwords removed
    """
    text = text.translate(translation_table)
    # Build the stopword set once. The original evaluated
    # set(stopwords.words('english')) inside the comprehension condition,
    # i.e. once PER WORD, turning the filter into O(words * stopwords).
    stop_words = set(stopwords.words('english'))
    word_list = [x for x in text.split() if x not in stop_words]
    return word_list
def _get_name(dist):
"""Attempts to get a distribution's short name, excluding the name scope."""
return getattr(dist, 'parameters', {}).get('name', dist.name) | 5,356,810 |
def seta_count(ent):
"""Enrich the match."""
data = {'body_part': 'seta'}
location = []
for token in ent:
label = token._.cached_label
if label == 'seta':
data['seta'] = REPLACE.get(token.lower_, token.lower_)
elif label == 'number_word':
data['count'] ... | 5,356,811 |
def compute_mean_std(all_X):
    """Return an approximate per-feature mean and std over a list of arrays."""
    stacked = np.concatenate(all_X, axis=0).astype(np.float64)
    feature_mean = stacked.mean(axis=0)
    feature_std = stacked.std(axis=0)
    # Constant features get std 1 so later normalisation never divides by zero.
    feature_std[feature_std == 0] = 1
    return feature_mean, feature_std
def frontend_handler(socketio_request_json):
    """Handler of SocketIO request for frontend.

    Thin wrapper: delegates the raw SocketIO payload to ``SocketIOApi.execute``.
    Returns nothing; any response handling happens inside that call.
    """
    SocketIOApi.execute(socketio_request_json)
def GenerateConfig(context):
"""Generates configuration."""
key_ring = {
'name': 'keyRing',
'type': 'gcp-types/cloudkms-v1:projects.locations.keyRings',
'properties': {
'parent': 'projects/' + context.env['project'] + '/locations/' + context.properties['region'],
'keyRingId': context.env['d... | 5,356,814 |
def plot_embedding(X, y, d, title=None, imgName=None):
"""
Plot an embedding X with the class label y colored by the domain d.
:param X: embedding
:param y: label
:param d: domain
:param title: title on the figure
:param imgName: the name of saving image
:return:
"""
if params.... | 5,356,815 |
def get_valid_collapsed_products(products, limit):
"""wraps around collapse_products and respecting a limit
to ensure that uncomplete products are not collapsed
"""
next_min_scanid = get_next_min_scanid(products, limit)
collapsed_products = []
for scanid, scan in groupby(products, itemgetter(... | 5,356,816 |
def get_imd():
"""Fetches data about LA IMD status.
The "local concentration" measure is used -
this gives higher weight to particularly deprived areas
Source: http://www.gov.uk/government/statistics/english-indices-of-deprivation-2019
"""
imd = pd.read_csv(
PROJECT_DIR / "inputs/data/so... | 5,356,817 |
def get_instance_snapshots(pageToken=None):
"""
Returns all instance snapshots for the user's account.
See also: AWS API Documentation
:example: response = client.get_instance_snapshots(
pageToken='string'
)
:type pageToken: string
:param pageToken: A token used f... | 5,356,818 |
def read_bgr(file):
"""指定ファイルからBGRイメージとして読み込む.
# Args:
file: イメージファイル名.
# Returns:
成功したらイメージ、失敗したら None.
"""
return cv2.imread(file, cv2.IMREAD_COLOR) | 5,356,819 |
def _remove_none_from_post_data_additional_rules_list(json):
"""
removes hidden field value from json field "additional_rules" list,
which is there to ensure field exists for editing purposes
:param json: this is data that is going to be posted
"""
data = json
additional_rules = json.get... | 5,356,820 |
def add_email(request, pk):
"""
This Endpoint will add the email id into
the person contact details.
It expects personId in URL param.
"""
try:
request_data = request.data
email = request_data.get("email")
person = Person.objects.filter(id=pk).last()
if email... | 5,356,821 |
def coords_extracter():
"""Exctract coords to send command to robot.
To be executed inside of xarm_hand_control module."""
SKIPPED_COMMANDS = 5
COEFF = 22
current = [0]
def coords_to_command(data: Any):
current[0] += 1
if current[0] < SKIPPED_COMMANDS:
return
... | 5,356,822 |
def NO_MERGE(writer, segments):
    """This policy does not merge any existing segments.

    :param writer: the index writer requesting a merge (unused).
    :param segments: current list of segments.
    :return: ``segments`` unchanged, i.e. no merge is ever performed.
    """
    return segments
def detect_video( yolo, all_classes):
"""Use yolo v3 to detect video.
# Argument:
video: video file.
yolo: YOLO, yolo model.
all_classes: all classes name.
"""
# ipv4 address
ipv4_url = 'http://192.168.171.44:8080'
# read video
cam = f'{ipv4_url}/video'
... | 5,356,824 |
def save_features():
"""
Writes extracted feature vectors into a binary or text file, per args.
:return: none
"""
extractor = args.extractor
features = []
if extractor == 'multi':
features = extract_multi()
elif extractor == 'single':
features = extract_single()
# ... | 5,356,825 |
def build_ntwk(p, s_params):
"""
Construct a network object from the model and
simulation params.
"""
np.random.seed(s_params['RNG_SEED'])
# set membrane properties
n = p['N_PC'] + p['N_INH']
t_m = cc(
[np.repeat(p['T_M_PC'], p['N_PC']), np.repeat(p['T_M_INH'], p['N_INH... | 5,356,826 |
def divideData(data):
"""Given test and validation sets, divide
the data for three different sets"""
testListFile = []
fin = file('data/multi-woz/testListFile.json')
for line in fin:
testListFile.append(line[:-1])
fin.close()
valListFile = []
fin = file('data/multi-woz/valListFi... | 5,356,827 |
def createAaronWorld():
"""
Create an empty world as an example to build future projects from.
"""
# Set up a barebones project
project = makeBasicProject()
# Create sprite sheet for the player sprite
player_sprite_sheet = addSpriteSheet(project, "actor_animated.png", "actor_animated", "act... | 5,356,828 |
def builtin_ljustify(s, w, p):
    """Left-justify a string to a given width with a given padding character."""
    text = s.convert(BStr()).value
    pad_char = p.convert(BStr()).value
    return BStr(text.ljust(w.value, pad_char))
def package_files(directory):
"""package_files
recursive method which will lets you set the
package_data parameter in the setup call.
"""
paths = []
for (path, _, filenames) in os.walk(directory):
for filename in filenames:
paths.append(os.path.join('..', path, filename))
... | 5,356,830 |
def get_effective_router(appname):
    """Returns a private copy of the effective router for the specified application"""
    if routers and appname in routers:
        # Wrap in Storage so callers get a copy they can mutate safely.
        return Storage(routers[appname])
    return None
def build(ctx, cclean=False, sys='ninja'):
"""
Build C++ code and install the artifacts.
"""
if not check_option(sys, ['makefile', 'ninja']):
return
sys_build = {
'makefile' : {
'Generate' : '-G"Unix Makefiles"',
'Install' : 'make install',
},
'ninja' : {
'Generate' : '-G"Ninja" -DCM... | 5,356,832 |
def is_activated(user_id):
    """Checks if a user has activated their account. Returns True or False.

    :param user_id: primary key of the row in ``users`` to look up.
    :return: False when the user's ``inactive`` flag equals 1, True otherwise.
    """
    cur = getDb().cursor()
    try:
        cur.execute('SELECT inactive FROM users where user_id=%s', (user_id,))
        inactive = cur.fetchone()[0]
    finally:
        # Close the cursor even if the query raises (original leaked it).
        cur.close()
    # The original used `inactive is 1`: identity comparison with an int
    # literal only works via CPython's small-int cache and is a
    # SyntaxWarning since Python 3.8. Compare by value instead.
    return inactive != 1
def _encode(dictionary):
"""Encodes any arbitrary dictionary into a pagination token.
Args:
dictionary: (dict) Dictionary to basee64-encode
Returns:
(string) encoded page token representing a page of items
"""
# Strip ugly base64 padding.
return base64.urlsafe_b64encode(json.du... | 5,356,834 |
def remove_duplicates_sharded(
files: List[Path],
outputs: List[Path],
hashes_dir: FilesOrDir,
field: str,
group_hashes: int = 1,
tmp_dir: Path = None,
min_len: int = 0,
):
"""Remove duplicates in several passes, when all hashes don't fit in RAM.
Note: The current implementation is ... | 5,356,835 |
def parse_checkfile(filename):
"""Load a module containing extra Check subclasses. This is a nuisance;
programmatic module loading is different in Py2 and Py3, and it's not
pleasant in either.
"""
global checkfile_counter
modname = '_cc_%d' % (checkfile_counter,)
checkfile_counter += 1
... | 5,356,836 |
def test_downsample_in_time_totally_flagged(hera_uvh5, future_shapes):
"""Test the downsample_in_time method with totally flagged integrations"""
uv_object = hera_uvh5
if future_shapes:
uv_object.use_future_array_shapes()
uv_object.phase_to_time(Time(uv_object.time_array[0], format="jd"))
... | 5,356,837 |
def stop_stream_encryption(StreamName=None, EncryptionType=None, KeyId=None):
"""
Disables server-side encryption for a specified stream.
Stopping encryption is an asynchronous operation. Upon receiving the request, Kinesis Data Streams returns immediately and sets the status of the stream to UPDATING . Aft... | 5,356,838 |
def selectPlate(plates, jdRange, normalise=False, scope='all'):
"""From a list of simulated plates, returns the optimal one."""
# Gets the JD range for the following night
nextNightJDrange = _getNextNightRange(jdRange)
# First we exclude plates without new exposures
plates = [plate for plate in pl... | 5,356,839 |
def scenario_3_2():
"""
Same day, same recipient list, different shift start (for example 6am and 6pm on same day)
Should send email in all cases
"""
shift_date_morning_str = datetime.datetime.strftime(date_utils.key_to_date('2022010200') , date_utils.HOUR_KEY_FMT)
# recepients same as third r... | 5,356,840 |
def test_link_image_layers_all_attributes(key, value):
"""Test linking common attributes across layers of similar types."""
l1 = layers.Image(np.random.rand(10, 10), contrast_limits=(0, 0.8))
l2 = layers.Image(np.random.rand(10, 10), contrast_limits=(0.1, 0.9))
link_layers([l1, l2])
# linking does (... | 5,356,841 |
def get_events(
raw: mne.io.BaseRaw,
event_picks: Union[str, list[str], list[tuple[str, str]]],
) -> tuple[np.ndarray, dict]:
"""Get events from given Raw instance and event id."""
if isinstance(event_picks, str):
event_picks = [event_picks]
events = None
for event_pick in event_picks:
... | 5,356,842 |
def transplant(root, u, v):
    """Replace the subtree rooted at ``u`` with the subtree rooted at ``v``.

    Returns the (possibly new) root: replacing the root itself cannot be
    expressed through a parent pointer, so the caller must rebind it.
    """
    parent = u.parent
    if parent is None:
        root = v
    elif parent.left == u:
        parent.left = v
    else:
        parent.right = v
    if v:
        v.parent = parent
    return root
def is_head_moderator():
"""
Returns true if invoking author is a Head Moderator (role).
"""
async def predicate(ctx: Context):
if not any(config.HEAD_MOD_ROLE in role.id for role in ctx.author.roles):
raise NotStaff("The command `{}` can only be used by a Head Moderator.".format(ctx... | 5,356,844 |
def compute_adj_matrices(type, normalize=True):
"""
Computes adjacency matrices 'n', 'd' or 's' used in GCRAM.
"""
# Get channel names
raw = mne.io.read_raw_edf('dataset/physionet.org/files/eegmmidb/1.0.0/S001/S001R01.edf', preload=True, verbose=False).to_data_frame()
ch_names = raw.columns[2:]... | 5,356,845 |
def build_census_chart(
*, alt, census_floor_df: pd.DataFrame, max_y_axis: Optional[int] = None, use_log_scale: bool = False
) -> Chart:
"""
This builds the "Admitted Patients" census chart, projecting total number of patients in the hospital over time.
Args:
alt: Reference to Altair package.
... | 5,356,846 |
def hasNonAsciiCharacters(sText):
    """
    Returns True if the specified string has non-ASCII characters, False if ASCII only.
    """
    # NOTE(review): Python 2 code — `unicode` does not exist on Python 3, so the
    # str branch (byte strings under py2) only works there; errors='ignore'
    # simply drops undecodable bytes before the scan.
    sTmp = unicode(sText, errors='ignore') if isinstance(sText, str) else sText;
    # Any code point >= 128 means the text is not pure ASCII.
    return not all(ord(ch) < 128 for ch in sTmp);
def draw_matches(image_1, image_1_keypoints, image_2, image_2_keypoints, matches):
""" Draws the matches between the image_1 and image_2.
(Credit: GT CP2017 course provided source)
Params:
image_1: The first image (can be color or grayscale).
image_1_keypoints: The image_1 keypoints.
... | 5,356,848 |
def copy_ttl_in():
    """
    COPY_TTL_IN Action

    Builds the action via the module-level ``_action`` helper; takes no
    arguments and has no side effects.
    """
    return _action("COPY_TTL_IN")
def neuron_weight(
layer: str,
weight: torch.Tensor,
x: Optional[int] = None,
y: Optional[int] = None,
batch: Optional[int] = None,
) -> Objective:
"""Linearly weighted channel activation at one location as objective
:param layer: Name of the layer
:type layer: str
:param weight... | 5,356,850 |
def off():
    """Set all LEDs to 0/off"""
    # NOTE(review): `all` here is the LED driver's helper, shadowing the
    # builtin (builtin all() would reject a bare int) — presumably brought in
    # by a star import; `show` then pushes the cleared state to the hardware.
    all(0)
    show()
def genargs() -> ArgumentParser:
"""
Generate an input string parser
:return: parser
"""
parser = ArgumentParser()
parser.add_argument("indir", help="Location of input shexj files")
parser.add_argument("outdir", help="Location of output shexc files")
parser.add_argument("-s", "--save", h... | 5,356,852 |
def backup_plugin():
    """
    Backup scraping-prod mongodb
    """
    # Delegates to the shared backup() helper with the 'plugin' database label.
    backup('plugin')
def _find_and_remove_value_info_by_name(graph, name):
"""Find and remove value info in a graph by name.
"""
for value_info in graph.value_info:
if value_info.name == name:
graph.value_info.remove(value_info)
break | 5,356,854 |
def get_model_name(factory_class):
    """Get model fixture name by factory."""
    model = factory_class._meta.model
    # Factories may reference their model lazily by dotted string; pass
    # that through untouched, otherwise snake-case the class name.
    if isinstance(model, str):
        return model
    return inflection.underscore(model.__name__)
def validate_and_filter_args(cls_list, ns):
"""
For each CONDITIONAL item found,
if conditional holds true, then argument must be set
else, then argument shouldn't be changed (value should be None or default)
"""
d = ns.__dict__.copy()
n_errs = [0]
def set_error(s):
... | 5,356,856 |
def keywords(kwarg1=None, kwarg2=None):
    """ Test function for live kwargs | str --> None
    Copy paste following to test:
    arg1 = foo, arg2 = bar
    """
    message = 'Keywords: ' + kwarg2 + ' ' + kwarg1
    print(message)
def GetSpatialFeatures(img, size=(32, 32), isFeatureVector=True):
""" Extracts spatial features of the image.
param: img: Source image
param: size: Target image size
param: isFeatureVector: Indication if the result needs to be unrolled into a feature vector
returns: Spatial features
"""
res... | 5,356,858 |
def html_to_text(content):
"""Filter out HTML from the text."""
text = content['text']
try:
text = html.document_fromstring(text).text_content()
except etree.Error as e:
logging.error(
'Syntax error while processing {}: {}\n\n'
'Falling back to regexes'.format(te... | 5,356,859 |
def backup_local(config):
""" Creates a local backup of the local folder
"""
for folder in config.folders:
folder_path = getrealhome(folder['path'])
backup_path = folder_path + ".backup"
gui.debug("Backing up from " + folder_path + " to " + backup_path)
if not os.path.exists(... | 5,356,860 |
async def get_robot_positions() -> control.RobotPositionsResponse:
"""
Positions determined experimentally by issuing move commands. Change
pipette position offsets the mount to the left or right such that a user
can easily access the pipette mount screws with a screwdriver. Attach tip
position plac... | 5,356,861 |
def compute_basis(normal):
""" Compute an orthonormal basis for a vector. """
u = [0.0, 0.0, 0.0]
v = [0.0, 0.0, 0.0]
u[0] = -normal[1]
u[1] = normal[0]
u[2] = 0.0
if ((u[0] == 0.0) and (u[1] == 0.0)):
u[0] = 1.0
mag = vector_mag(u)
if (mag == 0.0):
return
for ... | 5,356,862 |
def do_LEE_correction(max_local_sig, u1, u2, exp_phi_1, exp_phi_2):
"""
Return the global p-value for an observed local significance
after correcting for the look-elsewhere effect
given expected Euler characteristic exp_phi_1 above level u1
and exp_phi_2 above level u2
"""
n1, n2 = get_coefficient... | 5,356,863 |
def _check_password(request, mail_pass, uid):
"""
[メソッド概要]
パスワードチェック
"""
error_msg = {}
if len(mail_pass) <= 0:
error_msg['mailPw'] = get_message('MOSJA10004', request.user.get_lang_mode())
logger.user_log('LOSI10012', request=request)
logger.logic_log('LOSM17015', re... | 5,356,864 |
def localize():
"""Return to using page-specific workers for copyvio checks.
This disables changes made by :func:`globalize`, including stoping the
global worker threads.
This function is not thread-safe and should only be called when no checks
are being done.
"""
global _is_globalized, _g... | 5,356,865 |
def _read_wb_indicator(indicator: str, start: int, end: int) -> pd.DataFrame:
    """Load a cached World Bank indicator from its feather file."""
    file_name = rf"/{indicator}_{start}_{end}.feather"
    return pd.read_feather(config.paths.data + file_name)
def pr_branches() -> list[str]:
"""List of branches that start with 'pr-'"""
out = subprocess.run(
[
"git",
"for-each-ref",
"--shell",
'--format="%(refname:strip=3)"',
"refs/remotes/origin/pr-*",
],
capture_output=True,
)
... | 5,356,867 |
def scatter_raster_plot(spike_amps, spike_depths, spike_times, n_amp_bins=10, cmap='BuPu',
subsample_factor=100, display=False):
"""
Prepare data for 2D raster plot of spikes with colour and size indicative of spike amplitude
:param spike_amps:
:param spike_depths:
:p... | 5,356,868 |
def is_pack_real(*args):
    """
    is_pack_real(F) -> bool
    'FF_PACKREAL'
    @param F (C++: flags_t)
    """
    # SWIG-generated thin wrapper: forwards directly to the native
    # IDA implementation in _ida_bytes.
    return _ida_bytes.is_pack_real(*args)
def _get_sim205(node: ast.UnaryOp) -> List[Tuple[int, int, str]]:
"""Get a list of all calls of the type "not (a <= b)"."""
errors: List[Tuple[int, int, str]] = []
if (
not isinstance(node.op, ast.Not)
or not isinstance(node.operand, ast.Compare)
or len(node.operand.ops) != 1
... | 5,356,870 |
def db_table_update_column(db_name,table_name,key,values,key_ref,values_ref):
"""Update key values in table where key_ref = values_ref."""
db_commands = []
for i in range(len(values)):
value = values[i]
value_ref = values_ref[i]
db_commands.append(('UPDATE "{}" SET {}={} WHERE {}={}'... | 5,356,871 |
def parse_boolean(val: str) -> Union[str, bool]:
"""Try to parse a string into boolean.
The string is returned as-is if it does not look like a boolean value.
"""
val = val.lower()
if val in ('y', 'yes', 't', 'true', 'on', '1'):
return True
if val in ('n', 'no', 'f', 'false', 'off', '0'... | 5,356,872 |
def load_users(usertable):
"""
`usertable` is the path to a CSV with the following fields:
user.*
account.organisation
SELECT user.*, account.organisation FROM user LEFT JOIN account ON user.account_id = account.id;
"""
users = []
with open(usertable) as f:
reader = cs... | 5,356,873 |
def get_progress_status_view(request):
"""Get progress status of a given task
Each submitted task is identified by an ID defined when the task is created
"""
if 'progress_id' not in request.params:
raise HTTPBadRequest("Missing argument")
return get_progress_status(request.params['progress_... | 5,356,874 |
def test_discrete_time_zeeman_switchoff_only():
"""
Check that switching off a field works even if no dt_update is
given (i.e. the field is just a pulse that is switched off after a
while).
"""
field_expr = df.Expression(("1", "2", "3"), degree=1)
H_ext = DiscreteTimeZeeman(field_expr, dt_up... | 5,356,875 |
async def test_unsupported_condition_icon_data(hass):
    """Test with unsupported condition icon data."""
    # Set up the integration with a forecast whose icon code is unknown.
    await init_integration(hass, forecast=True, unsupported_icon=True)
    state = hass.states.get("weather.home")
    # An unknown icon must yield no forecast condition rather than a bogus one.
    assert state.attributes.get(ATTR_FORECAST_CONDITION) is None
def pretty_param_string(param_ids: "collection") -> str:
    """Creates a nice string showing the parameters in the given collection"""
    ordered = sorted(param_ids, key=utilize_params_util.order_param_id)
    return ' '.join(ordered)
def _center_crop(image, size):
"""Crops to center of image with specified `size`."""
# Reference: https://github.com/mlperf/inference/blob/master/v0.5/classification_and_detection/python/dataset.py#L144 # pylint: disable=line-too-long
height = tf.shape(image)[0]
width = tf.shape(image)[1]
out_height = size... | 5,356,878 |
def resample(ts, values, num_samples):
    """Convert a list of times and a list of values to evenly spaced samples with linear interpolation"""
    # Timestamps must be strictly increasing for interpolation to be valid.
    assert np.all(np.diff(ts) > 0)
    scaled_ts = normalize(ts)
    sample_points = np.linspace(0.0, 1.0, num_samples)
    return np.interp(sample_points, scaled_ts, values)
def main():
    """ Main Prog

    Prints verses counting down from args.num to 1, separated by blank lines.
    """
    args = get_args()
    # for i in range(args.num, 0, -1):
    #     print(verse(i))
    print('\n\n'.join(map(verse, range(args.num, 0, -1))))
    # # Alternative way
    # for n in range(args.num, 0, -1):
    #     print(verse(n), end='\n' * (2 if n > 1 else 1))
def test_get_encoder_for(solver_z, D_hat, algorithm, loss,
uv_constraint, feasible_evaluation):
"""Test for valid values."""
with get_z_encoder_for(solver=solver_z,
X=X,
D_hat=D_hat,
n_atoms=N_ATOMS,
... | 5,356,881 |
def merge_align_moa(data_dir, cp_moa_link):
"""
This function aligns L1000 MOAs with the cell painting MOAs
and further fill null MOAs in one of the them (cell painting or L1000)
with another, so far they are of the same broad sample ID.
The function outputs aligned L1000 MOA metadata datafram... | 5,356,882 |
def supports_color(stream) -> bool:  # type: ignore
    """Determine whether an output stream (e.g. stdout/stderr) supports displaying colored text.

    A stream that is redirected to a file does not support color.

    :param stream: any file-like object; may lack an ``isatty`` method.
    :return: True only when the stream has ``isatty`` and it reports a TTY.
    """
    # Check hasattr BEFORE calling isatty. The original evaluated
    # stream.isatty() first, so the hasattr guard could never prevent the
    # AttributeError it was meant to guard against.
    return hasattr(stream, "isatty") and stream.isatty()
def flush():
    """Flush changes made to clusto objects to the database."""
    # NOTE(review): SESSION appears to be a SQLAlchemy-style session object
    # shared at module level — confirm against the module's imports.
    SESSION.flush()
def parse_repo_links(
html: Union[str, bytes],
base_url: Optional[str] = None,
from_encoding: Optional[str] = None,
) -> Tuple[Dict[str, str], List[Link]]:
"""
.. versionadded:: 0.7.0
Parse an HTML page from a simple repository and return a ``(metadata,
links)`` pair.
The ``metadata`` ... | 5,356,885 |
def process_page_metadata(generator, metadata):
"""
Process page metadata and assign css
"""
global bnews_default_settings, bnews_settings
# Inject article listing
article_listing = bnews_settings['articles']
bnews_settings = copy.deepcopy(bnews_default_settings)
bnews_settings['artic... | 5,356,886 |
def calc_kappa4Franci(T_K, a_H, a_H2CO3s):
"""
Calculates kappa4 in the PWP equation using approach from Franci's code.
Parameters
----------
T_K : float
temperature Kelvin
a_H : float
activity of hydrogen (mol/L)
a_H2CO3s : float
activity of carbonic acid (mol/L)
... | 5,356,887 |
def get_candidates_from_single_line(single_line_address, out_spatial_reference, max_locations):
""" parses the single line address and passes it to the AGRC geocoding service
and then returns the results as an array of candidates
"""
try:
parsed_address = Address(single_line_address)
except... | 5,356,888 |
def create_store():
    """Gathers all the necessary info to create a new store.

    Prompts the user for a store name and returns a new receipt.Store.
    """
    print("What is the name of the store?")
    # NOTE(review): raw_input is Python 2 only — this module targets py2.
    store_name = raw_input('> ')
    return receipt.Store(store_name)
def set_retention_policy(bucket_name, retention_period):
"""Defines a retention policy on a given bucket"""
# [START storage_set_retention_policy]
# bucket_name = "my-bucket"
# retention_period = 10
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
bucket.retent... | 5,356,890 |
def vgg11_bn_vib(cutting_layer, logger, num_client = 1, num_class = 10, initialize_different = False, adds_bottleneck = False, bottleneck_option = "C8S1"):
"""VGG 11-layer model (configuration "A") with batch normalization"""
return VGG_vib(make_layers(cutting_layer,cfg['A'], batch_norm=True, adds_bottleneck = ... | 5,356,891 |
def test_command_line_tool_activate_server(mock_main_runner, mock_requests):
"""Test activating a server"""
runner = mock_main_runner
context = mock_main_runner.get_context()
assert context.get_active_server().name == "testserver"
result = runner.invoke(
entrypoint.cli, "server activate tes... | 5,356,892 |
def pytest_sessionstart(session):
"""
pytest hook to configure plugin.
"""
config = session.config
# Get registered options
platform = config.getoption('--topology-platform')
plot_format = config.getoption('--topology-plot-format')
plot_dir = config.getoption('--topology-plot-dir')
n... | 5,356,893 |
def test_synchrotron_lum(particle_dists):
"""
test sync calculation
"""
from ..models import Synchrotron
ECPL,PL,BPL = particle_dists
lum_ref = [2.523130675e-04,
1.689956354e-02,
3.118110763e-04]
We_ref = [8.782070535e+09,
1.443896523e+10,
... | 5,356,894 |
def perform_init_checks(wrapper, input_list, input_mapper, in_output_list, output_list,
param_list, mapper_list, short_name, level_names):
"""Perform checks on objects created by running or slicing an indicator."""
if input_mapper is not None:
checks.assert_equal(input_mapper.sha... | 5,356,895 |
def get_conf_paths(project_metadata):
"""
Get conf paths using the default kedro patterns, and the CONF_ROOT
directory set in the projects settings.py
"""
configure_project(project_metadata.package_name)
session = KedroSession.create(project_metadata.package_name)
_activate_session(session, ... | 5,356,896 |
def _mat_ptrs(a):
    """Creates an array of pointers to matrices
    Args:
        a: A batch of matrices on GPU
    Returns:
        GPU array of pointers to matrices
    """
    # Consecutive matrices in the batch start strides[0] bytes apart, so the
    # device addresses are a.ptr, a.ptr + strides[0], ... for shape[0]
    # matrices, stored as C void pointers.
    # NOTE(review): presumably consumed by a batched BLAS routine — confirm.
    return cuda.to_gpu(numpy.arange(
        a.ptr, a.ptr + a.shape[0] * a.strides[0], a.strides[0],
        dtype=ctypes.c_void_p))
def file_upload_quota_broken(request):
    """
    You can't change handlers after reading FILES; this view shouldn't work.
    """
    # FILES is consumed inside file_upload_echo, so the handler insertion
    # below happens too late — this view deliberately exercises that failure.
    response = file_upload_echo(request)
    request.upload_handlers.insert(0, QuotaUploadHandler())
    return response
def print_error(e, print_traceback=False):
    """Logs error to stdout, so it's not only shown to the user through streamlit."""
    separator = "=" * 80
    print()
    print(separator)
    print(f"ERROR for user {username}:", e)
    if print_traceback:
        print()
        traceback.print_exc()
    print(separator)
    print()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.