content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def select_organization(cursor):
    """Fetch every row of the ``organization`` table.

    Args:
        cursor (mysql.connector.cursor): an open database cursor

    Returns:
        list: all rows returned by the SELECT, ordered by organization_id
    """
    query = 'SELECT * FROM organization ORDER BY organization_id'
    cursor.execute(query)
    return cursor.fetchall()
def _GetInstDisk(index, cb):
"""Build function for calling another function with an instance Disk.
@type index: int
@param index: Disk index
@type cb: callable
@param cb: Callback
"""
def fn(ctx, inst):
"""Call helper function with instance Disk.
@type ctx: L{InstanceQueryData}
@type inst: ... | 5,355,501 |
def Jphii_cal(L, W, q, xi_local):
    """Jacobian matrix of the task map (original: タスク写像のヤコビ行列).

    ``L`` and ``W`` are accepted for signature compatibility but unused here.
    ``q[2, 0]`` is the body orientation; ``xi_local`` is the 2x1 point in the
    body frame.
    """
    theta = q[2, 0]
    x_l = xi_local[0, 0]
    y_l = xi_local[1, 0]
    s, c = sin(theta), cos(theta)
    row0 = [1, 0, -s * x_l - c * y_l]
    row1 = [0, 1, c * x_l - s * y_l]
    return np.array([row0, row1], dtype=np.float32)
def model_evalution(test_data):
    """Compute the model's accuracy over the given dataset.

    :param test_data: iterable of ``(X, y)`` batches to evaluate on
    :return: accumulated accuracy as a plain ``float``
    """
    # BUG FIX: the original iterated the module-level ``val_data`` and
    # silently ignored its ``test_data`` parameter. Evaluate what was passed.
    for X_test, y_test in test_data:
        y_pred = model(X_test, training=False)
        val_acc_metrics.update_state(y_test, y_pred)
    accuracy = val_acc_metrics.result()
    # NOTE(review): ``val_acc_metrics`` state is never reset here, so repeated
    # calls accumulate — confirm callers invoke reset_states() between runs.
    return float(accuracy)
def main():
"""
Main function
:return: None
"""
# read the netlist
net = kicad_netlist_reader.netlist(sys.argv[1])
# extract boms from netlist
boms = extract_boms(net)
# write boms to csv files
for sup, comps in boms.items():
num_comps = len(comps)
if num_com... | 5,355,504 |
def make_python_script_from_list(list_optical_elements1,script_file=""):
"""
program to build automatically a python script to run shadow3
the system is read from a list of instances of Shadow.Source and Shadow.OE
:argument list of optical_elements A python list with intances of Shadow.Source and Shad... | 5,355,505 |
def flax_tag(arr):
    """Wraps a value in a flax module, to inspect intermediate values.

    NOTE(review): as written this is an identity function — ``arr`` is
    returned unchanged. Presumably it exists as a named hook point for
    flax's intermediate-capture machinery; confirm against callers.
    """
    return arr
def toEpoch(dateTimeObject=None):
    """
    Get seconds since the Unix epoch.

    :param dateTimeObject: datetime to convert; when None, the current time
        (via ``dateTime()``) is used
    :return: seconds since epoch, as returned by ``time.mktime``
    """
    # Use identity comparison with None (PEP 8) instead of ``== None``.
    if dateTimeObject is None:
        dateTimeObject = dateTime()
    return nativetime.mktime(dateTimeObject.timetuple())
def sendNotification(token, title, message, extraData=None, channelID=None):
"""
send Notification to Devices
:param token:
:param title:
:param message:
:return:
"""
url = 'https://exp.host/--/api/v2/push/send'
headers = {
"Content-Type": "application/json"
}
data... | 5,355,508 |
def make_phsfct_kernel(size_px, dpx, g_fac):
"""
Make a kernel for phase function convolution
:param size_px:
:param dpx: [deg/px]
:param g_fac:
:return: ph_ker [deg]
"""
ke = np.mgrid[:size_px, :size_px]
half = (size_px - 1) / 2
ke[0] -= half
ke[1] -= half
dist = np.sqrt... | 5,355,509 |
def sample_pts_ellipsoid_surface(mu, Q, NB_pts, random=True):
"""
Uniformly samples points on the surface of an ellipsoid, specified as
(xi-mu)^T Q^{-1} (xi-mu) == 1
arguments: mu - mean [dim]
Q - Q [dim x dim]
NB_pts - nb of points
random - True... | 5,355,510 |
def load_mzml_path():
"""Return the path to the mzML toy file.
Parameters
----------
None
Returns
-------
path_data : str
The path to the mzML data.
Examples
--------
>>> from specio.datasets import load_mzml_path
>>> load_mzml_path() # doctest: +ELLIPSIS
'...s... | 5,355,511 |
def setStrictCheckingFlag( newValue=True ):
    """
    Set the module-level strict checking flag.

    :param newValue: new value for the flag (defaults to True)
    """
    global strictCheckingFlag
    strictCheckingFlag = newValue
    # Log the updated value through the module's verbose debug channel.
    dPrint( 'Verbose', debuggingThisModule, ' strictCheckingFlag =', strictCheckingFlag )
def _czce_df_read(url, skip_rows, encoding='utf-8', header=0):
"""
郑州商品交易所的网页数据
:param header:
:type header:
:param url: 网站 string
:param skip_rows: 去掉前几行 int
:param encoding: utf-8 or gbk or gb2312
:return: pd.DataFrame
"""
headers = {
"Accept": "text/html,application/xh... | 5,355,513 |
def select(weights):
    """Pick an index at random, with probability proportional to its weight.

    Raises RuntimeError if the running total never reaches the sampled
    threshold (e.g. floating-point roundoff or an empty/invalid input).
    """
    threshold = random.random() * sum(weights)
    cumulative = 0.0
    index = 0
    for weight in weights:
        cumulative += weight
        if threshold <= cumulative:
            return index
        index += 1
    raise RuntimeError("select WTF from %s" % weights)
def kexo(spacecraft_id, sensor_id, band_id):
"""Sun exo-atmospheric irridiance [W/m2/sr]
This is used for processing surface reflectance.
Spacecraft_id: Landsat7
Sensor_id: ETM+
band_id: band1, band2, band3, band4, band5, band7, band8
Spacecraft_id: Terra
Sensor_id: Aster
band_id: band1, band2, band3, ban... | 5,355,515 |
def gen_profile_id(profile_id):
    """
    Build the Elasticsearch document id for a profile.

    Args:
        profile_id (str): The username of a Profile object

    Returns:
        str: the document id, which is the username prefixed with ``u_``
    """
    return f"u_{profile_id}"
def test_netconf_get_config_subtree(nornir):
"""Test filter subtree of get_config."""
nr = nornir.filter(name="netconf1")
assert nr.inventory.hosts
result = nr.run(
netconf_get_config,
source="startup",
path="<keystore xmlns='urn:ietf:params:xml:ns:yang:ietf-keystore'><asymmetri... | 5,355,517 |
def check_config():
"""
Check required fields are present in config.
"""
sections = [{'name': 'assembly',
'keys': ['accession', 'prefix', 'alias', 'span'],
'defaults': {'accession': 'draft', 'alias': '==prefix'}},
{'name': 'busco',
'keys... | 5,355,518 |
async def get_self_info(credential: Credential):
    """
    Fetch the authenticated user's own profile information.

    Args:
        credential (Credential): Credential; must carry SESSDATA
    """
    # Fail fast if the credential lacks session data.
    credential.raise_for_no_sessdata()
    endpoint = API["info"]["my_info"]
    return await request("GET", endpoint["url"], credential=credential)
def workflow_workflows(ctx, sessions, _filter, output_format, access_token,
show_all, verbose):
"""List all workflows user has."""
logging.debug('command: {}'.format(ctx.command_path.replace(" ", ".")))
for p in ctx.params:
logging.debug('{param}: {value}'.format(param=p, valu... | 5,355,520 |
def app(request):
"""Testable flask application"""
_app.config.from_mapping(
TESTING=True,
SECRET_KEY=os.environ.get('SECRET_KEY'),
SQLALCHEMY_DATABASE_URI=os.getenv('TEST_DATABASE_URL'),
SQLALCHEMY_TRACK_MODIFICATIONS=False,
WTF_CSRF_ENABLED=False
)
ctx = _app.a... | 5,355,521 |
def _wct_test(name, srcs, split_index, split_count):
"""Macro to define single WCT suite
Defines a private macro for a portion of test files with split_index.
The actual split happens in test/tests.js file
Args:
name: name of generated sh_test
srcs: source files
split_index: in... | 5,355,522 |
def mpl_event_handler(event_type: MplEvent):
"""Marks the decorated method as given matplotlib event handler
.. note::
This decorator should be used only for methods of classes that
inherited from :class:`MplEventDispatcher` class.
This decorator can be used for reassignment event handlers... | 5,355,523 |
def get_date_pairs(in_dates, step):
"""
入场点出场点数据
:param in_dates: 所有入场日期
:param step: 步长
:return:
"""
DatePair = namedtuple('DatePair', ['in_date', 'out_date'])
date_pairs = []
for in_date in in_dates:
out_date = date_utility.date_cal(in_date, step)
date_pairs.append(... | 5,355,524 |
def new_jitters(jitter):
"""
update jitter vector every 100 frames by setting ~half of noise vector units to lower sensitivity
"""
jitters=np.zeros(128)
for j in range(128):
if random.uniform(0,1)<0.5:
jitters[j]=1
else:
jitters[j]=1-jitter
return... | 5,355,525 |
def test_owe_groups(dev, apdev):
"""Opportunistic Wireless Encryption - DH groups"""
if "OWE" not in dev[0].get_capability("key_mgmt"):
raise HwsimSkip("OWE not supported")
params = { "ssid": "owe",
"wpa": "2",
"wpa_key_mgmt": "OWE",
"rsn_pairwise": "CCMP... | 5,355,526 |
def reduce_jscs(file_line_mapping, **extra):
"""
Runs JSHCS on the project with the default configured rules. The output
is reduced to only contain entries from the Git change set.
:param file_line_mapping: Mapping of files with changed lines (obtained
`get_git_line_sets()`).
:param extra: ... | 5,355,527 |
def add_manuscript_urls_to_ci_params(ci_params):
"""
Return and edit in-place the ci_params dictionary to include 'manuscript_url'.
This function assumes Travis CI is used to deploy to GitHub Pages, while
AppVeyor is used for storing manuscript artifacts for pull request builds.
"""
if not ci_pa... | 5,355,528 |
def station_stats(df):
"""Displays statistics on the most popular stations and trip."""
print('\nCalculating The Most Popular Stations and Trip...\n')
start_time = time.time()
# display most commonly used start station
start_station = get_most_common_idx_and_val(df['Start Station'])
# display... | 5,355,529 |
def count_sites(vcfpath):
    """Extract the number of sites in a VCF from its tabix index.

    :param vcfpath: path to an indexed VCF file
    :return: record count reported by ``bcftools index --nrecords``
    :raises RuntimeError: if bcftools exits with a nonzero status
    """
    cmd = ["bcftools", "index", "--nrecords", vcfpath]
    so, se, code = slurp_command(cmd)
    # Previously a failing bcftools run fell through to int() and produced a
    # confusing ValueError; surface the real failure instead.
    if code != 0:
        raise RuntimeError("bcftools index failed (exit %s): %s" % (code, se))
    return int(so)
def test_check_compability(qtbot, setup_reports, monkeypatch):
"""Test state and message returned by check_compatibility."""
monkeypatch.setattr('spyder_reports.reportsplugin.PYQT4', True)
reports = setup_reports
valid, message = reports.check_compatibility()
assert not valid
assert 'qt4' in m... | 5,355,531 |
def model_contrast_score(overlays: torch.Tensor, masks: torch.Tensor, object_labels: torch.Tensor,
scene_labels: torch.Tensor, object_model: Callable, scene_model: Callable,
object_method: Callable, scene_method: Callable, device: str):
"""
Model contrast score:... | 5,355,532 |
def get_network_insights_access_scope_analysis(network_insights_access_scope_analysis_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkInsightsAccessScopeAnalysisResult:
"""
Resource schema for AWS::EC2::NetworkInsightsAc... | 5,355,533 |
def learnable_eval(
cfg: OmegaConf, classifier, encoder: ContrastiveModel, training_data_loader: DataLoader,
val_data_loader: DataLoader, top_k: int,
) -> tuple:
"""
:param cfg: Hydra's config instance.
:param classifier: Instance of classifier with learnable parameters.
:param encoder: ... | 5,355,534 |
def wavenumber(src, rec, depth, res, freq, wavenumber, ab=11, aniso=None,
epermH=None, epermV=None, mpermH=None, mpermV=None, verb=2):
"""Return the electromagnetic wavenumber-domain field.
Calculate the electromagnetic wavenumber-domain field due to infinitesimal
small electric or magnetic ... | 5,355,535 |
def gumbel_softmax(logits, temperature, dtype=tf.float32, seed=0):
"""Gumbel Softmax Layer."""
log_alpha = tf.nn.log_softmax(logits)
eps = 1e-7
gumbel = -tf.log(-tf.log(
tf.random_uniform(
tf.shape(logits), minval=0, maxval=1 - eps, dtype=dtype, seed=seed) +
eps))
prob = tf.nn.softmax((log_alpha + gumbe... | 5,355,536 |
def divide(num1, num2=1):
    """
    Divide ``num1`` by ``num2``.

    :param num1: int
    :param num2: int (defaults to 1)
    :return: float quotient
    :raises InvalidOpreation: when ``num2`` is zero
    """
    # Guard against division by zero with the project's custom exception.
    if num2 == 0:
        raise InvalidOpreation()
    return num1 / num2
def convert2int(image):
    """ Transform a float tensor in [-1., 1.] to a rescaled intensity image.

    NOTE(review): the docstring originally claimed an output range of
    [-1024, 6500], but the affine map ``(image + 1) * 2036 - 1000`` yields
    [-1000, 3072] for inputs in [-1, 1], and despite the name the result is
    kept as ``tf.float32`` — confirm the intended range and dtype.
    """
    return tf.image.convert_image_dtype((image + 1) * 2036 - 1000, tf.float32)
def test_stationary_component():
"""
Integrated test which fits a single component to a synthetic association.
Runtime on my mac (single thread) is ~ 20 mins. Check logs/groupfitter.log
and temp_plots/*.png for progress.
Takes about 10 mins single thread with C implementation of overlap
or ~40... | 5,355,539 |
def main(args):
"""Main entry point"""
args.archive = expand_path(args.archive)
args.files = expand_path(args.files)
def additions(search_path):
"""Generate a list of (lpath, arcname) for writing to zip-file"""
aname = Path(args.archive).stem
for root, _, files in os.walk(sear... | 5,355,540 |
def execution_duration(fun):
"""
Calculates the duration the function 'fun' takes to execute.
execution_duration returns a wrapper function to which you pass your arguments.
Example: execution_duration(my_function)(my_first_param, my_second_param)
The result of the wrapper function will be a tuple... | 5,355,541 |
def return_origin_and_destination():
"""Return origin and destination from session's waypoints key."""
waypoints = session['waypoints']
if len(waypoints) <= 1:
return 'Please enter at least 2 destinations for your trip.'
else:
origin = session['waypoints'][0]
destination = sess... | 5,355,542 |
def attempt_move(piece):
"""
Attempts to make a move if the target coordinate is a legal move.
Returns:
True if the move is made, False otherwise
"""
x, y = pygame.mouse.get_pos()
x = x // 100
y = y // 100
if (piece is not None) and (x, y) in piece.legal_moves:
piece.move... | 5,355,543 |
def _plot_thresholds(thresholds, ax):
""" Plot horizontal lines with threshold levels
"""
if "WhoDaily" in thresholds:
ax.axhline(y=thresholds["WhoDaily"], color='red',
linestyle='--', label="WHO daily threshold", linewidth=1)
if "WhoYearly" in thresholds:
ax.ax... | 5,355,544 |
def give(user_id, text, group):
"""construct a message to be sent that mentions a user,
which is surprisingly complicated with GroupMe"""
nickname = group.members().filter(user_id=user_id).first.nickname
mention = attachments.Mentions([user_id], [[0, len(nickname)+1]]).as_dict()
message = '@{}... | 5,355,545 |
def integrate_audio_feat(features, audio_h5, mxm2msd):
"""
"""
# TODO: this part should be moved to MFCC feature extraction
# and stored in the feature file for better integrity
n_coeffs = 40
audio_feat_cols = (
['mean_mfcc{:d}'.format(i) for i in range(n_coeffs)] +
['var_m... | 5,355,546 |
def retrieve_submodule():
    """Fixture to get all the CONP datasets before a test.

    Collects the paths of every git submodule of the current repository into
    ``pytest.datasets``, then yields control to the test body.
    """
    # NOTE(review): stashing state on the ``pytest`` module object is unusual;
    # confirm tests read ``pytest.datasets`` rather than a fixture return value.
    pytest.datasets = {x.path for x in git.Repo(".").submodules}
    yield
def make_annotation_loader_factory():
"""Generate a factory function for constructing annotation loaders.
Invoke the returned factory function by passing the name of the annotation
loader class you want to construct, followed by the parameters for the
constructor as named arguments
(e.g., factory('... | 5,355,548 |
def test_arma():
"""arma, check that rho is correct (appendix 10.A )and reproduce figure 10.2"""
a,b, rho = arma_estimate(marple_data, 20, 20, 40)
psd = arma2psd(A=a,B=b, rho=rho, NFFT=None)
psd = arma2psd(A=a,B=b, rho=rho)
try:
psd = arma2psd(A=None, B=None, rho=rho)
assert False
... | 5,355,549 |
def chunks_lists_to_tuples(level: Union[list, int, float]) -> Union[tuple, int, float]:
"""Convert a recursive list of lists of ints into a tuple of tuples of ints. This is
a helper function needed because MongoDB automatically converts tuples to lists, but
the dask constructor wants the chunks defined stri... | 5,355,550 |
def oid_pattern_specificity(pattern):
# type: (str) -> Tuple[int, Tuple[int, ...]]
"""Return a measure of the specificity of an OID pattern.
Suitable for use as a key function when sorting OID patterns.
"""
wildcard_key = -1 # Must be less than all digits, so that e.G. '1.*' is less specific than ... | 5,355,551 |
def test_pearl_ml10():
"""Test pearl_ml10.py"""
assert subprocess.run([
EXAMPLES_ROOT_DIR / 'torch/pearl_ml10.py', '--num_epochs', '1',
'--num_train_tasks', '1', '--num_test_tasks', '1',
'--encoder_hidden_size', '1', '--net_size', '2',
'--num_steps_per_epoch', '2', '--num_initial... | 5,355,552 |
def plot_metrics(history, path):
"""
# Notes
Plots the metrics of the history of a classifier.
# Arguments
- history: history created from calling clf.fit(X, Y) of a classifier.
- path: string representing the path where to save this figure.
# Source
https://www.tensorflo... | 5,355,553 |
def bucket_list(ctx, namespace, compartment_id, limit, page):
"""
Lists the `BucketSummary`s in a namespace. A `BucketSummary` contains only summary fields for the bucket
and not fields such as the user-defined metadata.
Example:
bmcs os bucket list -ns mynamespace --compartment-id ocid1.compar... | 5,355,554 |
def import_folder():
"""
This test will build a H2O frame from importing the bigdata/laptop/parser/orc/airlines_05p_orc_csv
from and build another H2O frame from the multi-file orc parser using multiple orc files that are
saved in the directory bigdata/laptop/parser/orc/airlines_05p_orc. It will compar... | 5,355,555 |
def pronunciation_assessment_continuous_from_file(question_num):
"""performs continuous speech recognition asynchronously with input from an audio file"""
import difflib
import json
# Creates an instance of a speech config with specified subscription key and service region.
# Replace with y... | 5,355,556 |
def extract_entities(text, json_={}):
"""
Extract entities from a given text using metamap and
generate a json, preserving infro regarding the sentence
of each entity that was found. For the time being, we preserve
both concepts and the entities related to them
Input:
- text: str,
... | 5,355,557 |
def gaul_as_df(gaul_path):
    """
    Load the Gaussian list output by PyBDSF as a pd.DataFrame

    Args:
        gaul_path (`str`): Path to Gaussian list (.gaul file)

    Returns:
        pd.DataFrame: one row per Gaussian, columns named per ``GAUL_COLUMNS``
    """
    # skiprows=6 skips the PyBDSF .gaul header block.
    # sep=r"\s+" replaces delim_whitespace=True, which is deprecated in
    # pandas >= 2.2 and removed in 3.0; the parsing behavior is identical.
    gaul_df = pd.read_csv(
        gaul_path, skiprows=6, names=GAUL_COLUMNS, sep=r"\s+",
    )
    return gaul_df
def dump():
"""Displays the state of memory/CPU"""
print(end='\n')
for row in range(10):
for col in range(10):
address = str(row * 10 + col).rjust(2)
numeric = '[' + str(readMem(int(address))).ljust(3) + ']'
print(address + numeric, end=" ")
print(end='\n'... | 5,355,559 |
def setup(bot):
"""Set up the Blood on the Clocktower extension."""
# set up persistent botc town square category settings
bot.botc_townsquare_settings = DiscordIDSettings(
bot, "botc_townsquare", BOTC_CATEGORY_DEFAULT_SETTINGS
)
# set up town square object
bot.botc_townsquare = BOTCTown... | 5,355,560 |
def psql(riemann_host, riemann_port, sqlquery):
"""monitor query from a postgresql database"""
logging.basicConfig(level=logging.INFO)
logger.info("version %s starting", util.get_version())
util.watch_report_loop(
lambda: bernhard.Client(riemann_host, riemann_port),
functools.partial(wa... | 5,355,561 |
def normalize_matrix(mat, dim=3, p=2):
    """Return ``mat`` normalized by its Lp norm along dimension ``dim``.

    Args:
        mat: input tensor
        dim: dimension along which to normalize (default 3)
        p: order of the norm (default 2)

    Returns:
        The normalized tensor.
    """
    return F.normalize(mat, p=p, dim=dim)
def _AllowObjectAccess(sid, handle, object_type: int,
access_permissions: int) -> None:
"""Allows access to an object by handle.
Args:
sid: A `PySID` representing the SID to grant access to.
handle: A handle to an object.
object_type: A `SE_OBJECT_TYPE` enum value.
access_per... | 5,355,563 |
def species_to_parameters(species_ids: List[str],
sbml_model: 'libsbml.Model') -> List[str]:
"""
Turn a SBML species into parameters and replace species references
inside the model instance.
:param species_ids:
List of SBML species ID to convert to parameters with the ... | 5,355,564 |
def test_build_artifacts_invokes_docker_commands(mocker):
"""
Validate that the docker-compose commands are executed with the valid paramters.
Since the docker-compose file was dynamically generated, we must pass the full
path of that file to docker-compose command. Also, set the context of the executio... | 5,355,565 |
def test_fact_name(strings, test_fact_empty_fx):
    """
    Test Fact.name getter/setter.

    ``strings`` is a parametrized string value and ``test_fact_empty_fx`` is
    a fixture supplying an empty Fact instance.
    """
    test_fact = test_fact_empty_fx
    # Assigning through the local alias must be visible through the fixture
    # reference — both names point at the same Fact object.
    test_fact.name = strings
    assert test_fact_empty_fx.name == strings
def gaussian_filter_cv(array: np.ndarray, sigma) -> np.ndarray:
"""
Apply a Gaussian filter to a raster that may contain NaNs, using OpenCV's implementation.
Arguments are for now hard-coded to be identical to scipy.
N.B: kernel_size is set automatically based on sigma
:param array: the input arra... | 5,355,567 |
def plot_eigval_zero_crossings(data_path:str, normalize:bool = False):
"""Plots scatterplot of eigenvalue magnitude with the number of
zero-crossings of the corresponding eigenmode, for each ReferenceModel.
Args:
- data_path: Diagnostic data path from which to draw
the metrics.
- normalize:... | 5,355,568 |
def save_controller(
controller: Controller,
filename: str
) -> None:
"""Saves the controller in json format into the specified files
Parameters
----------
controller : Controller
the controller to save into files
filename : str
the path to the json file
Returns
---... | 5,355,569 |
def refresh_cache(f):
"""Decorator to update the instance_info_cache
Requires context and instance as function args
"""
argspec = inspect.getargspec(f)
@functools.wraps(f)
def wrapper(self, context, *args, **kwargs):
res = f(self, context, *args, **kwargs)
try:
# ge... | 5,355,570 |
def get_timestamped_export_dir(export_dir_base):
"""Builds a path to a new subdirectory within the base directory.
Each export is written into a new subdirectory named using the
current time. This guarantees monotonically increasing version
numbers even across multiple runs of the pipeline.
The timest... | 5,355,571 |
def calculate_density(temp, pressure):
"""Returns density in g/cm^3
"""
if (temp < 161.40):
raise ValueError("Solid phase!")
if (temp < 289.7):
VaporP_bar = pow(10, 4.0519 - 667.16 / temp)
else:
VaporP_bar = sys.float_info.max
if (pressure < VaporP_bar):
raise ... | 5,355,572 |
def load_scorers(scorers):
"""Loads modules and instantiates scorers."""
for sid, sdef in scorers:
module = None
if os.path.isfile(sdef):
try:
logging.info('Loading additional feature definitions from file %s', sdef)
prefix = os.path.dirname(sdef)
... | 5,355,573 |
def cross_validation(df, K, hyperparameters):
"""
Perform cross validation on a dataset.
:param df: pandas.DataFrame
:param K: int
:param hyperparameters: dict
"""
train_indices = list(df.sample(frac=1).index)
k_folds = np.array_split(train_indices, K)
if K == 1:
K = 2
... | 5,355,574 |
def solve_cities(cities: List, gdps: List, sick: List, total_capacity: int,
value_r=0, weight_r=0, num_reads=1, verbose=False) -> Dict:
"""
Solves problem: "Which cities should I should I shut down in order to stay
within healthcare resources constraints while maximizing overall GDP"
pa... | 5,355,575 |
def catalog():
"""Render the mapping catalog page."""
if request.args.get(EQUIVALENT_TO):
mappings = current_app.manager.get_mappings_by_type(EQUIVALENT_TO)
message = Markup("<h4>You are now visualizing the catalog of equivalent mappings</h4>")
flash(message)
elif request.args.get(I... | 5,355,576 |
def detect_counterexample(algorithm, test_epsilon, default_kwargs={},
event_search_space=None, databases=None,
event_iterations=100000, detect_iterations=500000, cores=0,
loglevel=logging.INFO):
"""
:param algorithm: The algorithm to ... | 5,355,577 |
def produce_dataset_mce(mce, kafka_config):
"""
Produces a MetadataChangeEvent to Kafka
"""
conf = {'bootstrap.servers': kafka_config.bootstrap_server,
'on_delivery': delivery_report,
'schema.registry.url': kafka_config.schema_registry}
key_schema = avro.loads('{"type": "stri... | 5,355,578 |
def test_simple(async_fxc, fxc_args):
    """ Testing basic async functionality.

    Runs ``simple_task`` to completion on the async client's event loop,
    targeting the tutorial endpoint supplied by the ``fxc_args`` fixture.
    """
    async_fxc.loop.run_until_complete(simple_task(async_fxc, fxc_args['tutorial_endpoint']))
def test_enable_beacon_module():
"""
Test enabling beacons
"""
comm1 = "Enabled beacons on minion."
event_returns = [
{
"complete": True,
"tag": "/salt/minion/minion_beacon_enabled_complete",
"beacons": {
"enabled": True,
"w... | 5,355,580 |
def convert_annotation(ann):
    """Converts an AST object into its lib2to3 equivalent.

    Not yet implemented for any node type: always raises
    ``NotImplementedError`` describing the offending node.
    """
    message = f"unknown AST node type: {ann!r}"
    raise NotImplementedError(message)
def reset_all(rewind_to=None):
    """Reset an instance of each of the registered projectors.

    NOTE(review): ``rewind_to`` is accepted but never used; confirm whether
    it was meant to be forwarded to ``reset()``.
    """
    for name in _projectors:
        reset(name)
def test_maps():
"""This test just tests nothing fails at the moment
"""
df = pd.DataFrame([["99P", 0.3], ["13T", 1.2]], columns=["pct", "val"])
plt = maps.ccg_map(df, title="foo", column="val")
with tempfile.NamedTemporaryFile() as f:
plt.savefig(f.name, format="png", dpi=300, bbox_inches="... | 5,355,583 |
def apply_inverse_rot_to_vec(rot, vec):
    """Multiply the inverse of a rotation matrix by a vector.

    For a rotation matrix the inverse equals the transpose, so each output
    component is the dot product of a *column* of ``rot`` with ``vec``.
    """
    result = []
    for col in range(3):
        component = rot[0][col] * vec[0] + rot[1][col] * vec[1] + rot[2][col] * vec[2]
        result.append(component)
    return result
def make_pkr_plot():
"""Does the work of making the real-space P(k) figure."""
zlist = [2.000,6.000]
# b1lst = [0.900,2.750]
# b2lst = [0.800,6.250]
# alpha = [1.500,0.145]
#
b1lst = [0.920,2.750]
b2lst = [-.125,5.788]
bnlst = [3.713,1.00]
alpha = [1.500,0.150]
#bnls... | 5,355,585 |
def gsl_blas_dsdot(*args, **kwargs):
    """gsl_blas_dsdot(gsl_vector_float const * X, gsl_vector_float const * Y) -> int

    SWIG-generated pass-through to the ``_gslwrap`` C extension for the GSL
    BLAS ``dsdot`` routine (single-precision vectors, double-precision
    accumulation of the dot product).
    """
    return _gslwrap.gsl_blas_dsdot(*args, **kwargs)
def apero_create_pol_product(product, p, loc):
"""
Create the p.fits product:
Polarimetric products only processed in polarimetric mode, from the combination of 4 consecutive exposures.
HDU # Name Type Description
1 Primary Header
2 Pol Ima... | 5,355,587 |
def generate_tumor_growth_trajectories_base(initialCondition, parameterValues, number_realizations, random_seed=None, output_directory_name='./'):
"""
Generate many time courses of tumor growth and save data
"""
prng = np.random.RandomState(random_seed)
random_seed_array = prng.randint(0, 1000, num... | 5,355,588 |
async def vote_comment(session: AsyncSession, comment: Comment, user: User) -> Comment:
    """
    Creates a vote on the given comment.

    NOTE(review): stub — the body is unimplemented and currently returns
    ``None`` despite the ``Comment`` return annotation; confirm whether the
    implementation is pending or lives elsewhere.
    """
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
global ADDR_TYPES
# Required linux kernel version for this suite to run.
result = required_linux_kernel_version("4.16")
if result is not True:
pytest.skip("Kernel requirements are not met")
... | 5,355,590 |
def cost(weights):
    """Cost function which tends to zero when A |x> tends to |b>.

    Computes 1 minus the conditional probability of the global ground state
    given the ancilla ground state.
    """
    conditional = global_ground(weights) / ancilla_ground(weights)
    return 1 - conditional
def test_check_urls(file):
"""
test check urls check function.
"""
urls = collect_links_from_file(file)
checker = UrlCheckResult()
assert str(checker) == "UrlCheckResult"
# Checker should have passed, failed, and all
for attribute in ["passed", "failed", "all"]:
assert hasattr(c... | 5,355,592 |
def runOptimization(
cfg,
optimize_cfg,
n_iter=20,
split_runs=1,
model_runs=1,
filename="optimize_result",
):
"""Optimize the model parameter using hyperopt.
The model parameters are optimized using
the evaluations on validation dataset.
Args:
cfg(dict): configura... | 5,355,593 |
def __set_metadata(file_path: str, tags: dict):
"""Save given metadata in the file.
This function uses pytaglib since pydub does not support some MP3 tags
(e.g. "comment").
:param file_path: the path to the file to set tags in
:param tags: the dictionary of tags to set
"""
song = taglib.Fi... | 5,355,594 |
def _capabilities_for_entity(config, entity):
"""Return an _EntityCapabilities appropriate for given entity.
raises _UnknownEntityDomainError if the given domain is unsupported.
"""
if entity.domain not in _CAPABILITIES_FOR_DOMAIN:
raise _UnknownEntityDomainError()
return _CAPABILITIES_FOR_... | 5,355,595 |
def check_missing_files(client):
"""Find missing files listed in datasets."""
missing = defaultdict(list)
for path, dataset in client.datasets.items():
for file in dataset.files:
filepath = (path.parent / file)
if not filepath.exists():
missing[str(
... | 5,355,596 |
def apply_torsion(nodes, suffix=""):
""" Torsion energy in nodes. """
if (
"phases%s" % suffix in nodes.data
and "periodicity%s" % suffix in nodes.data
):
return {
"u%s"
% suffix: esp.mm.torsion.periodic_torsion(
x=nodes.data["x"],
... | 5,355,597 |
def parse_summary_table(doc):
"""
Parse the etree doc for summarytable, returns::
[{'channel': unicode,
'impressions': int,
'clicks': int,
'ctr': decimal.Decimal,
'ecpm': decimal.Decimal,
'earnings': decimal.Decimal}]
"""
for t in doc.findall('... | 5,355,598 |
def tileset_info(hitile_path):
"""
Get the tileset info for a hitile file.
Parameters
----------
hitile_path: string
The path to the hitile file
Returns
-------
tileset_info: {'min_pos': [],
'max_pos': [],
'tile_size': 1024,
... | 5,355,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.