content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def test_validation_happy(name, mode, parent, tmp_trestle_dir: pathlib.Path) -> None:
"""Test successful validation runs."""
(tmp_trestle_dir / test_utils.TARGET_DEFS_DIR / 'my_test_model').mkdir(exist_ok=True, parents=True)
(tmp_trestle_dir / test_utils.TARGET_DEFS_DIR / 'my_test_model2').mkdir(exist_ok=Tr... | 5,355,200 |
def edit(project: Any, params: Dict[str, str]) -> Dict[str, str]:
"""
Add a new method to a Python class in its given module.
TODO: See why an <EOF> char is added along with the new method
"""
eng = project.context().pathExpressionEngine()
res = eng.evaluate(project, "/Directory()/File()[@name=... | 5,355,201 |
def write_version_py(filename: str = 'python/esroofit/version.py') -> None:
"""Write package version to version.py.
This will ensure that the version in version.py is in sync with us.
:param filename: The version.py to write too.
:type filename: str
:return:
:rtype: None
"""
# Do not m... | 5,355,202 |
def get_country_code(country_name):
    """Return the Pygal 2-digit country code for the given country.

    :param country_name: country name to look up in ``COUNTRIES``.
    :return: the matching 2-digit code, or None when the name is unknown.
    """
    matches = (code for code, name in COUNTRIES.items() if name == country_name)
    # next() with a default yields None when no country matched.
    return next(matches, None)
def get_divmod(up, down, minute=False, limit=2):
"""
获取商
:param up: 被除数
:param down: 除数
:param minute: 换算成分钟单位
:param limit: 保留小数的位数
:return: 商
"""
if up == 0:
return 0
if down == 0:
return 0
if minute:
return round(up/down/60.0, limit)
return roun... | 5,355,204 |
def find_kernel_base():
    """Find the kernel base.

    :return: the effective address that file offset 0 maps to —
        presumably the load base of the kernel image. TODO confirm
        against the IDA Pro `idaapi` documentation.
    """
    return idaapi.get_fileregion_ea(0)
def main():
    """
    Instantiate the checker and chess boards and print them.
    """
    # Each board type is constructed and echoed; the printed value is
    # whatever the class's __str__/__repr__ produces.
    checkers = CheckerBoard()
    print(f'checkers: {checkers}')
    chess = ChessBoard()
    print(f'chess: {chess}')
def get_padding(x, padding_value=0, dtype=tf.float32):
"""Return float tensor representing the padding values in x.
Args:
x: int tensor with any shape
padding_value: int value that
dtype: type of the output
Returns:
float tensor with same shape as x containing values 0 or 1.
... | 5,355,207 |
def log_loss_and_acc(model_name: str, loss: torch.Tensor, acc: torch.Tensor, experiment_logger: LightningLoggerBase,
global_step: int):
"""
Logs the loss and accuracy in an histogram as well as scalar
:param model_name: name for logging
:param loss: loss tensor
:param acc: acc t... | 5,355,208 |
def canonical_symplectic_form_inverse (darboux_coordinates_shape:typing.Tuple[int,...], *, dtype:typing.Any) -> np.ndarray:
"""
Returns the inverse of canonical_symplectic_form(dtype=dtype). See documentation for that function for more.
In particular, the inverse of the canonical symplectic form is
... | 5,355,209 |
def update_with_error(a, b, path=None):
"""Merges `b` into `a` like dict.update; however, raises KeyError if values of a
key shared by `a` and `b` conflict.
Adapted from: https://stackoverflow.com/a/7205107
"""
if path is None:
path = []
for key in b:
if key in a:
i... | 5,355,210 |
def tokenize(sent):
    """Return the tokens of a sentence including punctuation.

    >>> tokenize("Bob dropped the apple. Where is the apple?")
    ['Bob', 'dropped', 'the', 'apple', '.', 'Where', 'is', 'the', 'apple', '?']
    """
    # Split on runs of non-word characters while keeping them via the
    # capturing group.  The original pattern r"(\W+)?" made the delimiter
    # optional, which permits zero-width matches; on Python >= 3.7
    # re.split honours those and fragments every word into single
    # characters.  r"(\W+)" produces the intended word/punctuation tokens.
    return [x.strip() for x in re.split(r"(\W+)", sent) if x and x.strip()]
def tree_unflatten(flat, tree, copy_from_tree=None):
"""Unflatten a list into a tree given the tree shape as second argument.
Args:
flat: a flat list of elements to be assembled into a tree.
tree: a tree with the structure we want to have in the new tree.
copy_from_tree: optional list of elements that ... | 5,355,212 |
def ByteOffsetToCodepointOffset( line_value, byte_offset ):
"""The API calls for byte offsets into the UTF-8 encoded version of the
buffer. However, ycmd internally uses unicode strings. This means that
when we need to walk 'characters' within the buffer, such as when checking
for semantic triggers and similar,... | 5,355,213 |
def get_authed_tweepy(access_token, token_secret):
"""Returns an authed instance of the twitter api wrapper tweepy for a given user."""
social_app_twitter = get_object_or_404(SocialApp, provider='twitter')
auth = tweepy.OAuthHandler(social_app_twitter.client_id, social_app_twitter.secret)
auth.set_acce... | 5,355,214 |
def suffix_for_status(status):
    """Return ``title`` suffix for given status.

    :param status: key looked up in the module-level ``STATUS_SUFFIXES`` map.
    :return: ``' <suffix>'`` when a suffix is registered, else ``''``.
    """
    suffix = STATUS_SUFFIXES.get(status)
    # Unknown statuses (or falsy suffixes) yield the empty string.
    return ' {}'.format(suffix) if suffix else ''
def test_codesystem_1(base_settings):
"""No. 1 tests collection for CodeSystem.
Test File: codesystem-contract-security-classification.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "codesystem-contract-security-classification.json"
)
inst = codesystem.CodeSystem.pars... | 5,355,216 |
def login():
"""
login an existing user
"""
try:
username = json.loads(request.data.decode())['username'].replace(" ", "")
password = json.loads(request.data.decode())['password'].replace(" ", "")
user = User(username, "", "")
user = user.exists()
if check_passwo... | 5,355,217 |
def soma_radius(morph):
    """Get the radius of a morphology's soma.

    :param morph: a morphology object exposing ``.soma.radius``
        (presumably a NeuroM/MorphIO-style morphology — TODO confirm).
    :return: the soma radius as stored on the object.
    """
    return morph.soma.radius
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
    """Return authorization server.

    The OAuth endpoints come from module-level constants; ``hass`` is not
    read here, but the parameter is kept — presumably to satisfy the
    Home Assistant OAuth callback signature (TODO confirm with caller).
    """
    return AuthorizationServer(
        authorize_url=AUTHORIZATION_ENDPOINT,
        token_url=TOKEN_ENDPOINT,
    )
def process_video_list(filename):
"""
submit multiple videos from a json file
"""
import django,json
sys.path.append(os.path.dirname(__file__))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dva.settings")
django.setup()
from dvaapp.views import handle_youtube_video
vlist = json.lo... | 5,355,220 |
def check_proposal_functions(
model: Model, state: Optional[flow.SamplingState] = None, observed: Optional[dict] = None,
) -> bool:
"""
Check for the non-default proposal generation functions
Parameters
----------
model : pymc4.Model
Model to sample posterior for
state : Optional[fl... | 5,355,221 |
def test_random_crop_01_py():
"""
Test RandomCrop op with py_transforms: size is a single integer, expected to pass
"""
logger.info("test_random_crop_01_py")
original_seed = config_get_set_seed(0)
original_num_parallel_workers = config_get_set_num_parallel_workers(1)
# Generate dataset
... | 5,355,222 |
def smaller_n(n1, n2):
""" Compare two N_Numbers and returns smaller one. """
p1, s1 = n1
p2, s2 = n2
p1l = len(str(p1)) + s1
p2l = len(str(p2)) + s2
if p1l < p2l:
return n1
elif p1l > p2l:
return n2
p1 = p1.ljust(36, '9')
p2 = p2.ljust(36, '9')
if p1 <= p2:
... | 5,355,223 |
def deploy_gcs_audit_logs(config):
"""Deploys the GCS logs bucket to the remote audit logs project, if used."""
if FLAGS.enable_new_style_resources:
logging.info('GCS audit logs will be deployed through CFT.')
return
# The GCS logs bucket must be created before the data buckets.
if not config.audit_log... | 5,355,224 |
def sortDict(dictionary: dict):
    """Return a copy of *dictionary* whose keys are inserted in sorted order.

    Python dicts preserve insertion order, so iterating the result yields
    keys in sorted order.  Replaces the manual key-list copy/sort/refill
    with a single dict comprehension.

    :param dictionary: mapping to sort by key.
    :return: a new dict with the same items, keys in sorted order.
    """
    return {key: dictionary[key] for key in sorted(dictionary)}
def elina_linexpr0_alloc(lin_discr, size):
"""
Allocate a linear expressions with coefficients by default of type ElinaScalar and c_double.
If sparse representation, corresponding new dimensions are initialized with ELINA_DIM_MAX.
Parameters
----------
lin_discr : c_uint
Enum of typ... | 5,355,226 |
def FrameTag_get_tag():
    """FrameTag_get_tag() -> std::string"""
    # Thin wrapper delegating to the native _RMF extension module —
    # presumably SWIG-generated (docstring shows the C++ signature).
    return _RMF.FrameTag_get_tag()
def noise4(x: float, y: float, z: float, w: float) -> float:
    """
    Generate 4D OpenSimplex noise from X,Y,Z,W coordinates.

    Delegates to the module-level default OpenSimplex instance
    (``_default``), so all callers share one seeded generator.
    """
    return _default.noise4(x, y, z, w)
def damerau_levenshtein_distance(word1: str, word2: str) -> int:
"""Calculates the distance between two words."""
inf = len(word1) + len(word2)
table = [[inf for _ in range(len(word1) + 2)] for _ in range(len(word2) + 2)]
for i in range(1, len(word1) + 2):
table[1][i] = i - 1
for i in range... | 5,355,229 |
def closeSession(log_file, seen_tweets):
    """Write final files.

    :param log_file: log contents to persist.
    :param seen_tweets: seen-tweets contents to persist.
    """
    # NOTE(review): `log` and `seentweets` are not defined in this function —
    # presumably module-level path globals.  Verify they exist; the names are
    # confusingly close to the parameters `log_file`/`seen_tweets`, which hold
    # the *contents*, not the paths.
    with open(log, 'w') as outfile:
        outfile.write(log_file)
    with open(seentweets, 'w') as outfile:
        outfile.write(seen_tweets)
def ga_validator(value: Any) -> str | int:
"""Validate that value is parsable as GroupAddress or InternalGroupAddress."""
if isinstance(value, (str, int)):
try:
parse_device_group_address(value)
return value
except CouldNotParseAddress:
pass
raise vol.Inva... | 5,355,231 |
def getTime(sim):
"""
Get the network time
@param sim: the SIM serial handle
"""
sim.write(b'AT+CCLK?\n')
line = sim.readline()
res = None
while not line.endswith(b'OK\r\n'):
time.sleep(0.5)
matcher = re.match(br'^\+CCLK: "([^+]+)\+[0-9]+"\r\n', line)
if matcher:... | 5,355,232 |
def start_buffer_thread(buffer_thread_config):
""" 开启一个buffer队列线程,监视所有的buffer队列,
根据buffer队列对应的job队列拥塞情况, 将buffer队列的任务合适的推送到相应的job队列
"""
if not buffer_thread_config: return
global buffer_thread_instance
if buffer_thread_instance is not None:
buffer_thread_instance.stop()
buffer_... | 5,355,233 |
def defaultTargetLicense():
"""
Default license for targets, shared for all targets that do not specify
their own custom license, which is useful for saving storage space as this
license is globally referenced by and applies to the majority of targets.
"""
import makehuman
return makehuman.g... | 5,355,234 |
def generate_app(path, template=None, create=False):
""" Generates a CouchApp in app_dir
:attr verbose: boolean, default False
:return: boolean, dict. { 'ok': True } if ok,
{ 'ok': False, 'error': message }
if something was wrong.
"""
TEMPLATES = ['app']
prefix = ''
if template is ... | 5,355,235 |
def RunTestsOnNaCl(targets, build_args):
"""Run a test suite for the NaCl version."""
# Currently we can only run the limited test set which is defined as
# nacl_test_targets in nacl_extension.gyp.
if targets:
PrintErrorAndExit('Targets [%s] are not supported.' % ', '.join(targets))
nacl_gyp = os.path.joi... | 5,355,236 |
def get_repo_info(main_path):
    """ Get the info of repo.
    Args:
        main_path: the file store location.
    Return:
        A json object.
    """
    # repo_info.json is expected directly under main_path.
    info_file = main_path + '/repo_info.json'
    with open(info_file) as fh:
        parsed = json.load(fh)
    return parsed
def parse_settings(settings_file: str) -> dict:
"""
The function parses settings file into dict
Parameters
----------
settings_file : str
File with the model settings, must be in yaml.
Returns
-------
ydict : dict
Parsed settings used for modeling.
"... | 5,355,238 |
def get_mac_address(path):
"""
input: path to the file with the location of the mac address
output: A string containing a mac address
Possible exceptions:
FileNotFoundError - when the file is not found
PermissionError - in the absence of access rights to the file
TypeError - If t... | 5,355,239 |
def jwt_get_username_from_payload_handler(payload):
    """
    Override this function if username is formatted differently in payload
    """
    # dict.get returns None when the 'name' claim is absent.
    username = payload.get('name')
    return username
def grr_uname(line):
"""Returns certain system infornamtion.
Args:
line: A string representing arguments passed to the magic command.
Returns:
String representing some system information.
Raises:
NoClientSelectedError: Client is not selected to perform this operation.
"""
args = grr_uname.par... | 5,355,241 |
def stock_szse_summary(date: str = "20200619") -> pd.DataFrame:
"""
深证证券交易所-总貌-证券类别统计
http://www.szse.cn/market/overview/index.html
:param date: 最近结束交易日
:type date: str
:return: 证券类别统计
:rtype: pandas.DataFrame
"""
url = "http://www.szse.cn/api/report/ShowReport"
params = {
... | 5,355,242 |
def find_all_movies_shows(pms): # pragma: no cover
""" Helper of get all the shows on a server.
Args:
func (callable): Run this function in a threadpool.
Returns: List
"""
all_shows = []
for section in pms.library.sections():
if section.TYPE in ('movie', 'show'):... | 5,355,243 |
def dropout_gradient_descent(Y, weights, cache, alpha, keep_prob, L):
"""
Updates the weights of a neural network with Dropout regularization using
gradient descent
Y is a one-hot numpy.ndarray of shape (classes, m) that contains the
correct labels for the data
classes is the number of clas... | 5,355,244 |
def parse_vars(vars):
    """
    Transform a list of NAME=value environment variables into a dict
    """
    # Split each entry once at the first '=' so values may themselves
    # contain '='.  Later duplicates of a name overwrite earlier ones.
    return {name: value for name, value in (item.split("=", 1) for item in vars)}
def find_dup_items(values: List) -> List:
    """Find duplicate items in a list
    Arguments:
        values {List} -- A list of items
    Returns:
        List -- A list of duplicated items
    """
    # Count every item once, then keep those seen more than once.
    counts = collections.Counter(values)
    return [item for item, count in counts.items() if count > 1]
def circle_location_Pass(circle_, image_, margin=0.15):
"""
Function for check if the circle_ is overlapping
with the margin of the image_.
"""
cy, cx, rad, accum = circle_
image_sizeY_, image_sizeX_ = image_.shape[0], image_.shape[1]
margin_min_x = int(image_sizeX_ * margin)
margin_max_... | 5,355,247 |
def test_deck_size():
    """Tests the len of the deck"""
    # A freshly-constructed Deck must hold exactly one entry per card
    # definition in the module-level `cards` collection.
    assert len(Deck()) == len(cards)
def session_factory(
base_class=ftplib.FTP,
port=21,
use_passive_mode=None,
*,
encrypt_data_channel=True,
debug_level=None,
):
"""
Create and return a session factory according to the keyword
arguments.
base_class: Base class to use for the session class (e. g.
`ftplib.FTP_T... | 5,355,249 |
def calculate_molecular_mass(symbols):
"""
Calculate the mass of a molecule.
Parameters
----------
symbols : list
A list of elements.
Returns
-------
mass : float
The mass of the molecule
"""
mass = 0
for i in range(len(symbols)):
mass =... | 5,355,250 |
def _color_str(string, color):
    """Simple color formatter for logging formatter"""
    # Wrap the text in an ANSI SGR color sequence; '\033[0m' resets.
    # For bold add 1; after "["
    return '\033[{:d}m'.format(COLOR_DICT[color]) + string + '\033[0m'
def scrape_data(urls: list):
    """Use Multithreading for scraping.

    :param urls: URLs, each handed to ``parse_lob_data`` on a worker thread.
    """
    # Exiting the with-block waits for all submitted work to finish.
    # NOTE(review): the map() result is never consumed, so exceptions raised
    # inside parse_lob_data are silently discarded — confirm that is intended.
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        executor.map(parse_lob_data, urls)
def roi():
"""
calculate return of investment, displays values and relative title
values displayed in descending order with relative title
delete the ROI series after display
"""
df['ROI'] = (df["Gross Earnings"] / df["Budget"] * 100).round(2)
sort_highest_roi = df[['Title', 'ROI']].sort_val... | 5,355,253 |
def init_db(config, verbose=False):
"""
Initialize db if necessary: create the sole non-admin user
"""
client = pymongo.MongoClient(
host=config["sentinel"]["database"]["host"],
port=config["sentinel"]["database"]["port"],
username=config["sentinel"]["database"]["admin_username"]... | 5,355,254 |
def parser_tool_main(args):
"""Main function for the **parser** tool.
This method will parse a JSON formatted Facebook conversation,
reports informations and retrieve data from it, depending on the
arguments passed.
Parameters
----------
args : Namespace (dict-like)
Arguments passe... | 5,355,255 |
def merge_all_channel_images(all_patient_paths, output_dir, image_resize):
"""
Function used to merge all channel images into one
:param all_patient_paths: list of all paths, one for each patient
:param output_dir: output dir for new concatenated images
:return:
"""
# clean and make output d... | 5,355,256 |
def merge(from_args):
"""Merge a sequence of operations into a cross-product tree.
from_args: A dictionary mapping a unique string id to a
raco.algebra.Operation instance.
Returns: a single raco.algebra.Operation instance and an opaque
data structure suitable for passing to the rewrite_refs functi... | 5,355,257 |
def get_content_type(file_resource):
"""Gets a file's MIME type.
Favors returning the result of `file -b --mime ...` if the command is
available and users have enabled it. Otherwise, it returns a type based on the
file's extension.
Args:
file_resource (resource_reference.FileObjectResource): The file to... | 5,355,258 |
def graph(task_id):
    """Return the graph.json results"""
    # Delegates to the shared file-retrieval helper for the given task id.
    return get_file(task_id, "graph.json")
def com_google_fonts_check_varfont_bold_wght_coord(ttFont, bold_wght_coord):
"""The variable font 'wght' (Weight) axis coordinate must be 700 on the 'Bold' instance."""
if bold_wght_coord == 700:
yield PASS, "Bold:wght is 700."
else:
yield FAIL,\
Message("not-700",
... | 5,355,260 |
def shoulders(agents, mask):
"""Positions of the center of mass, left- and right shoulders.
Args:
agents (ndarray):
Numpy array of datatype ``dtype=agent_type_three_circle``.
"""
for agent, m in zip(agents, mask):
if not m:
continue
tangent = rotate270(un... | 5,355,261 |
def parse_components_from_aminochange(aminochange):
""" Returns a dictionary containing (if possible) 'ref', 'pos', and 'alt'
characteristics of the supplied aminochange string.
If aminochange does not parse, returns None.
:param aminochange: (str) describing amino acid change
:return: dict or Non... | 5,355,262 |
def get_logs_csv():
"""
get target's logs through the API in JSON type
Returns:
an array with JSON events
"""
api_key_is_valid(app, flask_request)
target = get_value(flask_request, "target")
data = logs_to_report_json(target)
keys = data[0].keys()
filename = "report-" + now(... | 5,355,263 |
def _cache_key_format(lang_code, request_path, qs_hash=None):
"""
função que retorna o string que será a chave no cache.
formata o string usando os parâmetros da função:
- lang_code: código do idioma: [pt_BR|es|en]
- request_path: o path do request
- qs_hash: o hash gerado a partir dos parametro... | 5,355,264 |
def select_from(paths: Iterable[Path],
filter_func: Callable[[Any], bool] = default_filter,
transform: Callable[[Path], Any] = None,
order_func: Callable[[Any], Any] = None,
order_asc: bool = True,
fn_base: int = 10,
limit: ... | 5,355,265 |
def prd(o, all = False):
"""
Pretty dump.
@param (object) o
@param (bool) all
@return (None)
"""
name = o.__module__
for attrName in dir(o):
# show only attrs
attrValue = getattr(o, attrName)
if all == False and not hasattr(attrValue, '__call__'):
print("<%s>.%s =... | 5,355,266 |
def dbscan(data:torch.Tensor, epsilon:float, **kwargs) -> torch.Tensor:
"""
Generate mask using DBSCAN.
Note, data in the largest cluster have True values.
Parameters
----------
data: torch.Tensor
input data with shape (n_samples, n_features)
epsilon: float
DBSCAN epsilon
... | 5,355,267 |
def data_splitter(data_path: str = "../../data/", split_perc: tuple = (0.7, 0.2, 0.1)):
"""
Input:
data_path: string
, default "../../data/"
Path to data folder
train_perc : float, default 0.7
Percentage of the data to be included in the training set.
copy... | 5,355,268 |
def simulate_multivariate_ts(mu, alpha, beta, num_of_nodes=-1,\
Thorizon = 60, seed=None, output_rejected_data=False):
"""
Inputs:
mu: baseline intesnities M X 1 array
alpha: excitiation rates of multivariate kernel pf HP M X M array
beta: decay rates of kernel of multivariate HP
nod... | 5,355,269 |
def token_urlsafe(nbytes):
"""Return a random URL-safe text string, in Base64 encoding.
The string has *nbytes* random bytes. If *nbytes* is ``None``
or not supplied, a reasonable default is used.
>>> token_urlsafe(16) #doctest:+SKIP
'Drmhze6EPcv0fN_81Bj-nA'
"""
tok = token_bytes(nbytes... | 5,355,270 |
def sha206a_get_pk_useflag_count(pk_avail_count):
"""
calculates available Parent Key use counts
Args:
pk_avail_count counts available bit's as 1 (int)
Returns:
Status Code
"""
if not isinstance(pk_avail_count, AtcaReference):
status = Status.ATCA_BAD_PARAM
e... | 5,355,271 |
def test_board_group_update():
    """Test that we can create a board group of testing boards."""
    # Smoke test: constructing the group and refreshing its boards
    # must complete without raising.
    board_group = BoardGroup(MockBoard, NoBoardMockBackend())
    board_group.update_boards()
def test_tile_valid_default():
    """Should return a 3 bands array and a full valid mask."""
    # Fixed tile coordinates — presumably chosen to lie fully inside
    # ADDRESS's raster extent (TODO confirm against the fixture data).
    tile_z = 21
    tile_x = 438217
    tile_y = 801835
    data, mask = main.tile(ADDRESS, tile_x, tile_y, tile_z)
    assert data.shape == (3, 256, 256)  # (bands, height, width)
    assert mask.all()  # every pixel marked valid
def rotate_points_around_origin(
x: tf.Tensor,
y: tf.Tensor,
angle: tf.Tensor,
) -> Tuple[tf.Tensor, tf.Tensor]:
"""Rotates points around the origin.
Args:
x: Tensor of shape [batch_size, ...].
y: Tensor of shape [batch_size, ...].
angle: Tensor of shape [batch_size, ...].
Returns:
R... | 5,355,274 |
def get_text_blocks(path):
"""
Used to extract text from images
"""
for df in get_ocr_data(path):
groups = df.groupby(by="block_num").groups
# print(groups.groups)
keys = sorted(list(groups.keys()))
text_blocks = []
for k in keys:
word_idxs = groups[k]... | 5,355,275 |
async def test_conformance_008_autocorrect(caplog):
"""
oadrDistributeEvent eventSignal interval durations for a given event MUST
add up to eiEvent eiActivePeriod duration.
"""
event_id = generate_id()
event = {'event_descriptor':
{'event_id': event_id,
'modifica... | 5,355,276 |
def form_of(state):
"""Return the form of the given state."""
if hasattr(state, "__form__"):
if callable(state.__form__) and not inspect.isclass(state.__form__):
return state.__form__()
else:
return state.__form__
else:
raise ValueError(f"{state} has no form"... | 5,355,277 |
def poly_to_geopandas(polys, columns):
"""
Converts a GeoViews Paths or Polygons type to a geopandas dataframe.
Parameters
----------
polys : gv.Path or gv.Polygons
GeoViews element
columns: list(str)
List of columns
Returns
-------
gdf : Geopandas dataframe
""... | 5,355,278 |
def test_set_style():
    """
    Tests that setting the matplotlib style works.
    """
    # Smoke test: set_style() must run without raising.
    set_style()
def create_parent_dirs(path: str):
    """
    :param path: the file path to try to create the parent directories for
    """
    # Hand the containing directory straight to the shared helper.
    create_dirs(os.path.dirname(path))
def test_rdb_aggregation_context():
"""
Check that the aggregation context of the rules is saved in rdb. Write data with not a full bucket,
then save it and restore, add more data to the bucket and check the rules results considered the previous data
that was in that bucket in their calculation. Check o... | 5,355,281 |
def get_sequences(query_file=None, query_ids=None):
"""Convenience function to get dictionary of query sequences from file or IDs.
Parameters:
query_file (str): Path to FASTA file containing query protein sequences.
query_ids (list): NCBI sequence accessions.
Raises:
ValueError: Did... | 5,355,282 |
def _get_tickets(manifest, container_dir):
"""Get tickets."""
principals = set(manifest.get('tickets', []))
if not principals:
return False
tkts_spool_dir = os.path.join(
container_dir, 'root', 'var', 'spool', 'tickets')
try:
tickets.request_tickets(
context.GLO... | 5,355,283 |
def make_plots_stratified_by_category_type(results,
category_type,
emotion_pairs=None,
cycle_types_to_plot = ['near_period',
... | 5,355,284 |
def test_fb_forward_multichannel(fb_class, fb_config, ndim):
""" Test encoder/decoder in multichannel setting"""
# Definition
enc = Encoder(fb_class(**fb_config))
dec = Decoder(fb_class(**fb_config))
# 3D Forward with several channels
tensor_shape = tuple([random.randint(2, 4) for _ in range(ndi... | 5,355,285 |
def random_show_date(database_connection: mysql.connector.connect) -> str:
"""Return a random show date from the ww_shows table"""
database_connection.reconnect()
cursor = database_connection.cursor(dictionary=True)
query = ("SELECT s.showdate FROM ww_shows s "
"WHERE s.showdate <= NOW() "... | 5,355,286 |
def get_output_tensor(interpreter, index):
    """Returns the output tensor at the given index."""
    details = interpreter.get_output_details()[index]
    raw = interpreter.get_tensor(details["index"])
    # Squeeze away size-1 dimensions so callers get a compact array.
    return np.squeeze(raw)
def make_d_mappings(n_dir, chain_opts):
"""Generate direction to solution interval mapping."""
# Get direction dependence for all terms.
dd_terms = [dd for _, dd in yield_from(chain_opts, "direction_dependent")]
# Generate a mapping between model directions gain directions.
d_map_arr = (np.arange(... | 5,355,288 |
def main():
"""Loop to test the postgres generation with REPL"""
envs = cast(Dict[str, str], os.environ)
if "HAYSTACK_DB" not in envs:
envs["HAYSTACK_DB"] = "sqlite3:///:memory:"
provider = get_provider("shaystack.providers.sql", envs)
conn = cast(SQLProvider, provider).get_connect()
sch... | 5,355,289 |
def _calculateVolumeByBoolean(vtkDataSet1,vtkDataSet2,iV):
"""
Function to calculate the volumes of a cell intersecting a mesh.
Uses a boolean polydata filter to calculate the intersection,
a general implementation but slow.
"""
# Triangulate polygon and calc normals
baseC = vtkTools.datas... | 5,355,290 |
def WebChecks(input_api, output_api):
"""Run checks on the web/ directory."""
if input_api.is_committing:
error_type = output_api.PresubmitError
else:
error_type = output_api.PresubmitPromptWarning
output = []
output += input_api.RunTests([input_api.Command(
name='web presubmit',
cmd=[
... | 5,355,291 |
def numpy2stl(A, fn, scale=0.1, mask_val=None, ascii=False,
max_width=235.,
max_depth=140.,
max_height=150.,
solid=False,
rotate=True,
min_thickness_percent=0.1,
force_python=False):
"""
Reads a numpy arra... | 5,355,292 |
def synonyms(species: str) -> str:
"""
Check to see if there are other names that we should be using for
a particular input. E.g. If CFC-11 or CFC11 was input, go on to use cfc-11,
as this is used in species_info.json
Args:
species (str): Input string that you're trying to match
Returns... | 5,355,293 |
def test_parcel_profile_reference_equals_first_height():
"""Test Parcel.profile when reference height equals first final height."""
height = [3000, 2000, 1000]*units.meter
z_init = 3000*units.meter
t_initial = -2*units.celsius
q_initial = 1e-4*units.dimensionless
l_initial = 0*units.dimensionles... | 5,355,294 |
def test_time():
""" Tests if te algorithm is cabla to finish the solution for N=12 in less than 10 minutes
"""
for N in range(8,20):
_ ,time = solveN(N)
if(time>600):
print("Test don't passed at N={N} should be less than 10 min Taken:{time}")
break
print(f"Ti... | 5,355,295 |
def _blkid_output(out):
"""
Parse blkid output.
"""
flt = lambda data: [el for el in data if el.strip()]
data = {}
for dev_meta in flt(out.split("\n\n")):
dev = {}
for items in flt(dev_meta.strip().split("\n")):
key, val = items.split("=", 1)
dev[key.lower... | 5,355,296 |
def test_generate_file_with_subclass(input_mock, getpass_mock):
"""Test inheritance."""
class CustomCommand(generate_settings.Command):
"""Custom test class to specify settings."""
settings_template_file = TEMPLATE_FILE_PATH
settings_file_path = CREATED_FILE_PATH
force_secret_k... | 5,355,297 |
def as_iso_datetime(qdatetime):
    """ Convert a QDateTime object into an iso datetime string.

    :param qdatetime: a Qt ``QDateTime`` instance.
    :return: the string produced by Qt's ``Qt.ISODate`` format
        (ISO 8601, e.g. ``2020-01-31T12:00:00``).
    """
    return qdatetime.toString(Qt.ISODate)
def spec_means_and_magnitudes(action_spec):
"""Get the center and magnitude of the ranges in action spec."""
action_means = tf.nest.map_structure(
lambda spec: (spec.maximum + spec.minimum) / 2.0, action_spec)
action_magnitudes = tf.nest.map_structure(
lambda spec: (spec.maximum - spec.minimum) / 2.0,... | 5,355,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.