content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def _resolve_link(path):
"""Internal helper function. Takes a path and follows symlinks
until we either arrive at something that isn't a symlink, or
encounter a path we've seen before (meaning that there's a loop).
"""
paths_seen = []
while islink(path):
if path in paths_seen:
... | 3,200 |
def test_handle_diagonalization_errors(generate_workchain_ph):
"""Test `PhBaseWorkChain.handle_diagonalization_errors`."""
process = generate_workchain_ph(exit_code=PhCalculation.exit_codes.ERROR_COMPUTING_CHOLESKY)
process.setup()
process.validate_parameters()
process.prepare_process()
process... | 3,201 |
def create(platformDetails):
"""
This function creates a new platform in the platform list
based on the passed in platform data
:param platform: platform to create in platform structure
:return: 201 on success, 406 on platform exists
"""
# Remove id as it's created automatica... | 3,202 |
def add_command(name, func):
    """Register *func* under *name* in the command registry.

    Controls that execute commands must have the command recorded in the
    recreate-objects mapping so it can be re-attached to the widget during
    cloning.
    """
    logger.debug("Adding to commands: %s - %s", name, func)
    key = str(name)
    _RECREATE_OBJECTS["commands"][key] = func
def _execute_cell(cell, shell, iopub, timeout=300):
"""
Execute an IPython Notebook Cell and return the cell output.
Parameters
----------
cell : IPython.nbformat.current.NotebookNode
The IPython Notebook cell to execute.
shell : IPython.kernel.blocking.channels.BlockingShellChannel
... | 3,204 |
def VD_A_DF(data, val_col: str = None, group_col: str = None, sort=True):
"""
:param data: pandas DataFrame object
An array, any object exposing the array interface or a pandas DataFrame.
Array must be two-dimensional. Second dimension may vary,
i.e. groups may have different lengths.
... | 3,205 |
def making_maintf():
"""creating and writing main.tf-file in examples"""
variables_list = get_namelist("variable \"", "\" {", "variables.tf")
text_addition(
"examples/main.tf",
"\nmodule \"" +
"{{ cookiecutter.example_module_name}}" +
"\" {\n"
)
bigest_len = 0
fo... | 3,206 |
def getR2(y, y_fitted, chi=None):
"""
calculates the coefficient of determination R^2 for `y_fitted` as prediction for `y` over a region marked by chi>0 defined by
R^2=1 - S_res/S_tot
with S_res=int(chi*(y-y_fitted*1)**2, S_tot=int(chi*(y-m(y)*1)**2), m(y)=int(chi*y)/... | 3,207 |
def handle_message(event_data):
"""
Here we'll build a 'message' event handler using the Slack Events Adapter.
"""
# Grab the message from the event payload
message = event_data["event"]
print('TESTING >>>> message = ', message)
# if the user says hello
if "hello" in message.get('text'... | 3,208 |
def basis(d, point_distribution='uniform', symbolic=True):
"""
Return all local basis function phi as functions of the
local point X in a 1D element with d+1 nodes.
If symbolic=True, return symbolic expressions, else
return Python functions of X.
point_distribution can be 'uniform' or 'Chebyshev... | 3,209 |
def provide_batch_fn():
    """Return the provide_batch function to use."""
    batch_fn = dataset_factory.provide_batch
    return batch_fn
def test_tenant_id_validation():
"""The credential should raise ValueError when given an invalid tenant_id"""
valid_ids = {"c878a2ab-8ef4-413b-83a0-199afb84d7fb", "contoso.onmicrosoft.com", "organizations", "common"}
for tenant in valid_ids:
UsernamePasswordCredential("client-id", "username", "pass... | 3,211 |
def mergeSort(li):
"""Sorts a list by splitting it to smaller and smaller pieces (until they
only have one or less elements) and then merges it back using the function
``merge()``.
>>> mergeSort([1, 2, 3, 4, 5])
[1, 2, 3, 4, 5]
>>> mergeSort([5, 4, 3, 2, 1])
[1, 2, 3, 4, 5]
>>> mergeSor... | 3,212 |
def mcplayout(pos, amaf_map, disp=False):
""" Start a Monte Carlo playout from a given position,
return score for to-play player at the starting position;
amaf_map is board-sized scratchpad recording who played at a given
position first """
if disp: print('** SIMULATION **', file=sys.stderr)
st... | 3,213 |
def test_colour_ranges(fake_readme, monkeypatch):
"""
Whatever number we provide as coverage should produce the appropriate colour
"""
readme_file = "README"
def fake_readme_location(*args, **kwargs):
return os.path.join(TESTS_DIR, readme_file)
monkeypatch.setattr(__main__, "readme_lo... | 3,214 |
def roundedCorner(pc, p1, p2, r):
"""
Based on Stackoverflow C# rounded corner post
https://stackoverflow.com/questions/24771828/algorithm-for-creating-rounded-corners-in-a-polygon
"""
d1 = pc - p1
d2 = pc - p2
# Angle between vector 1 and vector 2 divided by 2
#angle = (atan2(d1.y, d1... | 3,215 |
async def test_connect_sync_success(v3_server):
"""Test triggering a synchronous handler upon connection to the websocket."""
async with v3_server:
async with aiohttp.ClientSession() as session:
simplisafe = await API.login_via_credentials(
TEST_EMAIL, TEST_PASSWORD, client_i... | 3,216 |
def TestSlipDualReverse(port,N):
""" Drive motor back and forth across marker source for N interations. Assumes marker is primed BEHIND the drive roller"""
for its in range(N):
print "backward iteration " + str(its)
for i in range(10):
IndexS(port,1,-133)
IndexS(port,2,400)
print "forward iteration " + s... | 3,217 |
def remove_partitions(
cube, store, conditions=None, ktk_cube_dataset_ids=None, metadata=None
):
"""
Remove given partition range from cube using a transaction.
Remove the partitions selected by ``conditions``. If no ``conditions`` are given,
remove all partitions. For each considered dataset, only... | 3,218 |
def update_t_new_docker_image_names(main, file):
""" Updates the names of the docker images from lasote to conanio
"""
docker_mappings = {
"lasote/conangcc49": "conanio/gcc49",
"lasote/conangcc5": "conanio/gcc5",
"lasote/conangcc6": "conanio/gcc6",
"lasote/conangcc7": "conan... | 3,219 |
def fit(kern, audio, file_name, max_par, fs):
"""Fit kernel to data """
# time vector for kernel
n = kern.size
xkern = np.linspace(0., (n - 1.) / fs, n).reshape(-1, 1)
# initialize parameters
if0 = gpitch.find_ideal_f0([file_name])[0]
init_f, init_v = gpitch.init_cparam(y=audio, fs=fs, max... | 3,220 |
def find_next_open_date(location_pid, date):
"""Finds the next day where this location is open."""
location = current_app_ils.location_record_cls.get_record_by_pid(
location_pid
)
_infinite_loop_guard = date + timedelta(days=365)
while date < _infinite_loop_guard:
if _is_open_on(loca... | 3,221 |
def check_dimension(units_in=None, units_out=None):
"""Check dimensions of inputs and ouputs of function.
Will check that all inputs and outputs have the same dimension
than the passed units/quantities. Dimensions for inputs and
outputs expects a tuple.
Parameters
----------
unit... | 3,222 |
def _rowcorr(a, b):
"""Correlations between corresponding matrix rows"""
cs = np.zeros((a.shape[0]))
for idx in range(a.shape[0]):
cs[idx] = np.corrcoef(a[idx], b[idx])[0, 1]
return cs | 3,223 |
def detected():
    """
    Wake-word detected callback: announce success and stop the detector.
    (Original docstring: "唤醒成功" — "wake-up succeeded".)
    """
    print('唤醒成功')
    # Play the confirmation sound for a successful wake-up.
    play('./audio/open.wav')
    # Signal the outer loop that a wake-up happened, then stop listening.
    global interrupted
    interrupted = True
    detector.terminate()
def gff_to_dict(f_gff, feat_type, idattr, txattr, attributes, input_type):
"""
It reads only exonic features because not all GFF files contain gene and trascript features. From the exonic
features it extracts gene names, biotypes, start and end positions. If any of these attributes do not exit
then they... | 3,225 |
def _subtract_the_mean(point_cloud):
"""
Subtract the mean in point cloud and return its zero-mean version.
Args:
point_cloud (numpy.ndarray of size [N,3]): point cloud
Returns:
(numpy.ndarray of size [N,3]): point cloud with zero-mean
"""
point_cloud = point_cloud - np.mean(poin... | 3,226 |
def check_filtering(grating_1d, filtered_grating_1d, normalized_contrast):
"""plot
"""
plt.figure(figsize=(25, 5))
plt.plot(grating_1d)
plt.title('1d grating')
plt.figure(figsize=(25, 5))
plt.plot(filtered_grating_1d)
plt.title('Filtered fundamental')
print("Square-wave contrast: %... | 3,227 |
def ncnr_load(filelist=None, check_timestamps=True):
"""
Load a list of nexus files from the NCNR data server.
**Inputs**
filelist (fileinfo[]): List of files to open.
check_timestamps (bool): verify that timestamps on file match request
**Returns**
output (refldata[]): All entries of a... | 3,228 |
def load_source_dataframe(method, sourcename, source_dict,
download_FBA_if_missing, fbsconfigpath=None):
"""
Load the source dataframe. Data can be a FlowbyActivity or
FlowBySector parquet stored in flowsa, or a FlowBySector
formatted dataframe from another package.
:param ... | 3,229 |
def unpack_nwchem_basis_block(data):
    """Deserialize an NWChem basis data block.

    @param data: JSON-encoded basis set data, possibly holding many types
    @type data : str
    @return: the decoded basis data
    @rtype : dict
    """
    return json.loads(data)
def starify(name):
"""
Replace any ints in a dotted key with stars. Used when applying defaults and widgets to fields
"""
newname = []
for key in name.split('.'):
if is_int(key):
newname.append('*')
else:
newname.append(key)
name = '.'.join(newname)
re... | 3,231 |
def render(states, actions, instantaneous_reward_log, cumulative_reward_log, critic_distributions, target_critic_distributions, projected_target_distribution, bins, loss_log, episode_number, filename, save_directory, time_log, SPOTNet_sees_target_log):
"""
TOTAL_STATE = [relative_x, relative_y, relative_vx, rel... | 3,232 |
def load_boundary_conditions(bound_cond, zone_usage, data_class):
"""load use conditions according to DIN 18599 and SIA2024
loads Use conditions specified in the XML, according to DIN 18599,
SIA2024 in addition some AixLib specific use conditions for central AHU
are defined.
Parameters
-------... | 3,233 |
def hydrogens(atom: Atom) -> int:
    """Return the total number of hydrogen atoms on *atom* (int)."""
    h_count = atom.GetTotalNumHs()
    return h_count
def update_url_catalog(meraki):
"""Update the URL catalog available to the helper."""
query_urls = {"mr_radio": "/devices/{serial}/wireless/radio/settings"}
update_urls = {"mr_radio": "/devices/{serial}/wireless/radio/settings"}
query_all_urls = {"mr_rf_profile": "/networks/{net_id}/wireless/rfProfiles"... | 3,235 |
def to_field(field_tuple):
    """Build a dataframe_field from its tuple representation."""
    return dataframe_field(*tuple(field_tuple))
def Execute(data):
"""Required Execute function"""
global cooldown_list
return_value = ''
sender_user_id = ""
sender_user_display = ""
if data.IsFromTwitch():
sender_user_id = data.UserName.lower()
sender_user_display = data.UserName
elif data.IsFromYoutube() or data.IsFromDi... | 3,237 |
def send_fixtures(
    patch_client: HTTPClient,
    request: Any,
) -> Generator[Tuple[HTTPClient, str], None, None]:
    """Yield a (client, parameter) pair for the data-sending HTTP methods
    (POST, PATCH, PUT)."""
    fixture = (patch_client, request.param)
    yield fixture
def compute_subjobs_for_build(build_id, job_config, project_type):
"""
Calculate subjobs for a build.
:type build_id: int
:type job_config: JobConfig
:param project_type: the project_type that the build is running in
:type project_type: project_type.project_type.ProjectType
:rtype: list[Subj... | 3,239 |
def pymodbus_mocked(mocker):
"""Patch pymodbus to deliver results."""
class ResponseContent:
"""Fake a response."""
registers = [0]
class WriteStatus:
"""Mock a successful response."""
@staticmethod
def isError():
# pylint: disable=invalid-name,missing... | 3,240 |
def user_can_view_assessments(user, **kwargs):
    """Return True iff the given user is allowed to view the assessments."""
    if appConfig.settings.LOGIN_REQUIRED:
        return user.is_authenticated
    return True
def get_hashes(root_hash: str) -> List[str]:
    """Return a list with the commits since `root_hash`."""
    rev_list_cmd = f"git rev-list --ancestry-path {root_hash}..HEAD"
    result = run(rev_list_cmd)
    return result.stdout.splitlines()
def unzip_file(zip_src, dst_dir):
"""
解压zip文件
:param zip_src: zip文件的全路径
:param dst_dir: 要解压到的目的文件夹
:return:
"""
r = zipfile.is_zipfile(zip_src)
if r:
fz = zipfile.ZipFile(zip_src, "r")
for file in fz.namelist():
fz.extract(file, dst_dir)
else:
... | 3,243 |
def abort_multipart_upload(resource, bucket_name, object_name, upload_id):
    """Abort an in-progress multipart upload and return the abort response."""
    upload = resource.MultipartUpload(bucket_name, object_name, upload_id)
    response = upload.abort()
    return response
def read_data(input_path):
    """Read pre-stored train/tournament parquet data from *input_path*."""
    train_path = os.path.join(input_path, 'train.parquet')
    tournament_path = os.path.join(input_path, 'tournament.parquet')
    return pd.read_parquet(train_path), pd.read_parquet(tournament_path)
def double(n):
    """Return *n* multiplied by two."""
    doubled = n * 2
    return doubled
def group(iterable):
"""
Creates a min/max grouping for the inputted list of numbers. This
will shrink a list into the group sets that are available.
:param iterable | <iterable> | (list, tuple, set, etc.)
:return <generator> [(<int> min, <int> max), ..]
"""
numbers = sor... | 3,247 |
def Normalize(tensor, mean, std, inplace=False):
"""Normalize a float tensor image with mean and standard deviation.
This transform does not support PIL Image.
.. note::
This transform acts out of place by default, i.e., it does not mutates the input tensor.
See :class:`~torchvision.tra... | 3,248 |
def main():
    """Parse CLI arguments and perform the automation."""
    args = common.cli_arg_parser().parse_args()
    pause = getattr(args, common.PAUSE_ARG)
    with common.maybe_pause_at_the_end(pause):
        _update(args)
def test_get_aggregated_tensor_weights(tensor_db):
"""Test that get_aggregated_tensor calculates correctly."""
collaborator_weight_dict = {'col1': 0.1, 'col2': 0.9}
tensor_key = TensorKey('tensor_name', 'agg', 0, False, ())
agg_nparray = tensor_db.get_aggregated_tensor(
tensor_key, collaborator_... | 3,250 |
def transcribe(transcriber):
"""
"""
directory = transcriber.transcribe_directory
output_directory = transcriber.transcribe_directory
log_directory = os.path.join(output_directory, 'log')
config = transcriber.transcribe_config
mdl_path = os.path.join(directory, 'final.mdl')
corpus = tran... | 3,251 |
def reproduce_candcollection(cc, data=None, wisdom=None, spec_std=None,
sig_ts=[], kalman_coeffs=[]):
""" Uses candcollection to make new candcollection with required info.
Will look for cluster label and filter only for peak snr, if available.
Location (e.g., integration, dm, d... | 3,252 |
def hilbert(signal, padding='nextpow'):
"""
Apply a Hilbert transform to a `neo.AnalogSignal` object in order to
obtain its (complex) analytic signal.
The time series of the instantaneous angle and amplitude can be obtained
as the angle (`np.angle` function) and absolute value (`np.abs` function)
... | 3,253 |
def _tagged_mosc_id(kubeconfig, version, arch, private) -> str:
"""determine what the most recently tagged machine-os-content is in given imagestream"""
base_name = rgp.default_imagestream_base_name(version)
base_namespace = rgp.default_imagestream_namespace_base_name()
name, namespace = rgp.payload_ima... | 3,254 |
def test_namechooser__DontReuseNames__chooseName__9(NameChooserFactory):
    """`chooseName()` skips a name that is already used in the container."""
    chooser = NameChooserFactory(2)
    in_use_answers = [True, False]
    with mock.patch.object(chooser, 'name_in_use', side_effect=in_use_answers):
        chosen = chooser.chooseName('foo', object())
    assert chosen == u'foo-4'
def delete_network_config(data):
"""
Delete the network configuration.
Parameters
----------
data : list
The list of network interfaces.
Returns
-------
No return value.
"""
delete_virtual_interfaces(data)
delete_directory_files(__netscripts, data, lambda x: __i... | 3,256 |
def test_workflow_migration(isolated_runner, old_workflow_project):
"""Check that *.cwl workflows can be migrated."""
result = isolated_runner.invoke(cli, ["migrate"])
assert 0 == result.exit_code
assert "OK" in result.output
result = isolated_runner.invoke(cli, ["log", old_workflow_project["log_p... | 3,257 |
def load_household_size_by_municipality():
"""Return dataframe, index 'Gemeente', column 'HHsize'."""
dfhh = pd.read_csv('data/huishoudens_samenstelling_gemeentes.csv', comment='#')
dfhh.sort_values('Gemeente', inplace=True)
dfhh.set_index('Gemeente', inplace=True)
# remove rows for nonexistent mu... | 3,258 |
def gcp_iam_service_account_delete_command(client: Client, args: Dict[str, Any]) -> CommandResults:
"""
Delete service account key.
Args:
client (Client): GCP API client.
args (dict): Command arguments from XSOAR.
Returns:
CommandResults: outputs, readable outputs and raw respon... | 3,259 |
def couple_to_string(couple: Union[Span, Tuple[int, int]]) -> str:
"""Return a deduplicated string representation of the given couple or span.
Examples:
>>> couple_to_string((12, 15))
"12-15"
>>> couple_to_string((12, 12))
"12"
>>> couple_to_string(Span(12, 15))
... | 3,260 |
def create_study(X, y,
storage=None, # type: Union[None, str, storages.BaseStorage]
sample_method=None,
metrics=None,
study_name=None, # type: Optional[str]
direction='maximize', # type: str
load_cache=False, # typ... | 3,261 |
def setup_logging(path='log.config', key=None):
"""Setup logging configuration"""
if os.path.exists(path):
with open(path, 'rt') as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(key... | 3,262 |
def qparams_init(net, conv_name="conv", bn_name="batchnorm"):
"""
Initialize quantized parameters for convolution op
:param net: mxnet.gluon.nn.Block
The net to initialize.
:param conv_name: str
:param bn_name: str
:return: mxnet.gluon.nn.Block
The net that has been initialized.
... | 3,263 |
def test_no_access_to_class_property(db):
"""Ensure the implementation doesn't access class properties or declared
attrs while inspecting the unmapped model.
"""
class class_property:
def __init__(self, f):
self.f = f
def __get__(self, instance, owner):
return s... | 3,264 |
def get_gb_version(backbone_top_cmake_path):
"""
Find the game backbone version number by searching the top level CMake file
"""
with open(backbone_top_cmake_path, 'r') as file:
cmake_text = file.read()
regex_result = re.search(gb_version_regex, cmake_text)
return regex_result.gr... | 3,265 |
async def get_processes(name: Optional[str] = None) -> List[Process]:
"""
Get all processes.
Args:
name (Optional[str], optional): Filter by process name. Defaults to None.
Returns:
List[Process]: A list of processes.
"""
if name:
return get_processes_by_name(name)
... | 3,266 |
def patch_typing_python351():
    """
    Python 3.5.1 doesn't have typing.Type, refs:
    https://github.com/crystax/android-vendor-python-3-5/issues/1
    """
    # TODO: check Python version and only patch if == 3.5.1
    if hasattr(typing, 'Type'):
        return
    typing.Type = Type
def create_file(root_folder, app_name, file, use_template=False):
"""Create a file in the specified target.
Args:
root_folder (str): project root folder.
app_name (str): project name.
file (str): file to be created.
use_template (bool, optional): whether or not to use the templa... | 3,268 |
def plot_day_of_activation(df, plotname):
"""
Plots Aggregate of Day of Activation.
"""
# todo sort order in logical day order
dotw = {0: 'Monday',
1: 'Tuesday',
2: 'Wednesday',
3: 'Thursday',
4: 'Friday',
5: 'Saturday',
6: 'S... | 3,269 |
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime... | 3,270 |
def run_alf_extractors(session_path):
    """Extract camera timestamps from the sync matrix.

    :param session_path: session path containing the ap.bin file
    :return: None; alf files are written as a side effect
    """
    extractors.ephys_fpga._get_main_probe_sync(session_path)
def get_rucio_redirect_url(lfn, scope):
"""
get_rucio_redirect_url: assemble Rucio redirect URL
@params: lfn ... one filename
e.g. user.gangarbt.62544955._2108356106.log.tgz
scope ... scope of the file with lfn
e.g. user.gangarbt, or... | 3,272 |
def test_store_not_normalized(mini_sentry, relay):
"""
Tests that relay does not normalize when processing is disabled
"""
relay = relay(mini_sentry, {"processing": {"enabled": False}})
project_id = 42
mini_sentry.add_basic_project_config(project_id)
relay.send_event(project_id, {"message": ... | 3,273 |
async def _getRequest(websession, url):
"""Send a GET request."""
async with websession.get(url, headers=HEADER) as response:
if response.status == 200:
data = await response.json(content_type=None)
else:
raise Exception('Bad response status code: {}'.format(response.stat... | 3,274 |
def interval_seconds():
    """Return the time interval in seconds.

    Returns:
        int
    """
    milliseconds = interval_to_milliseconds(interval())
    return int(milliseconds / 1000)
def get_current_git_branch():
"""Get current git branch name.
Returns:
str: Branch name
"""
branch_name = "unknown"
try:
branch_name = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('ascii').strip()
except subprocess.CalledProcessError:
... | 3,276 |
def get_autonomous_db_versions(compartment_id: Optional[str] = None,
db_workload: Optional[str] = None,
filters: Optional[Sequence[pulumi.InputType['GetAutonomousDbVersionsFilterArgs']]] = None,
opts: Optional[pulumi.InvokeOpti... | 3,277 |
def azimuthal_average(image, center=None, stddev=True, binsize=0.5, interpnan=False):
"""
Calculate the azimuthally averaged radial profile.
Modified based on https://github.com/keflavich/image_tools/blob/master/image_tools/radialprofile.py
Parameters:
imgae (numpy 2-D array): image array.
... | 3,278 |
def get_setindices(header, setnames):
"""From header like ---ID, coverage, set1_q-value set2_q-value---
this returns indices for different sets {'q-value': {'set1': 2, 'set2: 3}}
"""
setindices = OrderedDict()
for index, field in enumerate(header):
for setname in setnames:
if fie... | 3,279 |
def _base_and_stride(freqstr):
"""
Return base freq and stride info from string representation
Example
-------
_freq_and_stride('5Min') -> 'Min', 5
"""
groups = opattern.match(freqstr)
if groups.lastindex != 2:
raise ValueError("Could not evaluate %s" % freqstr)
stride = g... | 3,280 |
def generate_label(input_x,threshold):
"""
generate label with input
:param input_x: shape of [batch_size, sequence_length]
:return: y:[batch_size]
"""
batch_size,sequence_length=input_x.shape
y=np.zeros((batch_size,2))
for i in range(batch_size):
input_single=input_x[i]
... | 3,281 |
def store_wmarked_pdata(parameters, train_loader, valid_loader, \
netG, watermark, store_paths):
"""
Train: store the w'marked train data
"""
# init. Generator mode
netG.eval()
# data
is_cuda = parameters['system']['cuda']
blend_f = parameters['wmark']['blend... | 3,282 |
def remove(path):
"""Deletes a directory or file.
Args:
path: string, a path, filepath or dirpath.
Raises:
errors. NotFoundError if directory or file doesn't exist.
"""
if exists(path):
if isfile(path):
os.remove(path)
else:
shutil.rmtree(... | 3,283 |
def erase(input: Any, *args: Any, **kwargs: Any) -> Any:
    """Erase operation — not yet implemented (body is a placeholder).

    NOTE(review): intended semantics are not visible from this stub; it
    accepts any arguments and currently returns None.
    """
    ...
def test_delete_a_sharedpublished_volume_whilst_the_nexus_node_is_inaccessible():
    """delete a shared/published volume whilst the nexus node is inaccessible."""
    # NOTE(review): docstring-only placeholder -- test body not implemented yet.
def test_sanitize(coresys):
"""Test event sanitation."""
event = {
"tags": [["url", "https://mydomain.com"]],
"request": {
"url": "https://mydomain.com",
"headers": [
["Host", "mydomain.com"],
["Referer", "https://mydomain.com/api/oppio_ing... | 3,286 |
def sf_imread(
img_path,
plot=True,
):
"""
Thin wrapper around `skimage.io.imread` that rotates the image if it is
to be used for plotting, but does not if it is to be used for measurements.
Parameters
----------
img_path : str
Path to image
plot : bool
Determines wh... | 3,287 |
def init_config():
"""Called at the end of package import to read initial configuration and setup cloud computing.
"""
from . import packages
config.update(CONFIG_DEFAULTS)
path = _get_config_path()
if os.path.exists(path):
try:
with open(path, 'r') as infile:
... | 3,288 |
def init():
"""
Turns 'jpg' to reality array
Initializes x,y,result_map to some values
"""
global reality, real_coordinates, bot_center
im = Image.open('map.jpg')
reality = array(im)
# TODO Starting Point Issue
real_coordinates.append([reality.shape[1] / 2, reality.shape[0] / 2])
... | 3,289 |
def test_plugin_ws_url_attributes(spf, path, query, expected_url):
"""Note, this doesn't _really_ test websocket functionality very well."""
app = spf._app
test_plugin = TestPlugin()
async def handler(request):
return text('OK')
test_plugin.websocket(path)(handler)
spf.register_plugin(... | 3,290 |
def less_than(x, y, force_cpu=None, cond=None, name=None):
"""
${comment}
Args:
x(Tensor): ${x_comment}.
y(Tensor): ${y_comment}.
force_cpu(${force_cpu_type}): ${force_cpu_comment}.
cond(Tensor, optional): Optional output which can be any created Tensor
that mee... | 3,291 |
def get_page_namespace(url_response):
"""
:type element: Tag
:rtype: int
"""
keyword = '"wgNamespaceNumber"'
text = url_response
if keyword in text:
beginning = text[text.find(keyword) + len(keyword):]
ending = beginning[:beginning.find(',')]
ints = re.findall('\d+', ending)
if len(ints) > 0:
return i... | 3,292 |
def batch_to_seq(h, nbatch, nsteps, flat=False):
"""
Assumes Time major data!!
x.shape = [nsteps, nbatch, *obs_shape]
h = x.reshape([-1, *x.shape[2:]]))
"""
if flat:
h = tf.reshape(h, [nsteps, nbatch])
else:
h = tf.reshape(h, [nsteps, nbatch, -1])
return [tf.squeeze(v, [0... | 3,293 |
def debug(*args,**kwargs):
"""A super easy way to visualize Klamp't items.
The argument list can be a list of Klamp't items, and can also include
strings or dicts. If a string precedes an item, then it will be labeled
by the string. If a dict follows an item, the dict will specify
attributes for... | 3,294 |
def cross_validation_visualization_due(params, mse_tr, mse_te, param2, tr2, te2, params_name='', prname2='', title='',
error_name=''):
"""visualization the curves of mse_tr and mse_te."""
plt.semilogx(params, mse_tr, marker=".", color='r', label='train error ' + params_nam... | 3,295 |
def get_site_camera_data(site_no):
    """Orchestrate fetching camera data and return the matching site dictionary."""
    raw_json = get_json_camera_data()
    cameras = json_raw_to_dictionary(raw_json)
    return find_site_in_cameras(site_no, cameras)
def get_args():
"""
Get arguments to the tool with argparse
:return: The arguments
"""
parser = argparse.ArgumentParser()
parser.add_argument("filename", action='store',
help='.xyz file(s) with optimised geometries from which to make .top and .gro files', nargs="+")
p... | 3,297 |
def test_del_invite_null_email(client):
"""Super admin deletes invite without specifying email."""
response = client.delete(
tests.DDSEndpoint.USER_DELETE,
headers=tests.UserAuth(tests.USER_CREDENTIALS["superadmin"]).token(client),
json={"email": None, "is_invite": True},
)
asser... | 3,298 |
def find_contam(df, contaminant_prevalence=0.5, use_mad_filter=False):
"""Flag taxa that occur in too many samples."""
taxa_counts = {}
for taxa in df['taxa_name']:
taxa_counts[taxa] = 1 + taxa_counts.get(taxa, 0)
thresh = max(2, contaminant_prevalence * len(set(df['sample_name'])))
contami... | 3,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.