content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def is_modified(filename: str) -> bool:
    """Return True if *filename* changed between the old and new hash snapshots.

    A file with no entry in the old snapshot is treated as modified.
    """
    global new_hashes
    global old_hashes
    # Never hashed before: count it as modified.
    if filename not in old_hashes:
        return True
    return old_hashes[filename] != new_hashes[filename]
def _is_url_without_path_query_or_fragment(url_parts):
"""
Determines if a URL has a blank path, query string and fragment.
:param url_parts: A URL.
:type url_parts: :class:`urlparse.ParseResult`
"""
return url_parts.path.strip('/') in ['', 'search'] and url_parts.query == '' \
and ... | 2,201 |
def delay_waterfall(uvp, blpairs, spw, pol, component='abs-real',
average_blpairs=False, fold=False, delay=True,
deltasq=False, log=True, lst_in_hrs=True,
vmin=None, vmax=None, cmap='YlGnBu', axes=None,
figsize=(14, 6), force_plot=False,... | 2,202 |
def delete_kind_cluster(name):
    """Remove the named kind cluster from the stored configuration."""
    cfg = get_config()
    cfg.remove_section("kind.{}".format(name))
    # A current-cluster pointer may now be stale; blank it out if set.
    if cfg.get("kind", "current-cluster", fallback=None):
        cfg.set("kind", "current-cluster", "")
    write_config(cfg)
def wgs84_distance(lat1, lon1, lat2, lon2):
"""Distance (in meters) between two points in WGS84 coord system."""
dLat = math.radians(lat2 - lat1)
dLon = math.radians(lon2 - lon1)
a = (math.sin(dLat / 2) * math.sin(dLat / 2) +
math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) *
... | 2,204 |
def init(vagrant=False):
    """Prepare a local machine for development.

    :param vagrant: unused in this body — presumably toggles vagrant-specific
        setup elsewhere; TODO confirm with callers.
    """
    install_requirements()
    local('createdb %(project_name)s' % env)  # create postgres database
    manage('migrate')  # apply database migrations
def optimize_acq_func(acq_func: AcquisitionFunction, bounds=None, options=None):
"""Optimizes the acquisition function"""
# optimize
candidates, _ = optimize_acqf(
acq_function=acq_func,
bounds=bounds,
q=1,
num_restarts=20,
raw_samples=512,
options=options,
... | 2,206 |
def _recover_distributor(lb_id):
"""Get cached Distributor object or generate from ovs external_ids
{
'dist-lb-id': lb_id,
'dist-vip': vip,
'dist-size': size,
'dist-status': status,
'dist-mac': mac,
'dist-hash-fields': field-list,
'dist-ofport': ofport, #... | 2,207 |
def rprecision_score(
y_true, y_pred, ratio: float = 1.0, negative_class=-1,
zero_division: Literal["warn", 0, 1] = "warn"
):
"""Calculate r-precision score for multiclass classification.
The variables y_true and y_pred are the true and predicted labels
respectively. The variable ratio defines the... | 2,208 |
def export_gpkg(dataframes, gpkg_path):
"""Receives a dictionary of pandas dataframes and exports them as geopackage layers."""
# Create gpkg from template if it doesn't already exist.
if not os.path.exists(gpkg_path):
copy(os.path.abspath("../data/empty.gpkg"), gpkg_path)
# Export target data... | 2,209 |
def setup_dispatcher(dp):
"""
Adding handlers for events from Telegram
"""
# commands
dp.add_handler(CommandHandler("start", commands.command_start))
dp.add_handler(CommandHandler("help", commands.command_help))
# admin & mod commands
dp.add_handler(CommandHandler("admin", admin.ad... | 2,210 |
def get_version():
    """Return the package version parsed from ``leaked/__init__.py``.

    Looks for a line of the form ``__VERSION__ = "<version>"`` and evaluates
    the right-hand side as a Python literal.
    """
    pattern = re.compile(r"__VERSION__\s+=\s+(.*)")
    with open("leaked/__init__.py", "rb") as init_file:
        source = init_file.read().decode("utf-8")
    match = pattern.search(source)
    return str(ast.literal_eval(match.group(1)))
def dir_keys(path):
"""A function to take a path, and return a list of all the numbers in the path. This is
mainly used for sorting
by the parameters they contain"""
regex = '[-+]?[0-9]+(?:\.[0-9]+)?(?:[eE][-+]?[0-9]+)?' # matching any floating point
m = re.findall(regex, path)
if(m): val ... | 2,212 |
def generate_data(n=5, T=1000, random_state=None, initial_data=None):
"""
Parameter
---------
n : int
number of variables
T : int
number of samples
random_state : int
seed for np.random.seed
initial_data : list of np.ndarray
dictionary of initial datas
"""... | 2,213 |
def _get_paragraphs(paragraphs: List[str]) -> List[str]:
"""
Returns the paragraphs of an article's body, annotated with HTML tags.
Args:
paragraphs (:obj:`List[str]`):
List of strings denoting paragraphs.
Returns:
:obj:`List[str]`:
List of paragraphs annotat... | 2,214 |
def calculate_kde(
ascending: bool = True,
evaluate: bool = False,
input_ts="-",
columns=None,
start_date=None,
end_date=None,
clean=False,
skiprows=None,
index_type="datetime",
source_units=None,
target_units=None,
names=None,
):
"""Return the kernel density estimati... | 2,215 |
def EPmulk(a, da, k):
    """Multiply a value by a constant with error propagation: C = A * k.

    The value scales by ``k``; the uncertainty scales by ``|k|``.
    Returns the (value, uncertainty) pair.
    """
    value = a * k
    uncertainty = np.absolute(da * k)
    return value, uncertainty
def addDeterminants(iterative_interactions, version, options=None):
"""
The iterative pKa scheme. Later it is all added in 'calculateTotalPKA'
"""
# --- setup ---
iteratives = []
done_residue = []
#debug.printIterativeDeterminants(iterative_interactions)
# creating iterat... | 2,217 |
def configuration(parent_package='', top_path=None):
"""
A utility function from numpy.distutils.misc_util to compile Fortran and C
codes. This function will be passed to numpy.distutil.core.setup().
"""
config = Configuration(None, parent_package, top_path)
# Define extern directory where ext... | 2,218 |
def main(args):
"""Main function for adding diffusion error to
a KDE of buoyant density values.
Parameters:
args : dict
See ``diffusion`` subcommand
"""
kde2d = Utils.load_kde(args['<fragment_kde>'])
# creating a diffusion index of guassian distributions
start,stop,ste... | 2,219 |
def load_plugins():
""" Helper function that attempts to load all the plugins """
# provide some info about the env in use
import platform
log.debug("Python %s %s on %s %s (%s)" % (platform.python_version(), platform.architecture()[0],
platform.uname()[0], ... | 2,220 |
def prepare_recent_years():
"""
Splits PUMS data dictionaries for recent years, and creates Values
dictionary JSON files and types json file.
"""
dictionaries = set([])
for year in recent_years:
if year > 2017:
dictionaries.add(year)
elif year > 2012:
dic... | 2,221 |
def show_plot(pyplt=plt, prompt=''):
"""
Close and display the current plot. Matplotlib wrapper.
This function allows a caller to finish and display a plot
without needing to import the matplotlib library separately.
:Parameters:
pyplt: matplotlib pyplot object, optional
A top... | 2,222 |
def decrypt_location(location):
"""Decrypts the `location` field in Xiami responses to URL."""
if not location:
return None
rows, url = int(location[:1]), location[1:]
urllen = len(url)
cols_base = urllen // rows # basic column count
rows_ex = urllen % rows # count of rows that ha... | 2,223 |
def select_regularization_parameter(n_samples: int = 50, n_evaluations: int = 500):
"""
Using sklearn's diabetes dataset use cross-validation to select the best fitting regularization parameter
values for Ridge and Lasso regressions
Parameters
----------
n_samples: int, default=50
Numbe... | 2,224 |
def upgrade(db_url: str = DEFAULT_DB, revision='head', cmd_opts=None):
"""Upgrade the given database to revision.
db_url: str [default: 'sqlite:////tmp/ngshare.db']
The SQLAlchemy database url, e.g. `sqlite:///ngshare.db`.
revision: str [default: head]
The alembic revision to upgrade to.
... | 2,225 |
def start(sleep: float = 0) -> None:
    """Run MocaVirtualDM in background.

    :param sleep: seconds to wait before spawning the background process.
    """
    mzk.sleep(sleep)
    # Launch moca.py as a detached background process via nohup, discarding
    # stdout/stderr. NOTE(review): '&>' is bash-specific redirection —
    # presumably mzk.call uses bash as the shell; confirm.
    mzk.call(
        f'nohup {mzk.executable} "{core.TOP_DIR.joinpath("moca.py")}" run &> /dev/null &',
        shell=True
    )
def test_should_parse_word2vec_with_single_entry(load_embedding_func, tmp_path):
"""Loading a Word2Vec Embedding should pass for single word"""
# GIVEN
word2vec_path = create_tmp_word_embedding(
tmp_path,
"""
1 2
word 1.0 2.0
""",
)
# WHEN
embeddi... | 2,227 |
def sqd_yinfast(samples):
""" compute approximate sum of squared difference
Using complex convolution (fast, cost o(n*log(n)) )"""
# yin_t(tau) = (r_t(0) + r_(t+tau)(0)) - 2r_t(tau)
B = len(samples)
W = B//2
yin = np.zeros(W)
sqdiff = np.zeros(W)
kernel = np.zeros(B)
# compute r_(t+... | 2,228 |
def get_colours_extend(graph_size, start_set, end_set, source, target, reachable=None):
"""
Get colours for nodes including source and target nodes.
Blue nodes are those in the source set.
Orange nodes are those in the start set, not in the source set.
Green nodes are those reachable from the sourc... | 2,229 |
def init_signals(sig_handler):
    """Install *sig_handler* as the process exit handler for SIGTERM and SIGINT."""
    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, sig_handler)
def test_re_dg7_re_dg7_v(mode, save_output, output_format):
"""
TEST :branch : base='gMonth', pattern='[123456789]|(10|11|12)',
value='9', type='valid', RULE=''
"""
assert_bindings(
schema="msData/regex/reDG7.xsd",
instance="msData/regex/reDG7.xml",
class_name="Doc",
... | 2,231 |
def suspend_circuit():
    """
    Suspends the circuits for some seconds, allowing the user to exit the house without playing the song.
    """
    # Temporarily disable the circuit; the suspend duration is managed by the
    # circuit object itself.
    circuit.suspend()
    # EXIT_HOUSE_TIMER drives the countdown shown on the rendered page.
    return render_template("suspend.html", seconds=EXIT_HOUSE_TIMER, name=get_guest_name())
def get_scalar_data_from_path(udatapath, name='pressure', x0=0, x1=None, y0=0, y1=None, z0=0, z1=None,
t0=0, t1=None, inc=1, frame=None, return_xy=False, verbose=True,
slicez=None, crop=None, mode='r',
reverse_x=False, reverse_y=False, reverse_z=Fa... | 2,233 |
def create_transformed_df(old_df, elem_list, features_list):
"""elem_list should be in type list"""
from statistics import mean
new_dict = {}
for index, elems in zip(old_df.index, old_df[elem_list]):
for elem in elems:
if elem in new_dict.keys():
for j, feature in enu... | 2,234 |
def sem_id_semester_get(semester, obs_id):
"""
retrieves all the sem_id associated with an observer for the semester.
:param semester: semester id
:type semester: str
:param obs_id: observer id
:type obs_id: int
:rtype: List[str]
"""
semester_list = []
sem_ids = utils.get_prop... | 2,235 |
def getLabels (dataMatrix, classOfInterest):
"""
Gets labels on a per class basis that will inputted to the randomForest function
Parameters
----------
dataMatrix : anndata object
The data file of interest
classOfInterest : str
The class you will split the data by in the se... | 2,236 |
def publish(path: Path = Path(config.PACKAGE_CONFIG)) -> None:
"""Upload a pacakge to the package index"""
if not path.is_file():
typer.echo(f"{config.PACKAGE_CONFIG} not found")
raise typer.Abort()
contents = json.loads(path.read_text())
try:
contents = validate_database_json... | 2,237 |
def load_file(file_location):
"""
Opens a given file and returns its contents.
:param str file_location: The absolute path to the file
:rtype: str
:return: The contents of the file
"""
with open(file_location, 'r') as file_contents:
contents = file_contents.read()
return conte... | 2,238 |
def create_bam(data, args):
"""
aligner and conversion to BAM file
"""
workdir = safe_makedir("align")
sample = data['name']
# workdir = op.join("align", sample)
data['final_bam'] = _align(data['trimmed'], sample, op.abspath(workdir),
args.index, args.is_direct... | 2,239 |
def calculateStorageLocationsDistance(D_loc: pd.DataFrame, input_loccodex: float,
input_loccodey: float, output_loccodex: float,
output_loccodey: float) -> pd.DataFrame:
"""
calculate the sum of the rectangular distances from
Input ... | 2,240 |
def join(words, sep=' '):
    """join(list [,sep]) -> string

    Concatenate the strings in *words*, inserting *sep* between consecutive
    items. The separator defaults to a single space.
    (joinfields and join are synonymous)
    """
    return sep.join(words)
def lstsq_with_smoothness_prior(data:ArrayLike) -> np.ndarray:
""" not finished,
Parameters:
-----------
Returns:
--------
Reference:
----------
[1]. Sameni, Reza. "Online Filtering Using Piecewise Smoothness Priors: Application to Normal and Abnormal Electrocardiogram Denoising."... | 2,242 |
def pickle(obj):
""" Creates a serialization of the provided object
Serialization is done by :mod:`pickle` module. If :mod:`cPickle` package is
available, that package will be used instead, yielding a gain in speed.
Parameters
----------
obj: :obj:`obj`
Object to be serialized.
Re... | 2,243 |
def calc_E_E_AP_d_t(n_p):
"""1 時間当たりの家電の消費電力量
Args:
n_p(float): 仮想居住人数 仮想居住人数
Returns:
ndarray: 1 時間当たりの家電の消費電力量
"""
schedule = load_schedule()
schedule_app = get_schedule_app(schedule)
if 1 <= n_p and n_p <= 2:
E_E_AP_1_d_t = get_E_E_AP_p_d_t(1, schedule_app)
... | 2,244 |
def Squeeze_forward(op: Operation, values: List[torch.Tensor], ctx: TorchBackendContext = None, **kwargs) -> torch.Tensor:
"""
Remove single-dimensional entries from the shape of a tensor.
Takes an input axes with a list of axes to squeeze.
If axes is not provided, all the single dimensions will be re... | 2,245 |
def rm_empty_dir(path):
    """
    Remove the directory `path` if it is a directory and empty.
    If the directory does not exist or is not empty, do nothing.

    :param path: filesystem path of the directory to remove.
    """
    try:
        os.rmdir(path)
    # os.rmdir raises OSError both when the path is missing and when the
    # directory still has entries; either way this is a best-effort no-op.
    except OSError:
        pass
async def test_validation_event(
loop, bus: lightbus.path.BusPath, dummy_api, mocker, worker: Worker
):
"""Check validation happens when firing an event"""
bus.client.register_api(dummy_api)
config = Config.load_dict({"apis": {"default": {"validate": True, "strict_validation": True}}})
bus.client.co... | 2,247 |
def format_test_output(test_name, test_res, H0_unit_root=True):
"""
Helper function to format output. Return a dictionary with specific keys. Will be used to
construct the summary data frame for all unit root tests.
TODO: Add functionality of choosing based on the max lag order specified by user.
... | 2,248 |
def build_dataset(instruction_dicts,
dataset_from_file_fn,
shuffle_files=False,
parallel_reads=64):
"""Constructs a `tf.data.Dataset` from TFRecord files.
Args:
instruction_dicts: `list` of {'filepath':, 'mask':, 'offset_mask':}
containing the informa... | 2,249 |
def downloader(url: str, local_path: str, tracker: ProgressTracker, chunk_size: int):
"""
Download the file pointed at by the URL to the local path.
:param url: The URL of the file to be downloaded.
:param local_path: The local name of the file to be downloaded
:param tracker: Tracks information ab... | 2,250 |
def _SetRunOptionInRequest(run_option, run_schedule, request, messages):
"""Returns request with the run option set."""
if run_option == 'manual':
arg_utils.SetFieldInMessage(
request,
'googleCloudDatacatalogV1alpha3Crawler.config.adHocRun',
messages.GoogleCloudDatacatalogV1alpha3AdhocRu... | 2,251 |
def test_rank_closest():
"""test if phoneme-inventory is ranked correctly
according to feature vectore distance to a given phoneme"""
# set up custom class, create instance of it
class EtymMonkeyrank_closest:
def __init__(self):
self.phoneme_inventory, self.dm_called_with = None, []... | 2,252 |
def create_virtual_machine(module, azure):
"""
Create new virtual machine
module : AnsibleModule object
azure: authenticated azure ServiceManagementService object
Returns:
True if a new virtual machine was created, false otherwise
"""
name = module.params.get('name')
hostname =... | 2,253 |
def calcCumulOverlap(modes1, modes2, array=False):
"""Returns cumulative overlap of modes in *modes2* with those in *modes1*.
Returns a number of *modes1* contains a single :class:`.Mode` or a
:class:`.Vector` instance. If *modes1* contains multiple modes, returns an
array. Elements of the array corresp... | 2,254 |
def apply_ntimes(func, n, args, verbose=True, timeout=None):
"""
Applies `n` times the function `func` on `args` (useful if, eg, `func` is partly random).
Parameters
----------
func : function
func must be pickable, see https://docs.python.org/2/library/pickle.html#what-can-be-pickled-and-un... | 2,255 |
def travel_time_without_Rebalancing(tnet, i, j, exo=0):
"""
evalute the travel time function for edge i->j
Parameters
----------
tnet: transportation network object
i: starting node of edge
j: ending node of edge
Returns
-------
float
"""
return sum(
[tnet.fcoe... | 2,256 |
def CleanDatanode(vm):
"""Delete Hadoop data from 'vm'."""
vm.RemoteCommand('rm -rf {0}'.format(
posixpath.join(vm.GetScratchDir(), 'hadoop'))) | 2,257 |
def crawl_mean_temp_for_dates():
    """Get mean temperature for dates."""
    # TODO: not implemented yet — this stub currently does nothing and
    # implicitly returns None.
def twistless(*args):
"""
Wraps the entry point function, this function should setup and run a
twisted reactor.
A twisted task will be created to constantly schedule other stackless
tasklets as often as the timesched argument.
"""
def _twistless(func):
"""
Wrap the given fun... | 2,259 |
def enhance_with_function(images, labels, ratio, enhance_func):
"""
:param images:
:param labels:
:param ratio: the ratio of max input class. for example, highest sample count is 1000, ratio is 3, the result
will be around 1000 * 3 * how_many_classes
:param enhance_func the func used for enhance... | 2,260 |
async def port_create(
    request: Request,
    server_id: int,
    port: PortCreate,
    db=Depends(get_db),
    user=Depends(get_current_active_admin),
):
    """
    Create a new port on server

    Requires an authenticated admin user (enforced by the
    ``get_current_active_admin`` dependency; ``user`` is otherwise unused).
    Returns the newly created port record.
    """
    db_port = create_port(db, server_id, port)
    # NOTE(review): presumably re-applies traffic-control (tc) rules for the
    # new port — confirm against trigger_tc's definition.
    trigger_tc(db_port)
    return db_port
def main():
"""First function to be called"""
# Clear the screen using module function.
clear_screen_module.clear_screen()
print("This script prints absolute paths of all files in current directory.\n")
current_dir = os.getcwd()
print(f"Current directory: {current_dir}\n")
print("Files in cu... | 2,262 |
def test_CursorDB_str(data) -> None:
    """Testing CursorDB ``__str__`` datamethod."""
    expected = "CursorDB(aa=[], bb=['cc'], cc=['bb'], ddddd=['ffffff'], ffffff=['ddddd'])"
    assert CursorDB(data).__str__() == expected
def rectangle_area(base, height):
    """Return the area of a rectangle given its base and height.

    Both arguments are coerced to float.

    :raises ValueError: if either dimension is negative.
    """
    width = float(base)
    tall = float(height)
    if width < 0.0 or tall < 0.0:
        raise ValueError('Negative numbers are not allowed')
    return width * tall
def create_update_stack_set(stack_set_name, stack_set_accounts):
"""
Creates stack set with the specified accounts
:param stack_set_name: Name of CloudFormation StackSet to create/update
:param stack_set_accounts: Accounts were stackset instances should be created
:return:
"""
try:
... | 2,265 |
def a_star(G: PCFG):
"""
A generator that enumerates all programs using A*.
Assumes that the PCFG only generates programs of bounded depth.
"""
frontier = []
initial_non_terminals = deque()
initial_non_terminals.append(G.start)
heappush(
frontier,
(
-G.max_pr... | 2,266 |
def pipe(*args, **kwargs):
"""A processor that replaces the text of a field of an item.
Args:
item (dict): The entry to process
kwargs (dict): The keyword arguments passed to the wrapper
Kwargs:
conf (dict): The pipe configuration. Must contain the key 'rule'.
rule (di... | 2,267 |
def move_piece(x, y, new_x, new_y, board, board_turtles, SYMBOL_DICT, BOARD_DIMENSION):
"""
This function only moves pieces and doesn't apply valid logic whether they should be there
This function will only replace what is in the tile
"""
print("Moving ", x, y, "to", new_x, new_y)
# replace pi... | 2,268 |
def adjoint(g):
    """Return the 6x6 adjoint of a rigid body transformation g.

    Layout: [[R, skew(p) @ R], [0, R]] where R is the rotation block and p
    the translation of g.
    """
    rotation = g[:3, :3]
    translation = g[:3, 3]
    result = np.zeros((6, 6))
    result[:3, :3] = rotation
    result[3:, 3:] = rotation
    result[:3, 3:] = skew(translation) @ rotation
    return result
def dmp_rr_yun0_sqf_list(f, u, K):
"""Compute square-free decomposition of ``f`` in zero-characteristic ring ``K``.
References
==========
* :cite:`LeeM2013factor`, page 8
"""
if dmp_ground_p(f, None, u):
return []
result, count = [], 1
qs = [dmp_diff_in(f, 1, i, u, K) for i i... | 2,270 |
def parse_group(rule):
"""
Parse the group line
"""
parser = argparse.ArgumentParser()
rules = shlex.split(rule)
rules.pop(0)
parser.add_argument("--name", dest="name", action="store")
parser.add_argument("--gid", dest="gid", action="store")
args = clean_args(vars(parser.parse_args(... | 2,271 |
def register_module():
"""Registers this module for use."""
def on_module_disable():
tags.Registry.remove_tag_binding(TextFileUploadTag.binding_name)
tags.EditorBlacklists.unregister(
TextFileUploadTag.binding_name,
tags.EditorBlacklists.COURSE_SCOPE)
tags.Editor... | 2,272 |
def angle2trig(theta):
"""Convert angle to a reportlab ready tuple.
Arguments:
- theta - Angle in degrees, counter clockwise from horizontal
Returns a representation of the passed angle in a format suitable
for ReportLab rotations (i.e. cos(theta), sin(theta), -sin(theta),
cos(theta) tuple)
... | 2,273 |
def get_state_z0_pure_state_vector() -> np.ndarray:
    """Returns the pure state vector for :math:`|0\\rangle`.

    Returns
    -------
    np.ndarray
        Complex two-vector (1, 0): the computational-basis |0> state.
    """
    return np.array([1, 0], dtype=np.complex128)
def HSV_to_CMYKratio(hsv):
    """Converts HSV color space to CMYK (ratio representation).

    Conversion goes through RGB as an intermediate representation.
    """
    return RGB_to_CMYKratio(HSV_to_RGB(hsv))
def delete_single_culture(user_id, culture_id):
"""Delete a culture."""
try:
culture = Culture.query.filter_by(user_id=user_id).filter_by(culture_id=culture_id).first()
if not culture:
response_object = {
'status': 'fail',
'message': f'{culture_id} doe... | 2,276 |
def create_atomic_chunk(im, chunk_coord, aff_dtype=np.float32, verbose=True):
""" Creates single atomic chunk
:param im: IngestionManager
:param chunk_coord: np.ndarray
array of three ints
:param aff_dtype: np.dtype
np.float64 or np.float32
:param verbose: bool
:return:
"""
... | 2,277 |
def add_adult(request):
"""
Add a new adult record
:param request:
:return:
"""
args = dict()
app = AppUtil.get_by_user(user=request.user)
if request.method == 'POST':
form = AddAdultForm(request.POST)
if form.is_valid():
adult = form.save(commit=False)
... | 2,278 |
def maybe_download_and_extract(url, dst_dir):
"""Download and extract model tar file.
If the pretrained model we're using doesn't already exist, this function
downloads it from the TensorFlow.org website and unpacks it into a directory.
Args:
url: Web location of the tar file containing the pret... | 2,279 |
def test_grad_hermite_multidimensional_vs_finite_differences(tol, renorm):
"""Tests the gradients of hermite polynomials. The gradients of parameters are tested by finite differences."""
d = 4
R = np.random.rand(d, d) + 1j * np.random.rand(d, d)
R += R.T
y = np.random.rand(d) + 1j * np.random.rand(d... | 2,280 |
def replace(target_obj):
"""A decorator to replace the specified obj.
`target_obj` can be a class or a function.
Example:
```python
class A:
def f(self):
print('class A')
@replace(A)
class B:
def f(self):
print('class B')
```
Args:
... | 2,281 |
def get_layers(model, filter_regexp):
"""
Filters out the layers according to a regexp. Note that
we omit biases.
Args:
- model: a nn.Module
- filter_regexp: a regexp to filter the layers to keep
according to their name in model.named_parameters().
For insta... | 2,282 |
def decrypt(data: bytes,
password: Union[str, bytes]) -> bytes:
"""
decrypt data
:param data: encrypted data
:param password: password
:return: plain data
"""
__data = gzip_decompress(data[4:]) if data.startswith(b'moca') else data
iv, cipher = __data[:AES.block_size], __data... | 2,283 |
def stream_from_url(*args, **kwargs):
"""
Save the resource as a file on disk iteratively by first asking
for the 'content-length' header entry and downloading in chunks.
By default we will retry if an HTTP error arises.
By default we will uncompress a downloaded file if it is zipped.
"""
# ... | 2,284 |
def modulo_3(lhs, ctx):
    """Element ǒ
    (num) -> a % 3
    (str) -> a split into chunks of size 2
    """
    ts = vy_type(lhs)
    if ts == NUMBER_TYPE:
        return lhs % 3
    elif ts == str:
        return [lhs[i : i + 2] for i in range(0, len(lhs), 2)]
    else:
        # Non-scalar input: apply element-wise.
        return vectorise(modulo_3, lhs, ctx=ctx)
def gradstep(P,dP,drate,mP,mrate,grad,nesterov=False):
"""
Performs a gradient update step on parameters P,
using gradient dP with learning rate (drate), and
momentum vector mP with momentum rate (mrate).
grad() must be a function that computes:
dP[:] = gradient at current P
where 'grad'... | 2,286 |
def _load_score_submission(submission_path, metric, step, data_label=None):
"""Load the score for a single submission."""
if data_label is None:
training_output_path = os.path.join(
submission_path, 'training_output')
else:
training_output_path = os.path.join(
submiss... | 2,287 |
def main(args):
"""
Returns
-------
"""
# Create training dataset
if args.in_fns is not None:
if args.path_PERC is not None:
logging.info('Preprocess training dataset including output quantiles')
logging.info(f'with real_geography flag set to {args.real_geo... | 2,288 |
def ingest_(droplet_db_file, master_cur, master_con):
"""
INGESTS DATA FROM THE DROPLET DB TO MASTER DB
"""
query = "select indeed_id, city_name, country_code from indeed_resumes;"
con = sql.connect(droplet_dbs_folder+droplet_db_file, timeout=10)
cur = con.cursor()
cur.execute(query)
for indeed_id, city_name, c... | 2,289 |
def geolocalizarCiudades(lista_ciudades: list):
"""Para una lista con nombres de ciudades devuelve una fila de DataFrame.
Parámetros
----------
lista_ciudades : list
Lista de nombres de ciudades.
Devuelve
-------
df_Fila: pandas.DataFrame
Fila de un DataFrame que incluye el... | 2,290 |
def HandleConvPaddingModes(x, padding, kernel_shape, strides):
"""Returns an updated tensor and padding type for REFLECT and SYMMETRIC.
Args:
x: A 4D tensor with shape [batch_size, height, width, depth].
padding: Padding mode (SAME, VALID, REFLECT, or SYMMETRIC).
kernel_shape: Shape of convolutio... | 2,291 |
def uuid1_():
    """Generate a GUID (a version-1 UUID rendered as a string)."""
    guid = uuid.uuid1()
    return str(guid)
def default_if_none(default):
    """Implements the rule: default if v is None else v"""
    def _is_none(v):
        # Predicate handed to the generic default_if_true combinator.
        return v is None
    return default_if_true(_is_none, default)
def test_main():
""" Spawns a io.StringIO daemon in a temporary venv and asserts that it behaves exactly like a local instance """
# --create temporary new python environment
python_exe = _create_temporary_venv('tmp', ".".join(["%s" % s for s in sys.version_info[0:2]]))
TEST_STR = 'str\nhello'
# ... | 2,294 |
def finalize_queues(coord, threads):
    """ Finalized the queues used to enqueue examples

    :param coord: coordinator object (presumably a tf.train.Coordinator —
        TODO confirm) used to signal and join the runner threads.
    :param threads: the enqueue threads to join.
    """
    # When done, ask the threads to stop.
    coord.request_stop()
    # And wait for them to actually do it.
    coord.join(threads)
def fftshift(input, bitmask, b=None):
"""
Apply fftshift along dimensions selected by the {bitmask}.
:param bitmask long:
:param input array:
:param b bool: apply ifftshift
"""
usage_string = "fftshift [-b] bitmask input output"
cmd_str = f'{BART_PATH} '
cmd_str += 'fftshift '
... | 2,296 |
def test_invertibility(txtfile):
"""
roughly, assert txtfile == image_to_txt(txt_to_image(txtfile))
ignoring whitespace before and after txt
"""
pngfile = txtfile.replace('.txt', '.png')
txt_to_image(txtfile, pngfile)
new_txtfile = txtfile.replace('.', '_new.')
image_to_txt(p... | 2,297 |
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
"""Unload Unifi Protect config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in METEOBRIDGE_... | 2,298 |
def _new_correlation_matrix_inverse(new_data, old_corr_mat_inv):
"""
If old_corr_mat_inv is an approximation for the correlation
matrix inverse of a dataset (p1, ..., pn), then the function
returns an approximatrion for the correlation matrix inverse
of dataset (p1, ..., pn, new_data... | 2,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.