content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def assert_sdf_equal(f1:str, f2:str):
""" Diff two sd files and assert that they are equal
"""
outStrg = io.read_file(f1)
try:
refStrg = io.read_file(f2)
except FileNotFoundError:
raise AssertionError(f"Reference File does not exist {f2} for {f1}")
# drop mol timestamp ... | 5,355,400 |
def test_award_update_from_earliest_transaction():
"""Test awards fields that should be updated with most earliest transaction info."""
award = mommy.make('awards.Award')
mommy.make(
'awards.Transaction',
award=award,
# since this is the award's first transaction,
# the txn ... | 5,355,401 |
def _reorder_for_qbb_experiment(df: pd.DataFrame) -> pd.DataFrame:
"""By default the entries are ordered alphabetically. We want SPOTA, EPOpt, PPO"""
print("Changed the order")
return df.iloc[[2, 0, 1]] | 5,355,402 |
def get_trace_sink_output(project: Optional[pulumi.Input[Optional[str]]] = None,
trace_sink_id: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTraceSinkResult]:
"""
Get a trace sink by name under the parent... | 5,355,403 |
def get_field(self, *args, is_squeeze=False, node=None, is_rthetaz=False):
"""Get the value of variables stored in Solution.
Parameters
----------
self : SolutionData
an SolutionData object
*args: list of strings
List of axes requested by the user, their units and values (optional)
... | 5,355,404 |
def get_folder_status(dirname, with_message=False):
"""获取目录状态
Args:
dirname(str): 目录路径
with_message(bool): 是否需要返回状态文件内的信息
"""
status = None
closest_time = 0
message = ''
for status_type in [
DatasetStatus, TaskStatus, PredictStatus, PruneStatus,
Downl... | 5,355,405 |
def handle_session_event(event: EventData) -> core_pb2.SessionEvent:
"""
Handle session event when there is a session event
:param event: event data
:return: session event
"""
event_time = event.time
if event_time is not None:
event_time = float(event_time)
return core_pb2.Sessi... | 5,355,406 |
def extract():
"""
Function to first go to the yahoo financial news page, then acts as a crawler
visit the latest news page, then etract the full news in a variable upload_string, which
is then encoded in UTF-64, which is then pushed as a file to my github repo.
"""
url = urllib2.urlopen("https://in.finan... | 5,355,407 |
def job_builder(meta, valid_meta, workflow, job_dir, out_dir, coprocess=None, other_args="", writeimg=False):
"""Build a list of image processing jobs.
Args:
meta: Dictionary of processed image metadata.
valid_meta: Dictionary of valid metadata keys.
workflow: PlantCV imag... | 5,355,408 |
def bump_patch(version):
    """Increase the patch component of a version string by one.

    :param: version string
    :return: the raised version string
    :rtype: str
    """
    parts = parse(version)
    bumped_patch = parts['patch'] + 1
    return format_version(parts['major'], parts['minor'], bumped_patch)
def _extract_bike_location(bike, lon_abbrev='lon'):
"""
Standardize the bike location data from GBFS. Some have extra fields,
and some are missing fields.
Arguments:
bike (dict[str, str]): A GBFS bike object as it appears in free_bike_status.json
lon_abbrev (str): The abbreviation used for `longitude`
... | 5,355,410 |
def clean_word(word):
    """Normalize a word: lowercase it and trim surrounding whitespace."""
    return word.lower().strip()
def get_batch_size(tracks):
    """
    Return the batch size (first dimension of the shape) of a track-major
    list of possibly-None tracks.
    """
    shape = get_shape(tracks)
    return shape[0]
def find_edges_from_wires(body: TopoDS_Shape) -> set[TopoDS_Edge]:
    """Collect every edge reachable through the body's wires into a set."""
    return {
        edge
        for wire in TopologyExplorer(body, ignore_orientation=False).wires()
        for edge in WireExplorer(wire).ordered_edges()
    }
def plot_israel_map(gis_path=gis_path, rc=rc, ticklabelsize=12, ax=None):
"""general nice map for israel, need that to plot stations,
and temperature field on top of it"""
import geopandas as gpd
import contextily as ctx
import seaborn as sns
import cartopy.crs as ccrs
sns.set_style("ticks",... | 5,355,414 |
def getSentB(text2: str, offsetB: int, nextPoint: int, sentLength: int):
"""
alignSentences auxiliar function to get the sentences of the original text.
"""
posB = text2[offsetB+sentLength:].find('.')
sentLength += posB+1
sentB = text2[offsetB:offsetB+sentLength]
nextPoint = offsetB + sentLe... | 5,355,415 |
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.alter_column('vendors', 'start_date', type_=mysql.DATETIME(),
existing_type=mysql.DATE(), nullable=True)
op.alter_column('vendors', 'end_date', type_=mysql.DATETIME(),
existing_type=m... | 5,355,416 |
def get_mapping_fcost_local(interface, bus_def):
"""
coarse cost function to cheaply estimate local (subset of ports)
interface match to bus_def
"""
cost = _get_mapping_fcost_base(interface, bus_def, penalize_umap=False)
name_cost = _get_name_fcost2(interface, bus_def)
cost.nc = name_cost
... | 5,355,417 |
def cross_compile(ctx, tag=""):
"""
Cross-compiles the trace-agent binaries. Use the "--tag=X" argument to specify build tag.
"""
if not tag:
print("Argument --tag=<version> is required.")
return
print("Building tag %s..." % tag)
env = {
"TRACE_AGENT_VERSION": tag,
... | 5,355,418 |
def base64_image(image: bytes, mime_type: str) -> str:
"""Encode the image for an URL using base64
Args:
image: the image
mime_type: the mime type
Returns:
A string starting with "data:{mime_type};base64,"
"""
base64_data = base64.b64encode(image)
image_data = quote(bas... | 5,355,419 |
def multi_box_head(inputs,
image,
base_size,
num_classes,
aspect_ratios,
min_ratio=None,
max_ratio=None,
min_sizes=None,
max_sizes=None,
steps=None,
... | 5,355,420 |
def expandMask(img, shrink = False, step = 1):
    """Grow a binary mask by `step` pixels, or shrink it when shrink=True.

    Shrinking is implemented as growing the inverted mask and inverting back.
    """
    if shrink:
        img = invert(img)
    # Any pixel whose neighbourhood sum is positive becomes part of the mask.
    grown = jitterSum(img.data, step) > 0
    result = Image(data = grown.astype(numpy.uint8) * 255)
    if shrink:
        result = invert(result)
    return result
def room_upsert(sender, instance, **kwargs):
"""
...
"""
group_name: str = "rooms"
channel_layer = get_channel_layer()
serializer = RoomHeavySerializer(instance)
# print(serializer.data)
async_to_sync(channel_layer.group_send)(
group_name, {"type": "room_event", "method": "U", "... | 5,355,422 |
def isHeader(line):
"""
tests to see if 'line' is in the event file
header
"""
if containsAny(line, 'EVF Filename:', 'Generation Time:', 'Start_time:',
'End_time:', 'events in list)', '#', 'Include:',
'Init_value:'):
return True
elif len(line... | 5,355,423 |
def test_float_notation(value):
    """Round-trip a float value through packb/unpackb and compare."""
    packed = ormsgpack.packb(value)
    assert ormsgpack.unpackb(packed) == value
def redirect_page(source_url, destination_url):
"""returns False is current page is not 200"""
def _check_redirect(full_url):
print('Getting ' + full_url)
response = requests.get(full_url, allow_redirects=False)
if response.status_code == 200:
print("Was 200")
re... | 5,355,425 |
def python_visualization(args):
"""Install Python visualization packages.
:param args: A Namespace object containing parsed command-line options.
"""
if args.install:
cmd = f"{args.pip_install} hvplot matplotlib"
run_cmd(cmd)
if args.config:
pass
if args.uninstall:
... | 5,355,426 |
def semantic_analysis(program, print_results=True):
    """
    Run semantic analysis over a parsed program and return its IR.

    :param program: parsed program tree to analyse (passed to the semantic
        analyser's ``transform``; exact type depends on the parser in use —
        TODO confirm)
    :param print_results: if True, pretty-print the resulting IR via
        ``print_readable_ast``
    :return: the transformed program intermediate representation
    """
    semanter = make_semantic_analyser()
    program_ir = semanter.transform(program)
    if print_results:
        print_readable_ast(program_ir)
    return program_ir
def WrapWithQuotes(text, quote='"'):
""" Wrap the supplied text with quotes
Args:
text: Input text to wrap
quote: Quote character to use for wrapping (default = "")
Returns:
Supplied text wrapped in quote char
"""
if not text.startswith(quote):
text = quote + text
... | 5,355,428 |
def get_fastsync_bin(venv_dir, tap_type, target_type):
"""
Get the absolute path of a fastsync executable
"""
source = tap_type.replace('tap-', '')
target = target_type.replace('target-', '')
fastsync_name = f'{source}-to-{target}'
return os.path.join(venv_dir, 'pipelinewise', 'bin', fastsy... | 5,355,429 |
def login_aws_via_idp(session, username, password, entity_id):
""" Get a SAML assertion and set of AWS roles which can be assumed with the SAML assertion. """
logger.info("Looking up your IdP")
idp_url, idp_form = get_idp_login_form(
session, username, password, entity_id)
logger.info("Logging ... | 5,355,430 |
def fit_linreg(x, y, intercept=True):
"""Simple linear regression: y = kx + b.
Arguments
---------
x: :class:`~numpy.ndarray`
A vector of independent variables.
y: :class:`~numpy.ndarray`
A vector of dependent variables.
intercept: bool
If using steady state assumption f... | 5,355,431 |
def entropy(x, input_as_probabilities):
"""
Helper function to compute the entropy over the batch
input: batch w/ shape [b, num_classes]
output: entropy value [is ideally -log(num_classes)]
"""
if input_as_probabilities:
x_ = torch.clamp(x, min = 1e-8)
b = x_ * torch.log(x_)... | 5,355,432 |
def find_function_in_object(o: object, function_name: str) -> Callable:
"""Finds a callable object matching given function name in given object.
Args:
o: Any object.
function_name: Name of attribute within o.
Returns:
Callable object with name <function_name> in object <o>.
Ra... | 5,355,433 |
def test__vm_prefs_builder__virt_mode__negative_fuzz(value):
    """Fuzz test: invalid virt-mode values must raise an AssertionError."""
    builder = vm.VmPrefsBuilder()
    with pytest.raises(AssertionError):
        builder.virt_mode(value)
def device_id(ctx):
    """Return device index.
    For CPU, the index does not matter. For GPU, the index means which GPU
    device on the machine.
    Parameters
    ----------
    ctx : Device context object.
        Device context.
    Returns
    -------
    int
        The device index.
    """
    # NOTE(review): unimplemented stub — as written it always returns None,
    # despite the documented int return. Confirm whether a backend is
    # expected to provide the real implementation.
    pass
async def refresh(db: AsyncSession, schema: RefreshToken):
"""
Refresh token
:param db: DB
:type db: AsyncSession
:param schema: Refresh token
:type schema: RefreshToken
:return: Access token
:rtype: dict
:raise HTTPException 400: User not found
""... | 5,355,436 |
def gumbel_softmax(logits, temperature, hard=False):
"""Sample from the Gumbel-Softmax distribution and optionally discretize.
Args:
logits: [batch_size, n_class] unnormalized log-probs
temperature: non-negative scalar
hard: if True, take argmax, but differentiate w.r.t. soft sample y
Returns:
[... | 5,355,437 |
def dqn_learn(t, agent, env, env_state, history, args):
"""Learning loop for DeepQAgent"""
step_type, reward, discount, state = env_state
state = copy.deepcopy(state)
# Act
action = agent.act_explore(state)
step_type, reward, discount, successor = env.step(action)
# Learn
if args.cheat... | 5,355,438 |
def isaac_cc_test_group(srcs, deps = [], size = "small", copts = [], **kwargs):
"""
Creates on cc_test target per source file given in `srcs`. The test is given the same name as
the corresponding source file. Only '*.cpp' files are supported. Every test will have the same
dependencies `deps`. The gtest ... | 5,355,439 |
def build_custom_Theta(
data,
data_description=[],
add_constant_term=True,
):
"""
builds a matrix Theta(U) from a predefined set of terms
This is used when we subsample and take all the derivatives point by point or if there is an
extra input to put in.
input:
data: column 0 is... | 5,355,440 |
def run():
"""Requirements for Task 2G"""
# Build list of stations
stations = build_station_list()
# Update latest level data for all stations
update_water_levels(stations)
"""Criteria for flooding:
Relative water level list
Rate of change (rising or falling)
For both catrgories:
... | 5,355,441 |
def load(plugin: pathlib.Path) -> Optional[ModuleType]:
"""Load a specific cemu plugin
Args:
plugin (pathlib.Path): the path of the plugin to load
Returns:
Optional[ModuleType]: the loaded plugin module on success, None if there's no plugin, or it is invalid
"""
try:
if plu... | 5,355,442 |
def png_to_jpeg(png_file, jpeg_file):
    """
    Convert a PNG image to JPEG format.
    :param png_file: full path of .png file
    :param jpeg_file: full path of .jpeg file
    """
    source = PIL.Image.open(png_file)
    # JPEG has no alpha channel, so force an RGB conversion before saving.
    source.convert('RGB').save(jpeg_file, 'JPEG')
def get_standard_logger(name, log_dir=None):
"""Function to return an instance of type logger."""
if log_dir is None:
log_dir = '/Users/teaton/dev/fantasyAM/logs'
time_stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
logging.basicConfig(level=logging.INFO,
format='%(asct... | 5,355,444 |
def getDatabaseConnection(databaseString):
"""Attempt connection to the database"""
sqlsession = None
try:
sqlengine = sqlalchemy.create_engine(databaseString)
SQLSession = sessionmaker(bind=sqlengine)
sqlsession = SQLSession()
print("Connection to " + databaseString + " successfull")
except Exception a... | 5,355,445 |
def prepare_multiple_configs(conf):
""" This function uses workload_1 as a base, and then duplicates its configuration for all
other workloads 2,3... while leaving properties already defined in subsequent workloads (2,3..)
unchanged.
"""
keys_starting_with_workload = []
for k, _ in conf.iterite... | 5,355,446 |
def undo_coefficient_scaling(clf = None, coefficients = None, intercept = 0.0, scaler = None):
"""
given coefficients and data for scaled data, returns coefficients and intercept for unnormalized data
w = w_scaled / sigma
b = b_scaled - (w_scaled / sigma).dot(mu) = b_scaled - w.dot(mu)
:param skle... | 5,355,447 |
def plot_polar_image(data, origin=None):
"""Plots an image reprojected into polar coordinages with the origin
at "origin" (a tuple of (x0, y0), defaults to the center of the image)"""
polar_grid, r, theta = reproject_image_into_polar(data, origin)
plt.figure()
plt.imshow(polar_grid, extent=(the... | 5,355,448 |
def parse_sum_stats(inf, sep):
""" Yields a line at a time from the summary statistics file.
Args:
inf (str): input file
sep (str): column separator
Returns:
OrderedDict: {column: value}
"""
with open_gzip(inf, "rb") as in_handle:
# Get header
header = in_han... | 5,355,449 |
def reduce(snail_nr):
"""Returns a fully reduced version of the given snail number."""
new_snail_nr = copy.deepcopy(snail_nr)
# print("Start:")
# print(snail_nr)
while True:
# print("\nNew reduction phase...")
if explode_in_place(new_snail_nr):
# print("Exploded:", new_s... | 5,355,450 |
def destroy_sample_files_folders() -> None:
    """Remove every configured sample-files folder."""
    for folder in constants.SAMPLE_FILES_FOLDERS:
        _destroy_sample_folder(folder)
def load_image_embedding_model(input_repr, content_type, embedding_size):
"""
Returns a model with the given characteristics. Loads the model
if the model has not been loaded yet.
Parameters
----------
input_repr : "linear", "mel128", or "mel256"
Spectrogram representation used for audi... | 5,355,452 |
def prepare_alm(alm=None, ainfo=None, lmax=None, pre=(), dtype=np.float64):
"""Set up alm and ainfo based on which ones of them are available."""
if alm is None:
if ainfo is None:
if lmax is None:
raise ValueError("prepare_alm needs either alm, ainfo or lmax to be specified")
ainfo = sharp.alm_info(lmax)
... | 5,355,453 |
def increment_occurance_dict(d: dict, k: Any) -> None:
    """
    Bump the occurrence count for key ``k`` in ``d``, in place.

    A missing key starts from zero. Nothing is returned.
    """
    d[k] = d.get(k, 0) + 1
    return None
def dump_contents(input_fc):
""" Print the contents of the feature class, this is just a namedtuple sample. """
fcrow = namedtuple("fcrow", ["oid", "datestamp"])
with arcpy.da.SearchCursor(input_fc, ["OID@", "datestamp"]) as cursor:
for row in cursor:
feature = fcrow._make(row)
... | 5,355,455 |
def package_versions(modules=None, builtins=False, standard_lib=None):
"""Retrieve package version information.
When builtins or standard_lib are None, they will be included only
if a version was found in the package.
@param modules: Modules to inspect
@type modules: list of strings
@param bui... | 5,355,456 |
def closest_line(query_lines, metric='cosine'):
"""Compute the distance to, and parameters for, the closest line to each
line in query_lines.
Args:
- query_lines: Array of lines to compute closest matches for, shape
(n_lines, width, height, 1)
- metric: String to pass to scipy.s... | 5,355,457 |
def deref_vtk(obj):
    """Return the wrapped VTK object for a TVTK wrapper, otherwise
    return the object unchanged."""
    return obj._vtk_obj if isinstance(obj, TVTKBase) else obj
def order_assignee_factory(team):
"""
Creates a :class:`datahub.omis.order.models.OrderAssignee` instance related to ``team``
"""
adviser = Advisor.objects.create(
first_name='John',
last_name='Doe',
email=f'{uuid4()}@example.com',
)
order_assignee = OrderAssignee.objects... | 5,355,459 |
def is_bool(space, w_obj):
    """Report whether the wrapped variable is a boolean."""
    result = w_obj.tp == space.tp_bool
    return space.wrap(result)
def _compile_for_uhfqa(
device: zhinst.Device,
cached_schedule: schedule_helpers.CachedSchedule,
settings_builder: zi_settings.ZISettingsBuilder,
) -> Tuple[zi_settings.ZISettingsBuilder, ZIAcquisitionConfig]:
"""
Initialize programming the UHFQA ZI Instrument.
Creates a sequence program and co... | 5,355,461 |
def read_config():
"""Parses config and returns config values
:returns: config as dict
"""
dirname = os.path.dirname(__file__)
config_path = os.path.join(dirname, 'config.yaml')
try:
stream = open(config_path, "r")
except FileNotFoundError:
return None
try:
config... | 5,355,462 |
def pull_media(obj, remote_id, stage):
    """
    Pull media files from the Divio cloud environment.

    :param obj: CLI context object carrying the API ``client``
        (presumably a click context — TODO confirm)
    :param remote_id: identifier of the remote project/environment
    :param stage: environment stage to pull from
    """
    # Thin delegation to the localdev helper using the context's client.
    localdev.pull_media(obj.client, stage=stage, remote_id=remote_id)
def air_density(t_f, elevation):
    """Air density from temperature and elevation (Eq 20, page 25)."""
    numerator = 1.293 - 1.525e-4 * elevation + 6.379e-9 * elevation ** 2
    denominator = 1 + 0.00367 * t_f
    return numerator / denominator
def revert_migration(apps, schema_editor):
    """
    Reverse of apply_migration: delete the agency groups it created.
    """
    group_model = apps.get_model('auth', 'Group')
    group_model.objects.filter(name__in=AGENCIES).delete()
def _strip_after_new_lines(s):
"""Removes leading and trailing whitespaces in all but first line."""
lines = s.splitlines()
if len(lines) > 1:
lines = [lines[0]] + [l.lstrip() for l in lines[1:]]
return '\n'.join(lines) | 5,355,466 |
def make_connection(request):
    """
    Fixture generator: yields a factory that builds a MockConnection
    capped at ``max_items_send`` items.
    """
    def factory(*, max_items_send: int):
        return MockConnection(max_items_send=max_items_send)
    yield factory
def print_cycles(objects, outstream=sys.stdout, show_progress=False):
"""
*objects*
A list of objects to find cycles in. It is often useful to
pass in gc.garbage to find the cycles that are preventing some
objects from being garbage collected.
*outstream*
The stream for out... | 5,355,468 |
def my_browse(*args, **kwargs):
""" Creates and starts an ObjectBrowser with modified summary column.
"""
attribute_columns = copy.deepcopy(DEFAULT_ATTR_COLS)
summary_column = [col for col in attribute_columns if col.name == 'summary'][0]
summary_column.data_fn = my_summary
return browse(*args, ... | 5,355,469 |
def build_scatterplot(budget):
"""
Runs a cross-validation and plot the scatter-plot of the cross-fold validation error
:param tool: the tool to employ
:param budget: the budget in use for the tool
:return:
"""
frame = load_frames(budget=budget)
X = frame[metrics]
Y = frame['y']
... | 5,355,470 |
def cpu_min_frequency():
    """
    Return the processor's minimum frequency, in MHz.
    """
    freq = psutil.cpu_freq()
    return freq.min
def test_increment_int():
    """Making sure increment works for integers"""
    for start, expected in ((0, 1), (100, 101)):
        assert lde.increment(start) == expected
def log_error(message: str) -> str:
    """Pass an error-log message through unchanged and return it."""
    return message
def exclude_preservation_pending(q):
"""
Transform query to exclude MuseumObject entries which are pending
preservation
"""
now = datetime.datetime.now(datetime.timezone.utc)
preservation_boundary = now - PRESERVATION_DELAY
update_boundary = now - UPDATE_DELAY
return (
q.outerjo... | 5,355,474 |
def naturalday(value, format=None):
"""
For date values that are tomorrow, today or yesterday compared to
present day returns representing string. Otherwise, returns a string
formatted according to settings.DATE_FORMAT.
"""
value = localtime(value)
try:
tzinfo = getattr(value, 'tzinf... | 5,355,475 |
def getRequestData():
""" Main function to execute the GET petition to the API """
URL = "https://pokeapi.co/api/v2/evolution-chain/"
# evoChain = [] Se puede optimizar el algoritmo manejando un array y
# almacenando cada evolucion para evitar los if anidados recorriendo el
# json con un loop
... | 5,355,476 |
def load_property_names(connection, property_names, count_properties):
"""
Load property names from a mutation file in the SQLite database
"""
cur = connection.cursor()
for property_id, property_name in enumerate(property_names,1):
statement = """
INSERT INTO MUTATION_PROPERT... | 5,355,477 |
def test_adaptors(adaptor: str, shuffle_buffer_size: int):
"""
Test if framework-specific generator adpators yield batches.
"""
idx = np.arange(0, 10)
def map_fn(x_, obs_):
"""
Note: Need to convert to numpy in output because torch does not accept dask.
"""
return (n... | 5,355,478 |
def qg8_graph_write(filename: str, graph: qg8_graph):
"""
Wrapper function which prepares a collection of chunks (graph) and writes it to a file
"""
if not isinstance(graph, qg8_graph):
raise TypeError("Second argument is not a qg8_graph")
try:
qg8f = qg8_file_open(filename, QG8_MOD... | 5,355,479 |
def valid_post_author(user, post):
    """Check whether the post was created by the given user.

    Compares the datastore key ids of ``user`` and ``post.user``.

    :param user: user entity exposing ``key().id()``
    :param post: post entity whose ``user`` attribute is its author
    :return: True if the ids match, False otherwise. (The original
        implicitly returned None on mismatch; returning an explicit
        bool is backward-compatible in boolean context.)
    """
    return str(user.key().id()) == str(post.user.key().id())
def listing(request, **kwargs):
"""view for processing and applying listings"""
context = {
'view': 'listing',
'all_channels': CHANNELS,
'all_towns': TOWNS,
'method': request.method,
'actions': ['listing_parse', 'listing_apply'],
}
if request.method == 'GET':
... | 5,355,481 |
def get_rise_or_fall(U, V, Im, demo=0):
"""
Get increase or decrease of intensity in flow direction: This finds us
the front and the wake regions of each wave.
"""
rr, cc = np.shape(Im)
ax_x, ax_y = np.linspace(1, cc, cc), np.linspace(1, rr, rr)
XX, YY = np.meshgrid(ax_x, ax_y)
Velo_mag ... | 5,355,482 |
def has_vanity_name(func):
"""Decorator checking whether a command has been provided a vanity_name value"""
@functools.wraps(func)
async def wrapper(*args, **kwargs):
vanity_name = args[1]
if vanity_name is None:
ctx = args[0]
await ctx.send("Please provide a Steam va... | 5,355,483 |
def get_data_providers(
data_providers_configs: List[dict], data_providers_input: List[str]
) -> List[data.DataProvider]:
"""
Determines which data provider and in which order should be used.
:param data_providers_configs: A list of data provider configurations
:param data_providers_input: A list o... | 5,355,484 |
def geocode(geocoder_api_key, out_file, verbose, quiet):
"""Reverse geocode well locations with BC Geocoder API"""
verbosity = verbose - quiet
configure_logging(verbosity)
# only process if output file does not already exist
if not os.path.exists(out_file):
# get wells csv as pandas datafra... | 5,355,485 |
def _PredatorForFracas(config=None):
    """Build a PredatorForFracas wired to the standard mock repository."""
    effective_config = config or {}
    return PredatorForFracas(MOCK_GET_REPOSITORY, effective_config)
def CreateExtensionSetting(client, feed_items, campaign_feed, feed_item_ids,
platform_restrictions=None):
"""Creates the extension setting for a list of Feed Items.
Args:
client: an AdWordsClient instance.
feed_items: the list of all Feed Items.
campaign_feed: the original Ca... | 5,355,487 |
def py_SurfStatSmooth(Y, surf, FWHM):
"""Smooths surface data by repeatedly averaging over edges.
Parameters
----------
Y : numpy array of shape (n,v) or (n,v,k)
surface data, v=#vertices, n=#observations, k=#variates.
surf : a dictionary with key 'tri' or 'lat', or a BSPolyData object.
... | 5,355,488 |
def arm_name_to_sort_key(arm_name: str) -> Tuple[str, int, int]:
"""Parses arm name into tuple suitable for reverse sorting by key
Example:
arm_names = ["0_0", "1_10", "1_2", "10_0", "control"]
sorted(arm_names, key=arm_name_to_sort_key, reverse=True)
["control", "0_0", "1_2", "1_10", "... | 5,355,489 |
def run(path_main, local_data_path):
"""Function run script
"""
print("... start script {}".format(os.path.basename(__file__)))
# Load data and assumptions
base_data = data_loader.load_paths(path_main, local_data_path)
base_data = data_loader.load_fuels(base_data)
base_data['assumptions'] =... | 5,355,490 |
def test_imprint(app):
"""Test imprints."""
with app.app_context():
check_transformation(
"""
<datafield tag="260" ind1=" " ind2=" ">
<subfield code="a">Sydney</subfield>
<subfield code="b">Allen & Unwin</subfield>
<subfield cod... | 5,355,491 |
def run_query_row(cur: Cursor, sql: str, params: Optional[Mapping[str, Any]] = None, **kwargs: Any
) -> Optional[skytools.dbdict]:
""" Helper function if everything you need is just paramertisized execute to
fetch one row only. If not found none is returned
"""
params = params or k... | 5,355,492 |
def vortex_indicator(high_arr, low_arr, close_arr, n):
"""Calculate the Vortex Indicator for given data.
Vortex Indicator described here:
http://www.vortexindicator.com/VFX_VORTEX.PDF
:param high_arr: high price of the bar, expect series from cudf
:param low_arr: low price of the bar, expect s... | 5,355,493 |
def getServiceById(serviceId: str, **kwargs) -> Dict:
"""Retrieve service by its identifier.
Args:
serviceId: Identifier of service to be retrieved.
Returns:
Service object.
"""
db_collection_service = (
current_app.config['FOCA'].db.dbs['serviceStore']
.collections... | 5,355,494 |
def replace_module_prefix(
state_dict: Dict[str, Any], prefix: str, replace_with: str = "", ignore_prefix: str = ""
):
"""
Remove prefixes in a state_dict needed when loading models that are not VISSL
trained models.
Specify the prefix in the keys that should be removed.
Added by DLM contribut... | 5,355,495 |
def create_csv(parent_dir, tsv_folder, export_csv = True):
"""
The function reads all .tsv files, combine them into a csv file, and export .csv file into parent directory
Args:
parent_dir (string) : The working directory you are working with
tsv_folder (string) : The name of the ... | 5,355,496 |
def crop(img, left, top, right, bottom):
"""
Crop rectangle from image.
Inputs:
img - The image to crop.
left - The leftmost index to crop the image.
top - The topmost index.
right - The rightmost index.
bottom - The bottommost index.
Outputs:
img - The c... | 5,355,497 |
def cl_file_with_height(tmp_path):
    """Create a netcdf file for ``cl`` with a hybrid height coordinate and
    return its path."""
    path = os.path.join(tmp_path, 'cl_hybrid_height.nc')
    ds = Dataset(path, mode='w')
    create_hybrid_height_file(ds, 'cl')
    ds.close()
    return path
def get_hosts(network):
    """Return all usable host addresses within the given network as strings.

    Accepts anything ``ipaddress.IPv4Network`` accepts (CIDR string,
    address/netmask pair, ...); ``strict=False`` tolerates host bits set
    in the network address.

    :param network: network specification, e.g. ``"192.168.0.0/24"``
    :return: list of host IP address strings
    """
    net = ipaddress.IPv4Network(network, strict=False)
    # List comprehension replaces the original manual append loop.
    return [str(host) for host in net.hosts()]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.