code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def test_resend_confirmation_success_message(self, app, db): <NEW_LINE> <INDENT> user = make_dummy_user() <NEW_LINE> user.confirmed = False <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.post(url_for('auth.resend_confirmation'), data=d... | resend_confirmation flashes a success message on successful sub. | 625941b86aa9bd52df036bf6 |
def handleV2(self, info): <NEW_LINE> <INDENT> return 'V2: %s' % info | handles v2 | 625941b867a9b606de4a7d11 |
def make_float(self, s, ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = float(s) <NEW_LINE> return f <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('error, cannot convert string to float') <NEW_LINE> raise | Converts string "s" into a float. | 625941b8851cf427c661a36f |
@exit_with_status('!= 0') <NEW_LINE> def test_non_json_file(text_file): <NEW_LINE> <INDENT> gjtk.cli.main(argv=[text_file]) | Test invocation with a file that does not contain JSON. | 625941b855399d3f05588508 |
def say(self, message): <NEW_LINE> <INDENT> self.parent.say(message) | Tell the player something, long wait. | 625941b89c8ee82313fbb5c9 |
def is_bool(value): <NEW_LINE> <INDENT> return bool(value) == value | Return True if `value` is a boolean. | 625941b8f9cc0f698b14045a |
def initialize(self): <NEW_LINE> <INDENT> self.assmts = {} <NEW_LINE> bit = 1 <NEW_LINE> for entry in self.entries: <NEW_LINE> <INDENT> assmts = AssignmentList() <NEW_LINE> assmts.mask = assmts.bit = bit <NEW_LINE> self.assmts[entry] = assmts <NEW_LINE> bit <<= 1 <NEW_LINE> <DEDENT> for block in self.blocks: <NEW_LINE>... | Set initial state, map assignments to bits. | 625941b84c3428357757c17f |
def ipam_roles_delete(self, id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.ipam_roles_delete_with_http_info(id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.ipam_roles_delete_with_http_info(id, **... | ipam_roles_delete # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ipam_roles_delete(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: A unique integer value identify... | 625941b80383005118ecf439 |
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.published and self.pub_date is None: <NEW_LINE> <INDENT> self.pub_date = datetime.now() <NEW_LINE> <DEDENT> elif not self.published and self.pub_date is not None: <NEW_LINE> <INDENT> self.pub_date = None <NEW_LINE> <DEDENT> super().save(*args, **kwargs) | Set publish date to the date when object published status is switched to True, reset the date if object is
unpublished | 625941b83539df3088e2e1a0 |
def apply(self, arg2, out): <NEW_LINE> <INDENT> return _Resampler.ResamplerCC_apply(self, arg2, out) | apply(ResamplerCC self, complex< double > * arg2, complex< double > * out) -> int | 625941b89b70327d1c4e0c28 |
def get_type(self): <NEW_LINE> <INDENT> obj = self._get_db_obj_query().first() <NEW_LINE> return obj.type if obj else None | Return the provider type.
Args:
None
Returns:
(String): "Provider type. Cloud backend name",
example: "AWS" | 625941b8cdde0d52a9e52e83 |
def __init__(self, *args): <NEW_LINE> <INDENT> _itkOffsetPython.itkOffset2_swiginit(self,_itkOffsetPython.new_itkOffset2(*args)) | __init__(self) -> itkOffset2
__init__(self, itkOffset2 arg0) -> itkOffset2 | 625941b8aad79263cf390890 |
def get_weight_range(self): <NEW_LINE> <INDENT> return self._mins[1], self._maxs[1] | Returns the range of weight data in the set.
get_weight_range() -> tuple<float, float> | 625941b8091ae35668666dba |
def from_radians(self, radians): <NEW_LINE> <INDENT> if isinstance(radians, basestring): <NEW_LINE> <INDENT> radians = self.from_sexegesimal(radians) <NEW_LINE> <DEDENT> self.degrees = math.degrees(radians) | Set the Angle using a value provided in radians. | 625941b87d43ff24873a2af8 |
def deserialize_numpy(self, str, numpy): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.timestamp is None: <NEW_LINE> <INDENT> self.timestamp = genpy.Time() <NEW_LINE> <DEDENT> end = 0 <NEW_LINE> _x = self <NEW_LINE> start = end <NEW_LINE> end += 28 <NEW_LINE> (_x.timestamp.secs, _x.timestamp.nsecs, _x.id, _x.ena... | unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module | 625941b8d10714528d5ffb34 |
def read_catalog(self, file): <NEW_LINE> <INDENT> self.set_catalog_version(file) <NEW_LINE> if not self.error: <NEW_LINE> <INDENT> self.df.dropna(subset=['KPI name'], inplace=True) <NEW_LINE> if not len(self.df): <NEW_LINE> <INDENT> self.error = True <NEW_LINE> error_msg = 'Empty catalog file\r\n\r\n %s \r\n\r\n ' <NEW... | Retrieve catalog information | 625941b89f2886367277a6e6 |
def dot(a, b): <NEW_LINE> <INDENT> with tf.name_scope("dot"): <NEW_LINE> <INDENT> a_ndim = a.get_shape().ndims <NEW_LINE> b_ndim = b.get_shape().ndims <NEW_LINE> assert a_ndim is not None <NEW_LINE> if a_ndim == 0: <NEW_LINE> <INDENT> return tf.scalar_mul(a, b) <NEW_LINE> <DEDENT> assert b_ndim is not None <NEW_LINE> i... | :param tf.Tensor a: shape [...da...,d]
:param tf.Tensor b: shape [d,...db...]
:return: tensor of shape [...da...,d,...db...]
:rtype: tf.Tensor | 625941b8462c4b4f79d1d525 |
def compute_coupling_matrix(self,fl1,fl2,bins,nell_rebin=2,method=203,ell_cut_x=[1.,-1.],ell_cut_y=[1.,-1.]) : <NEW_LINE> <INDENT> if self.wsp!=None : <NEW_LINE> <INDENT> lib.workspace_flat_free(self.wsp) <NEW_LINE> <DEDENT> self.wsp=lib.compute_coupling_matrix_flat(fl1.fl,fl2.fl,bins.bin,nell_rebin,method, ell_cut_x[0... | Computes coupling matrix associated with the cross-power spectrum of two NmtFieldFlats and an NmtBinFlat binning scheme.
:param NmtFieldFlat fl1,fl2: fields to correlate
:param NmtBinFlat bin: binning scheme
:param int nell_rebin: number of sub-intervals into which the base k-intervals will be sub-sampled to compute t... | 625941b830dc7b76659017bf |
def test_yaml_dump(self): <NEW_LINE> <INDENT> yaml = yaml_dump("{\"key\":\"value\"}") <NEW_LINE> self.assertEqual(yaml, "key: value\n") | dump json string to yaml | 625941b8090684286d50eb35 |
def get_properties(self): <NEW_LINE> <INDENT> if self.features_layer is not None: <NEW_LINE> <INDENT> for property in self.features_layer.get_properties(): <NEW_LINE> <INDENT> yield propertyfound_entities.get(mention.string) | Returns all the properties of the features layer (iterator)
@rtype: L{Cproperty}
@return: list of properties | 625941b83cc13d1c6d3c71d9 |
@workflow <NEW_LINE> def heal(ctx, graph, node_id): <NEW_LINE> <INDENT> failing_node = ctx.model.node.get(node_id) <NEW_LINE> host_node = ctx.model.node.get(failing_node.host.id) <NEW_LINE> failed_node_subgraph = _get_contained_subgraph(ctx, host_node) <NEW_LINE> failed_node_ids = list(n.id for n in failed_node_subgrap... | Built-in heal workflow..
:param ctx: workflow context
:param graph: graph which will describe the workflow.
:param node_id: ID of the node to heal
:return: | 625941b8bf627c535bc1302b |
def connect_JSON(config): <NEW_LINE> <INDENT> testnet = config.get('testnet', '0') <NEW_LINE> testnet = (int(testnet) > 0) <NEW_LINE> if not 'rpcport' in config: <NEW_LINE> <INDENT> config['rpcport'] = 19887 if testnet else 9887 <NEW_LINE> <DEDENT> connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpa... | Connect to a bitcoin JSON-RPC server | 625941b8435de62698dfdaa9 |
def uid(self, coordinates): <NEW_LINE> <INDENT> return "{:2.6f}{:2.6f}{}".format( coordinates[CONF_LATITUDE], coordinates[CONF_LONGITUDE], self.type ) | Generate a unique id using coordinates and sensor type. | 625941b82ae34c7f2600cf87 |
def __init__(self, patterns, n=None): <NEW_LINE> <INDENT> self.patterns = np.array([pattern.flatten() for pattern in patterns]) <NEW_LINE> self.unique_patterns, idx, counts = np.unique(self.patterns, axis=0, return_index=True, return_counts=True) <NEW_LINE> self.pattern_dim = dict(zip(idx, counts)) <NEW_LINE> self.N = ... | patterns : list or array of flatten patterns to be stored
n : learning rate | 625941b88c0ade5d55d3e814 |
def on_session_ended(session_ended_request, context): <NEW_LINE> <INDENT> pass | Handle session clean up when the session should end. | 625941b810dbd63aa1bd2a04 |
def get_vels_Crust1(location): <NEW_LINE> <INDENT> lat, lon = location <NEW_LINE> all_lons = np.arange(-179.5,180,1) <NEW_LINE> all_lats = np.arange(89.5,-90,-1) <NEW_LINE> i = int((lon - all_lons[0]) + ((all_lats[0] - lat) // 1) * len(all_lons)) <NEW_LINE> nm = 'data/earth_models/crust1/crust1.' <NEW_LINE> try: <NEW_L... | Crust 1.0 is given in a 1 degree x 1 degree grid (i.e. 360 lon points, 180
lat points). The downloads are structured as
crust1.bnds (360 * 180) x 9 depths to top of each layer
0. water (i.e. topography)
1. ice (i.e. bathymetry)
2. upper se... | 625941b8baa26c4b54cb0f78 |
def bulk_lookup(datasets, cell, method='poisson'): <NEW_LINE> <INDENT> comp_func = None <NEW_LINE> if method == 'poisson': <NEW_LINE> <INDENT> comp_func = log_prob_poisson <NEW_LINE> <DEDENT> elif method == 'spearman' or method == 'rank_corr': <NEW_LINE> <INDENT> comp_func = rank_correlation <NEW_LINE> <DEDENT> elif me... | Returns a list of (dataset, value) pairs sorted by descending value,
where value indicates similarity between the cell and the dataset.
Potential metrics:
- corr/pearson
- rank_corr/spearman
- cosine (normalized cosine distance)
- poisson (log-probability)
Test NMI results on 10x_400, all genes:
P... | 625941b8796e427e537b0417 |
def post(): <NEW_LINE> <INDENT> return db.test_post.insert(**dict (request.vars)) | Test for JSON POST
#curl -i -X POST http://127.0.0.1:8000/sahana/test/post -H "Content-Type: application/json" -d {"name": "John"}
#curl -i -X POST http://127.0.0.1:8000/sahana/test/post -H "Content-Type: application/json" -d @test.json
Web2Py forms are multipart/form-data POST forms
curl -i -X POST http://127.0.0.1:80... | 625941b824f1403a926009bf |
def randomcase(payload, **kwargs): <NEW_LINE> <INDENT> retVal = payload <NEW_LINE> if payload: <NEW_LINE> <INDENT> for match in re.finditer(r"[A-Za-z_]+", retVal): <NEW_LINE> <INDENT> word = match.group() <NEW_LINE> if word.upper() in kb.keywords: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> _ = "" <NEW_LINE> fo... | Replaces each keyword character with random case value
Tested against:
* Microsoft SQL Server 2005
* MySQL 4, 5.0 and 5.5
* Oracle 10g
* PostgreSQL 8.3, 8.4, 9.0
Notes:
* Useful to bypass very weak and bespoke web application firewalls
that has poorly written permissive regular expressions
... | 625941b8be7bc26dc91cd45a |
def do_update(self, name, data): <NEW_LINE> <INDENT> self.emit("update", data) | Send a websocket update event to the client. | 625941b80c0af96317bb803e |
def show(self): <NEW_LINE> <INDENT> sHtml = '<div class="tmvcstimeline tmvcstimelinetooltip">\n'; <NEW_LINE> oCurDate = None; <NEW_LINE> for oEntry in self.aoEntries: <NEW_LINE> <INDENT> oTsZulu = db.dbTimestampToZuluDatetime(oEntry.tsCreated); <NEW_LINE> if oCurDate is None or oCurDate != oTsZulu.date(): <NEW_LINE> <... | Generates the tooltip.
Returns (sTitle, HTML). | 625941b8e8904600ed9f1d7e |
def nameserver_check_scheduler(heartbeat_obj): <NEW_LINE> <INDENT> sched = BackgroundScheduler() <NEW_LINE> sched.start() <NEW_LINE> sched.add_job(heartbeat_obj.nameserver_check, 'cron', second=("*/%s" % int(heartbeat_obj.configuration['heartbeat']['default']['interval']))) <NEW_LINE> retries_check = int(heartbeat_obj.... | Schedule the check using the heartbeat object | 625941b8293b9510aa2c30ee |
def start_interpetration_reforms(self): <NEW_LINE> <INDENT> self.__reforms__ = [] <NEW_LINE> reform_apply_fun_found = False <NEW_LINE> current_reform_index = -1 <NEW_LINE> current_reform = None <NEW_LINE> reform_found = False <NEW_LINE> with open(self.__reforms_file_path__,'r') as content_variable: <NEW_LINE> <INDENT> ... | Start interpretation reform | 625941b8442bda511e8be27b |
def adapt(self, data, reset_state=True): <NEW_LINE> <INDENT> if not reset_state: <NEW_LINE> <INDENT> raise ValueError("CategoricalEncoding does not support streaming adapts.") <NEW_LINE> <DEDENT> if self._called and self._max_tokens is None: <NEW_LINE> <INDENT> raise RuntimeError( "CategoricalEncoding can't be adapted ... | Fits the state of the preprocessing layer to the dataset.
Overrides the default adapt method to apply relevant preprocessing to the
inputs before passing to the combiner.
Arguments:
data: The data to train on. It can be passed either as a tf.data Dataset,
or as a numpy array.
reset_state: Optional argument sp... | 625941b856ac1b37e6264035 |
def is_prime(n): <NEW_LINE> <INDENT> for i in range(2, int(math.sqrt(n)) + 1): <NEW_LINE> <INDENT> if n%i == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Returns True if a positive integer n is prime and False otherwise
n: A positive integer | 625941b821a7993f00bc7b3f |
def blpop(self, keys, timeout=0, **options): <NEW_LINE> <INDENT> keys = native_str(keys) <NEW_LINE> if isinstance(keys, str): <NEW_LINE> <INDENT> keys = [keys] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> keys = list(keys) <NEW_LINE> <DEDENT> keys.append(timeout) <NEW_LINE> return self.execute_command('BLPOP', *keys, ... | LPOP a value off of the first non-empty list
named in the ``keys`` list.
If none of the lists in ``keys`` has a value to LPOP, then block
for ``timeout`` seconds, or until a value gets pushed on to one
of the lists.
If timeout is 0, then block indefinitely. | 625941b823e79379d52ee3bd |
def htmlentities_decode(self, string): <NEW_LINE> <INDENT> return htmlentities_decode(string) | decodes htmlentities | 625941b87cff6e4e811177db |
def main(argv): <NEW_LINE> <INDENT> data = load_data("car_sales.json") <NEW_LINE> summary = process_data(data) <NEW_LINE> new_summary = '\n'.join(summary) <NEW_LINE> print(summary) <NEW_LINE> report('/tmp/cars.pdf', "Cars report", new_summary, cars_dict_to_table(data)) <NEW_LINE> msg = email_generate("automation@exampl... | Process the JSON data and generate a full report out of it. | 625941b8a8ecb033257d2f2b |
def _sanitize_key(key) -> str: <NEW_LINE> <INDENT> return str(key).lower().replace(" ", "_") | Sanitize the location or group key to look up
Args:
key: The key to sanitize | 625941b80c0af96317bb803f |
def update(self): <NEW_LINE> <INDENT> if self.__health_points <=0: <NEW_LINE> <INDENT> self.__dead = True <NEW_LINE> <DEDENT> if self.__finished: <NEW_LINE> <INDENT> self.rect.center= (1500,0) <NEW_LINE> <DEDENT> if self.__counter % self.__counter_list1[self.__current_monster-1] == 0 and not self.__dead: <NEW... | This method will be responsible for repositioning the image on the
screen as well as iterating through lists of images according to
different Boolean variables, ultimately, causing the animations to occur.
It will also check if the monster has reached the end of the screen.
If the monster died, it will move it outsid... | 625941b83c8af77a43ae35f4 |
def __init__(self, algorithm, length, value, format_type=enums.KeyFormatType.X_509, masks=None, name='Public Key', key_wrapping_data=None): <NEW_LINE> <INDENT> super(PublicKey, self).__init__( key_wrapping_data=key_wrapping_data ) <NEW_LINE> self._object_type = enums.ObjectType.PUBLIC_KEY <NEW_LINE> self._valid_formats... | Create a PublicKey.
Args:
algorithm(CryptographicAlgorithm): An enumeration identifying the
type of algorithm for the key.
length(int): The length in bits of the key.
value(bytes): The bytes representing the key.
format_type(KeyFormatType): An enumeration defining the format of
the key ... | 625941b85166f23b2e1a4faf |
def _test_path(self, path, expected_posix_path, expected_windows_path, relative_to_posix=None, relative_to_windows=None): <NEW_LINE> <INDENT> self.assertEqual( normalize_platform_path(path, relative_to=relative_to_posix, target_platform=PathPlatform.POSIX), expected_posix_path) <NEW_LINE> self.assertEqual( normalize_pl... | Test path normalization against Windows and POSIX paths.
This will test that a path normalizes correctly on both Windows and
Linux, with or without relative paths.
Args:
path (unicode):
The path to normalize.
expected_posix_path (unicode):
The expected resulting POSIX path.
expected_wind... | 625941b8e5267d203edcdaf6 |
def p_for(p): <NEW_LINE> <INDENT> p[0] = AST.ForNode([AST.IdNumNode(p[3]), p[5], p[7], p[9]]) | instruction : FOR EXPR_START IDENTIFIANT FOR_SEP expression FOR_SEP expression EXPR_END bloc | 625941b8009cb60464c63212 |
def reset_logging(): <NEW_LINE> <INDENT> for handler in logging.getLogger().handlers: <NEW_LINE> <INDENT> if getattr(handler, '_debug_handler', False): <NEW_LINE> <INDENT> handler.stream.close() <NEW_LINE> <DEDENT> <DEDENT> logging.getLogger().handlers = [] | Reset logging config | 625941b821bff66bcd6847ab |
def __str__(self): <NEW_LINE> <INDENT> return self.nama_bahan | String for representing the MyModelName object (in Admin site etc.) | 625941b8cc40096d615957a9 |
def test_link_self(self): <NEW_LINE> <INDENT> self.component.repo = "weblate://test/test" <NEW_LINE> self.component.push = "" <NEW_LINE> self.assertRaisesMessage( ValidationError, "Invalid link to a Weblate project, cannot link it to itself!", self.component.full_clean, ) | Link pointing to self. | 625941b87b180e01f3dc465b |
def draw_maze_obstacle(x,y): <NEW_LINE> <INDENT> draw_obstacle_line_x_pos(x,y) <NEW_LINE> draw_obstacle_line_x_neg(x,y) <NEW_LINE> draw_obstacle_line_y_neg(x,y) <NEW_LINE> draw_obstacle_line_y_pos(x,y) | This function will be draw | 625941b8e5267d203edcdaf7 |
def GetStore(location=None, only_files=()): <NEW_LINE> <INDENT> global _pid_store <NEW_LINE> if not _pid_store: <NEW_LINE> <INDENT> _pid_store = PidStore() <NEW_LINE> if not location: <NEW_LINE> <INDENT> location = PidStoreLocation.location <NEW_LINE> <DEDENT> pid_files = [] <NEW_LINE> for file_name in os.listdir(locat... | Get the instance of the PIDStore.
Args:
location: The location to load the store from. If not specified it uses the
location defined in PidStoreLocation.py
only_files: Load a subset of the files in the location.
Returns:
An instance of PidStore. | 625941b891af0d3eaac9b86a |
def test_rebuild(self): <NEW_LINE> <INDENT> instance = jsonutils.to_primitive(self._create_fake_instance()) <NEW_LINE> instance_uuid = instance['uuid'] <NEW_LINE> image_ref = instance['image_ref'] <NEW_LINE> self.compute.run_instance(self.context, instance_uuid) <NEW_LINE> self.compute.rebuild_instance(self.context, im... | Ensure instance can be rebuilt | 625941b80a366e3fb873e66d |
def RemoveScheduledComponent(self, comp): <NEW_LINE> <INDENT> self._scheduled_instances.remove(comp) | Remove a component that was scheduled. | 625941b8283ffb24f3c55762 |
def reset_window(self): <NEW_LINE> <INDENT> self.ui.graphicsView_plotView.reset_plots() | reset the window to the initial state, such that no plot is made on the canvas
:return: | 625941b894891a1f4081b8fe |
def choose_best_feature(dataset): <NEW_LINE> <INDENT> if not dataset: <NEW_LINE> <INDENT> raise ValueError("dataset is empty.") <NEW_LINE> <DEDENT> num_feature = len(dataset[0]) - 1 <NEW_LINE> base_entropy = calc_shannon_ent(dataset) <NEW_LINE> best_info_gain, best_feature = 0.0, -1 <NEW_LINE> for i in range(num_featur... | 选择信息增益最大的feature
:param dataset: 数据集
:return: 信息增益最大的feature的id | 625941b8187af65679ca4f73 |
@celery_app.task(name='send_sms_code') <NEW_LINE> def send_sms_code(mobile, sms_code): <NEW_LINE> <INDENT> CPP().send_sms_code(mobile, [sms_code, constants.SMS_CODE_REDIS_EXPIRES], 1) | 定义发短信异步任务
:param mobile: 手机号
:param sms_code: 短信验证码
:return: None | 625941b899cbb53fe6792a3d |
def get_filepath(self, len_thresh): <NEW_LINE> <INDENT> if len(self.url_path.rsplit('/')) >= URL_PATH_DEPTH_THRESH: <NEW_LINE> <INDENT> filepath = '/'.join(self.url_path.rsplit('/')[-1*URL_PATH_DEPTH_THRESH:-1]) <NEW_LINE> if len(filepath) > len_thresh: <NEW_LINE> <INDENT> return filepath <NEW_LINE> <DEDENT> <DEDENT> r... | 获取文件路径
:param len_thresh:
:return: | 625941b866656f66f7cbc000 |
def test_2_Reward_shopping_order_refund(self): <NEW_LINE> <INDENT> logging.debug("test_2_return_sameCodeAndBar_order") <NEW_LINE> market_service.rewards_order_refund(globals()['shopping_order_id'] ) <NEW_LINE> self._test_data.update_post_verify_data() <NEW_LINE> self.expectedData(0 ,121 , 2 , 0 , 0 , 0 , 0 , 1 , 0 , 0 ... | 后台退积分商品
‘hmr组合商品’ 1件
:return: | 625941b88e7ae83300e4ae22 |
def test_basic_file_mapping(): <NEW_LINE> <INDENT> assert_raises(ValueError, get_file_paths, subject=tconf.subject, data_type='sushi', output='raw', run_index=0, hcp_path=tconf.hcp_path) <NEW_LINE> assert_raises(ValueError, get_file_paths, subject=tconf.subject, data_type='rest', output='kimchi', run_index=0, hcp_pat... | Test construction of file paths and names | 625941b8d58c6744b4257ab7 |
def __init__(self, token=None): <NEW_LINE> <INDENT> self._token = None <NEW_LINE> self.discriminator = None <NEW_LINE> if token is not None: <NEW_LINE> <INDENT> self.token = token | BTDeviceTokenParams - a model defined in OpenAPI | 625941b885dfad0860c3acaf |
def WaitForEvent(self, tab, selector, event_name, timeout): <NEW_LINE> <INDENT> util.WaitFor(lambda: self.HasEventCompleted(tab, selector, event_name), timeout=timeout) | Halts media action until the selector's event is fired.
Args:
tab: The tab to check for event on.
selector: Media element selector.
event_name: Name of the event to check if fired or not.
timeout: Timeout to check for event, throws an exception if not fired. | 625941b8a17c0f6771cbdeaa |
def dist (a, b=Atoms('X', positions=[(0,0,0)])): <NEW_LINE> <INDENT> return ((a.x-b.x) ** 2 + (a.y-b.y) ** 2 + (a.z-b.z) ** 2) ** 0.5 | this function calculates the distance that the atom a is
from atom b. Default for atom b is X species at origin | 625941b8be383301e01b52e3 |
def addResiduesToSeqToStructMap(self, chainType, chainId, seqStr, resIds): <NEW_LINE> <INDENT> assert len(seqStr)== len(resIds), "error, there must be the same number of residues as amino acids are in sequence" <NEW_LINE> for key in sorted(self.seqToStruct): <NEW_LINE> <INDENT> if key[:2]== (chainType, chainId): <NEW_L... | Given an already mapped seq to struct object, modify one chain to potentially add new residues
Needed if 3dcons is used as it generally report all residues in sequence and not just the included in pdb | 625941b8ac7a0e7691ed3f30 |
def check_fleet_edges(ai_settings, aliens): <NEW_LINE> <INDENT> for alien in aliens.sprites(): <NEW_LINE> <INDENT> if alien.check_edges(): <NEW_LINE> <INDENT> change_fleet_direction(ai_settings, aliens) <NEW_LINE> break | Responde apropriadamente se algun alienúgena alcançar a borda da tela | 625941b89f2886367277a6e7 |
def __init__(self, gen_pref, gen_feat, dim, n_cols, solver): <NEW_LINE> <INDENT> self.gen_pref = gen_pref <NEW_LINE> self.gen_feat = gen_feat <NEW_LINE> self.n_cols = n_cols <NEW_LINE> self.dim = dim <NEW_LINE> self.solver = solver <NEW_LINE> self.pref_list = self.gen_pref.get_all_prefs() <NEW_LINE> self.n_rows = len(s... | Initializes all the useful structures.
:param gen_pref: the preference generator. See <:genP.GenMacroP> and <:genP.GenMicroP>
:param gen_feat: the feature generator
:param dim: number of possible labels
:param n_cols: number of columns of the matrix sub-game
:param solver: game solver. See for example <:solvers.Fictit... | 625941b8c4546d3d9de72887 |
def drop(n, L): <NEW_LINE> <INDENT> if n == 0: <NEW_LINE> <INDENT> return L <NEW_LINE> <DEDENT> if L == []: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return drop(n-1, L[1:]) | Returns the list L[n:]. | 625941b83eb6a72ae02ec331 |
def copy_statistics_csv_file(self): <NEW_LINE> <INDENT> webpath = os.path.join(self.config['AUTOCMS_WEBDIR'], self.testname) <NEW_LINE> src_stats = os.path.join(self.config['AUTOCMS_BASEDIR'], self.testname, 'statistics.csv') <NEW_LINE> dst_stats = os.path.join(webpath, 'statistics.csv') <NEW_LINE> shutil.copyfile(src_... | Copy the statistics file to the webdir. | 625941b83d592f4c4ed1ced6 |
def get_select_query_results(connection, query, parameters=None): <NEW_LINE> <INDENT> cursor = connection.cursor() <NEW_LINE> if parameters is not None: <NEW_LINE> <INDENT> cursor.execute(query, parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor.execute(query) <NEW_LINE> <DEDENT> return cursor | Executes the specified query with the specified tuple of
parameters. Returns a cursor for the query results.
Raises an exception if the query fails for any reason. | 625941b8d8ef3951e3243394 |
def onEnemyHit(self, laser): <NEW_LINE> <INDENT> self.lasers.remove(laser) <NEW_LINE> self.score += Level.SCORE_PER_HIT <NEW_LINE> if self.score >= Level.SCORE_TO_WIN: <NEW_LINE> <INDENT> self.endTheGame() | Perform on Enemy Hit | 625941b8ec188e330fd5a5fd |
def make_copies(service, draft_id, n): <NEW_LINE> <INDENT> draft_response = service.users().drafts().get(userId="me", id=draft_id, format="raw").execute() <NEW_LINE> raw_response = {'raw': draft_response["message"]["raw"]} <NEW_LINE> message = {'message': raw_response} <NEW_LINE> try: <NEW_LINE> <INDENT> for x in range... | make copies of the draft
:param service: authenticated gmail service
:param draft_id: GMail draft ID
:param n: number of copies
:return: True if successful, False otherwise | 625941b8cb5e8a47e48b7906 |
def list_sheets(self, spreadsheet_id: str) -> List[str]: <NEW_LINE> <INDENT> with self.build_sheets_api() as sheets_api: <NEW_LINE> <INDENT> spreadsheet_data = ( sheets_api.spreadsheets() .get( spreadsheetId=spreadsheet_id, fields='sheets.properties.title,sheets.properties.sheetType', ) .execute(**self._google_client_r... | List available sheets | 625941b838b623060ff0ac45 |
def test_callback(self): <NEW_LINE> <INDENT> callback ='jsonp123' <NEW_LINE> response = self.call(self.url, {'appId':self.app.get_token(), 'callback': callback}) <NEW_LINE> self.assert_(self._assertJson(json_string=response.content, status=200, message="", expected_data = ['users'], callback = callback )) | Test that the users for a valid app are retrieved in callbacks | 625941b855399d3f0558850a |
def reduce(self, ratings): <NEW_LINE> <INDENT> ratings_copy = ratings[::] <NEW_LINE> for index, item in enumerate(ratings_copy): <NEW_LINE> <INDENT> restaurant, score = ratings_copy[index] <NEW_LINE> if score is None: ratings.remove(item) <NEW_LINE> <DEDENT> return ratings | Remove ratings from heap that are None. | 625941b89c8ee82313fbb5cb |
def get_processing_value( param: QgsProcessingParameterDefinition, inp: WPSInput, context: ProcessingContext) -> Any: <NEW_LINE> <INDENT> typ = param.type() <NEW_LINE> if typ in ('fileDestination','folderDestination'): <NEW_LINE> <INDENT> value = basename(normpath(inp[0].data)) <NEW_LINE> if value != inp[0].data: <NEW_... | Return processing value from wps inputs
Processes other inputs than layers | 625941b8f9cc0f698b14045c |
def _yank_particles(self, num_records): <NEW_LINE> <INDENT> particles_returned = 0 <NEW_LINE> if self._state is not None and StateKey.PARTICLES_RETURNED in self._state and self._state[StateKey.PARTICLES_RETURNED] > 0: <NEW_LINE> <INDENT> particles_returned = self._state[StateKey.PARTICLES_RETURNED] <NEW_... | Get particles out of the buffer and publish them. Update the state
of what has been published, too.
@param num_records The number of particles to remove from the buffer
@retval A list with num_records elements from the buffer. If num_records
cannot be collected (perhaps due to an EOF), the list will have the
elements i... | 625941b830bbd722463cbc19 |
def test_market_survey_ontime(self): <NEW_LINE> <INDENT> self.t1.engage_date = '2011-05-01' <NEW_LINE> self.t2.engage_date = '2011-05-01' <NEW_LINE> self.nl1.market_survey = True <NEW_LINE> self.nl1.market_survey_date = '2011-05-03' <NEW_LINE> self.nl2.market_survey = True <NEW_LINE> self.nl2.market_survey_date = '2011... | Test that the market survey on time percentage is correct | 625941b8b830903b967e976d |
def input_path(self, *args): <NEW_LINE> <INDENT> return os.path.join(self.sim_dir, self.get_name(), *args) | Given any arguments, relative to the simulation dir, return
the absolute path. | 625941b84c3428357757c181 |
def sample(self, A, c): <NEW_LINE> <INDENT> k = np.round(c - c/5.0) <NEW_LINE> greedy_mdl = GREEDY(A, k=k, num_bases=c) <NEW_LINE> greedy_mdl.factorize(compute_h=False, compute_err=False, niter=1) <NEW_LINE> return greedy_mdl.select | Arguments
---------
A :
c :
Returns
-------
s : selection of samples/indices | 625941b80383005118ecf43b |
def show_fom(self, show=True): <NEW_LINE> <INDENT> if show: <NEW_LINE> <INDENT> self._fom_widget.show() <NEW_LINE> fom = str(self._main_widget.fom) <NEW_LINE> self._fom_line.setText(fom) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._fom_widget.hide() | slot for fom calculable
:param show: if it's true it show the fom widget and hide it if not
:type show: bool | 625941b83c8af77a43ae35f5 |
def cancel(self): <NEW_LINE> <INDENT> new_act = self.actvalidationstate_set.count() > 1 <NEW_LINE> if self.date <= date.today(): <NEW_LINE> <INDENT> if new_act: <NEW_LINE> <INDENT> self.set_state('ANNUL_NOUS', get_request().user) <NEW_LINE> <DEDENT> self.parent_event = None <NEW_LINE> self.save() <NEW_LINE> <DEDENT> el... | Parent event is canceled completely, or partially, act upon it.
| 625941b8a4f1c619b28afe98 |
def push(self, item): <NEW_LINE> <INDENT> self.items.append(item) | 添加一个新的元素item到栈顶 | 625941b807f4c71912b112dd |
def test_list_projects_default_domain(self): <NEW_LINE> <INDENT> domain = unit.new_domain_ref() <NEW_LINE> self.resource_api.create_domain(domain['id'], domain) <NEW_LINE> project1 = unit.new_project_ref(domain_id=domain['id']) <NEW_LINE> self.resource_api.create_project(project1['id'], project1) <NEW_LINE> refs = self... | Test that list projects only returns those in the default domain. | 625941b8d164cc6175782ba4 |
def quit(self): <NEW_LINE> <INDENT> self.session.quit() | Quit session | 625941b857b8e32f524832f7 |
def p_modset_lcb_rcb(p): <NEW_LINE> <INDENT> p[0] = [] | modset : LCB RCB | 625941b8fbf16365ca6f6013 |
def __init__( self, export_name=None, member=None, ): <NEW_LINE> <INDENT> if export_name is not None: <NEW_LINE> <INDENT> self.export_name = export_name <NEW_LINE> <DEDENT> if member is not None: <NEW_LINE> <INDENT> self.member = member | Keyword args:
export_name (str): The name of the export to create when applying the export policy to the directory.
member (ReferenceWithType): Reference to the directory to which the export policy may be applied. The `id` or `name` parameter is required, but cannot be set together. If the `name` parameter is s... | 625941b838b623060ff0ac46 |
def save_parameters(self, filetype='text'): <NEW_LINE> <INDENT> if self.hasRider: <NEW_LINE> <INDENT> pathToData = os.path.split(os.path.split(self.directory)[0])[0] <NEW_LINE> pathToParDir = os.path.join(pathToData, 'riders', self.riderName, 'Parameters') <NEW_LINE> pathToCombDir = os.path.join(pathToParDir, 'Combined... | Saves all the parameter sets to file.
Parameters
----------
filetype : string, optional
- 'text' : a text file with parameters as `c = 0.10+/-0.01
`
- 'matlab' : matlab .mat file
- 'pickle' : python pickled dictionary
| 625941b80a50d4780f666ce6 |
def setUp(self): <NEW_LINE> <INDENT> from hardest.binary_validator import BinaryValidator <NEW_LINE> self.instance = BinaryValidator() | Test setup. | 625941b899cbb53fe6792a3e |
def limit_offset_sql(self, low_mark, high_mark): <NEW_LINE> <INDENT> limit, offset = self._get_limit_offset_params(low_mark, high_mark) <NEW_LINE> return '%s%s' % ( (' LIMIT %d' % limit) if limit else '', (' OFFSET %d' % offset) if offset else '', ) | Return LIMIT/OFFSET SQL clause. | 625941b8d99f1b3c44c673ed |
def rob(self, root): <NEW_LINE> <INDENT> def helper(root): <NEW_LINE> <INDENT> if not root: <NEW_LINE> <INDENT> return 0, 0 <NEW_LINE> <DEDENT> left = helper(root.left) <NEW_LINE> right = helper(root.right) <NEW_LINE> v1 = root.val + left[1] + right[1] <NEW_LINE> v2 = max(left) + max(right) <NEW_LINE> return v1, v2 <NE... | :type root: TreeNode
:rtype: int | 625941b86fece00bbac2d592 |
def assert_fetch_redirects(self, view_name, args=None, kwargs=None): <NEW_LINE> <INDENT> response = response_from_view(view_name, args=args, kwargs=kwargs) <NEW_LINE> self.assertRedirects(response, reverse('openstates:api-key-required')) | Assert view redirects to error page when api-key is missing and in debug mode. | 625941b8bf627c535bc1302d |
def updateData_nv(self, entries): <NEW_LINE> <INDENT> self.ax.cla() <NEW_LINE> self.plotNvisits() <NEW_LINE> nv = float(entries['Nvisits'].get()) <NEW_LINE> if nv > 0: <NEW_LINE> <INDENT> self.plotnvisits(nvisits=nv) <NEW_LINE> <DEDENT> self.ax.set_xlim(self.zmin, self.zmax) <NEW_LINE> self.canvas.draw() | Method to update the figure according to request made on entries
zlim and filter allocation will be plotted here.
Parameters
---------------
entries: dict of tk.Entry | 625941b88c0ade5d55d3e816 |
def sw_align(seqA, seqB, scorer=False, gap=-1): <NEW_LINE> <INDENT> seqA, seqB = _as_lists(seqA, seqB) <NEW_LINE> return malign.sw_align(seqA, seqB, scorer or _get_scorer(seqA, seqB), gap) | Carry out the traditional Smith-Waterman algorithm.
Parameters
----------
seqA, seqB : {str, list, tuple}
The input strings. These should be iterables, so you can use tuples,
lists, or strings.
scorer : dict (default=False)
If set to C{False} a scorer will automatically be calculated,
otherwise, the sc... | 625941b82ae34c7f2600cf89 |
def forward(self, x): <NEW_LINE> <INDENT> return PlusConstantOp.apply(x, self.const) | Use `PlusConstantOp.apply` to call the defined custom operator. | 625941b8dc8b845886cb538b |
def num_deriv3(f, x, incr=0.001): <NEW_LINE> <INDENT> return (-f(x+3.*incr)+8.*f(x+2.*incr)-13.*f(x+incr)+13.*f(x-incr) - 8.*f(x-2.*incr)+f(x-3.*incr))/(8.*incr**3) | Return third derivative of f at x. | 625941b8dc8b845886cb538c |
def wls_fit_dki(design_matrix, data): <NEW_LINE> <INDENT> tol = 1e-6 <NEW_LINE> data = np.asarray(data) <NEW_LINE> data_flat = data.reshape((-1, data.shape[-1])) <NEW_LINE> dki_params = np.empty((len(data_flat), 27)) <NEW_LINE> min_diffusivity = tol / -design_matrix.min() <NEW_LINE> inv_design = np.linalg.pinv(design_m... | Computes weighted linear least squares (WLS) fit to calculate
the diffusion tensor and kurtosis tensor using a weighted linear
regression diffusion kurtosis model [1]_.
Parameters
----------
design_matrix : array (g, 22)
Design matrix holding the covariants used to solve for the regression
coefficients.
data :... | 625941b8d164cc6175782ba5 |
def __bool__(self): <NEW_LINE> <INDENT> return _math.VectorOfUInt64___bool__(self) | __bool__(self) -> bool | 625941b8adb09d7d5db6c5ea |
def get_env_type(env_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> env = gym.make(env_name) <NEW_LINE> del env <NEW_LINE> return 'gym' <NEW_LINE> print('{} is not a viable environment.'.format(env_name)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return 'rl' | Get the type of environment from the env_name string | 625941b88da39b475bd64dce |
def __getattr__(self,a,FloatOnly=False): <NEW_LINE> <INDENT> if a == 'comment': <NEW_LINE> <INDENT> return self.mergeComments() <NEW_LINE> <DEDENT> if a == 'props': <NEW_LINE> <INDENT> if self.geoms: <NEW_LINE> <INDENT> return self[0].props <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DE... | returns: parameter values collected from all geometries
rtype: list (or string, if 'comment' was requested) | 625941b850485f2cf553cbf0 |
def __init__(self, name='u_net', pretrained_weights=None, input_size=(64, 64, 1)): <NEW_LINE> <INDENT> super(UNet, self).__init__(name, input_size) <NEW_LINE> self.layer_dict = dict([]) <NEW_LINE> self.architecture = Model() <NEW_LINE> self.define() <NEW_LINE> if pretrained_weights is not None: <NEW_LINE> <INDENT> self... | :param pretrained_weights:
:param input_size: should be 64 multiple | 625941b8a17c0f6771cbdeab |
def test_known_issues(self): <NEW_LINE> <INDENT> Triton = TritonContext() <NEW_LINE> Triton.setArchitecture(ARCH.X86) <NEW_LINE> Triton.taintRegister(Triton.registers.eax) <NEW_LINE> inst = Instruction() <NEW_LINE> inst.setOpcode(b"\x8D\x04\x06") <NEW_LINE> Triton.processing(inst) <NEW_LINE> self.assertTrue(Triton.isRe... | Check tainting result after processing. | 625941b81f5feb6acb0c49ac |
def getTimeStepDuration(self): <NEW_LINE> <INDENT> return self.instance.timeStepDuration; | Returns the timeStepDuration; it is a datetime.timedelta. It defines the size of the time step. | 625941b857b8e32f524832f7
@cli.group() <NEW_LINE> @click.pass_context <NEW_LINE> def monitor(ctx): <NEW_LINE> <INDENT> from .monitor import calculate_parallelism, get_user_logs, postprocess_jobdict, JobMonitor <NEW_LINE> moddict = {"calculate_parallelism":calculate_parallelism,"get_user_logs":get_user_logs,"postprocess_jobdict":postprocess_jobd... | Job monitoring functions.
| 625941b80c0af96317bb8040 |
def deserialize_grades(json_string): <NEW_LINE> <INDENT> grades = dict() <NEW_LINE> if json_string: <NEW_LINE> <INDENT> data = json.loads(json_string) <NEW_LINE> if 'scores' in data: <NEW_LINE> <INDENT> for grade in data['scores']: <NEW_LINE> <INDENT> if 'studentId' in grade and 'value' in grade: <NEW_LINE> <INDENT> gr... | Deserializes the JSON representation received as arguments to a map of student ids to Grade objects.
:param json_string: JSON representation of the grades objects
:return: a map of student ids to Grade objects | 625941b899fddb7c1c9de1ea |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.