repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1
value | license stringclasses 15
values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
CodeRiderz/rojak | rojak-analyzer/convert_13_labels_to_7_labels.py | Python | bsd-3-clause | 1,929 | 0.004147 | import csv
from bs4 import BeautifulSoup
from collections import Counter
import re
import os
OUTPUT_NAME = os.getenv('OUTPUT_NAME',
'data_detikcom_labelled_740_7_class.csv')
csv_file = open('data_detikcom_labelled_740.csv')
csv_reader = csv.DictReader(csv_file)
# Tranform individual label to candidate pair labe... | itle = row['title']
raw_content = row['raw_content']
labels | = []
label_1 = row['sentiment_1']
if label_1 != '':
candidate_pair_label = label_map[label_1]
if not candidate_pair_label in labels:
labels.append(candidate_pair_label)
label_2 = row['sentiment_2']
if label_2 != '':
candidate_pair_label = label_map[label_2]
... |
Iotic-Labs/py-IoticAgent | src/IoticAgent/Datatypes.py | Python | apache-2.0 | 4,222 | 0.005921 | # Copyright (c) 2016 Iotic Labs Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/Iotic-Labs/py-IoticAgent/blob/master/LICENSE
#
# Unless re... | from __future__ import unicode_literals
BASE64 = 'base64Binary'
'''Represents a sequence of binary octets (bytes) encoded according to RFC 2045,
the standard defining the MIME types (look under "6.8 Base64 Content-Transfer-Encoding").
'''
BOOLEAN = 'boolean'
'''A Boolean true or false value. Representations of true ar... | signedByte'
'''An unsigned 8-bit integer in the range [0, 255]. Derived from the unsignedShort datatype.'''
DATE = 'date'
'''Represents a specific date. The syntax is the same as that for the date part of dateTime,
with an optional time zone indicator. Example: "1889-09-24".
'''
DATETIME = 'dateTime'
'''
Represents a s... |
KayaBaber/Computational-Physics | Assignment_3_chaos_and_pendulums/Pre-GitHub-versions/Phys440_Assignment03_Prob1 (1).py | Python | mit | 1,186 | 0.009434 | '''
Kaya Baber
Physics 440 - Computational Physics
Assignment 3
Problem 1
Hamiltonian Dynamics of a Nonlinear Pendulum
Consider a simple pendulum of length in
gravitational field g. The frequency in the limit of small angles is Ω_0 ≡ radical(g/l) , but do not assume the limit
of small angles for the following calculat... | space trajectories for a variety of initial conditions.
(c) Li | ouville’s Theorem states that the phase-space volume of a infinitesimally close ensemble of states is
conserved. Demonstrate Liouville’s Theorem by considering an ensemble of closely spaced initial conditions.
'''
|
kuujo/active-redis | examples/set.py | Python | mit | 554 | 0.00722 | # Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# Se | e LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed set.
myset = redis.set()
# Add items to the set.
myset.add('foo')
myset.add('bar')
# We can also create a named set by passing a key to ... | t.delete()
print myset # set()
|
zesk06/scores | tests/common_test.py | Python | mit | 524 | 0 | #!/usr/bin/env python
# encoding: | utf-8
"""A test module"""
import datetime
import tempfile
import os
import shutil
import scores.common as common
class TestCommon(object):
""" A Test class"""
def test_date_function(self):
"""Test"""
a_date = datetime.datetime.now()
a_date = a_date.replace(microsecond=0)
t... | a_date
|
lsantagata/smslib_ui | src/configuration_management_tools/migrations/0002_auto_20170731_0022.py | Python | gpl-3.0 | 608 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-31 00:22
from __future__ import unicode_literals
from django.db import migra | tions
class Migration(migrations.Migration):
dependencies = [
('configuration_management_tools', '0001_initial'),
]
| operations = [
migrations.AlterModelOptions(
name='smslibgateways',
options={'managed': False, 'verbose_name': 'Gateways'},
),
migrations.AlterModelOptions(
name='smslibnumberroutes',
options={'managed': False, 'verbose_name': 'Routes'},
... |
volpino/Yeps-EURAC | scripts/scramble/scripts/generic.py | Python | mit | 1,149 | 0.035683 | import os, sys, shutil
# change back to the build dir
if os.path.dirname( sys.argv[0] ) != "":
os.chdir( os.path.dirname( sys.argv[0] ) )
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
import get_platform # fixes fat python 2.5
from ez_setup import use_set... | if os.access( dir, os.F_OK ):
print "scramble.py: removing dir:", dir
shutil.rmtree( dir )
# reset args for distutils
me = sys.argv[0]
sys.argv = [ me ]
sys.argv.append( "egg_info" )
if tag is not None:
#sys.argv.append( "egg_info" )
sys.argv.append( "--tag-build=%s" %tag )
# svn revision (if a... | irectly in tag-build
sys.argv.append( "--no-svn-revision" )
sys.argv.append( "bdist_egg" )
# do it
execfile( "setup.py", globals(), locals() )
|
levilucio/SyVOLT | UMLRT2Kiltera_MM/graph_MT_post__OUT2.py | Python | mit | 2,604 | 0.024578 | """
__graph_MT_post__OUT2.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WI | TH CAUTION
_______________________________________________________________________ | ___
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_post__OUT2(graphEntity):
def __init__(self, x, y, semObject = None):
self.semanticObject = semObject
self.sizeX, self.sizeY = 172, 82
graphEntity.__init__(self, x... |
SpaceKatt/CSPLN | scripts/create_web_apps_linux.py | Python | gpl-3.0 | 5,043 | 0.00238 | r'''
<license>
CSPLN_MaryKeelerEdition; Manages images to which notes can be added.
Copyright (C) 2015-2016, Thomas Kercheval
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the L... | check_file_exist(mkever)
return mkever
def copy_webframez(number_apps, app_path):
"""
For each path where we intend to create a linux application,
create a copy of the web2py framework and a modified copy
of web2py.py.
"""
webframe, webdotpy = grab_web2py_frame()
out_paths = gr... | th in out_paths:
shutil.copytree(webframe, os.path.join(path, 'web2py'))
next_path = os.path.join(path, 'web2py')
shutil.copy(webdotpy, next_path)
print ' web2py frame copied to: {}'.format(path)
print ' web2py.py copied to: {}'.format(next_path)
return out_paths
def m... |
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/pandas/tests/io/test_html.py | Python | mit | 33,092 | 0 | from __future__ import print_function
import glob
import os
import re
import warnings
try:
from importlib import import_module
except ImportError:
import_module = __import__
from distutils.version import LooseVersion
import pytest
import numpy as np
from numpy.random import rand
from pandas import (DataFr... | l(df1, df2)
assert df1[0].iloc[0, 0] == 'Proximates'
assert df1[0].columns[0] == 'Nutrient'
def test_spam_with_types(self):
df1 = self.read_html(self.spam_data, '.*Water.*')
df2 = self.read_h | tml(self.spam_data, 'Unit')
assert_framelist_equal(df1, df2)
assert df1[0].iloc[0, 0] == 'Proximates'
assert df1[0].columns[0] == 'Nutrient'
def test_spam_no_match(self):
dfs = self.read_html(self.spam_data)
for df in dfs:
assert isinstance(df, DataFrame)
d... |
scream7/leetcode | algorithms/python/28.py | Python | apache-2.0 | 315 | 0 | class Solution(object):
def strStr(self, haystack, needle):
"""
:type haystack: str
:type nee | dle: str
:rtype: int
"""
for i in range(len(haystack)-len(needle) + 1):
if haystack[i: i + len(needle)] == needle:
return i
return | -1
|
blitzagency/django-chatterbox | chatterbox/utils/youtube.py | Python | mit | 4,331 | 0.000693 | import logging
from ..models import Activity
from .date import activity_stream_date_to_datetime, datetime_to_string
log = logging.getLogger(__name__)
def activity_from_dict(data):
log.debug("Converting YouTube dict to Activity Model")
activity_dict = activity_dict_from_dict(data)
return Activity.from_ac... | ",
"@id": "https://www.twitter.com/{{user.screen_name}}
"displayName": "Martin Smith",
"url": "http://example.org/martin",
"image": {
"@type": "Link",
"href": "http://example.org/martin/image.jpg",
"mediaType": "image/j | peg"
}
},
------------------------------------------------------
"object" : {
"@id": "urn:example:blog:abc123/xyz",
"@type": "Note",
"url": "http://example.org/blog/2011/02/entry",
"content": "This is a short note"
},
-------... |
tzechiop/PANYNJ-Regression-Analysis-for-Toll-Traffic-Elasticity | mergeResults.py | Python | mit | 2,928 | 0.003415 | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 7 13:10:05 2016
@author: thasegawa
"""
import os
import pandas as pd
economic_list = list(pd.read_excel('data\\fields\\economicIndicators_Real.xlsx', header=None)[0])
#fuel_list = list(pd.read_excel('data\\fields\\fuel_binary.xlsx', header=None)[0]) + [None]
fuel_list ... | 0])
# Iter | ate through each regression result and retrieve R^2 and coefficient
group_list = ['pathmid',
'pathnj',
'pathnyc',
'pathtotal',
'pathwtc']
path = 'data\\regress_out\\all_v2'
outcol_list = ['PATH Group',
'R^2',
'Elasticity Coefficient'... |
awsdocs/aws-doc-sdk-examples | lambda_functions/codecommit/MyCodeCommitFunction.py | Python | apache-2.0 | 2,083 | 0.005281 | # Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This f... | CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# snippet-sourcedescription:[MyCodeCommitFunction.py demonstrates how to use an AWS Lambda function to return the URLs used for cloning an AWS CodeCommi | t repository to a CloudWatch log.]
# snippet-service:[codecommit]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS CodeCommit]
# snippet-keyword:[Code Sample]
# snippet-keyword:[GetRepository]
# snippet-sourcetype:[full-example]
# snippet-sourceaut... |
huran2014/huran.github.io | wot_gateway/usr/lib/python2.7/xml/dom/expatbuilder.py | Python | gpl-2.0 | 36,382 | 0.00044 | """Facility to use the Expat parser to load a minidom instance
from a string or file.
This avoids all the overhead of SAX and pulldom to gain performance.
"""
# Warning!
#
# This module is tightly bound to the implementation details of the
# minidom DOM and can't be used with other DOM implementations. This
# is due... | Info(None, "nmtoken"),
"NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
}
class ElementInfo(object):
__slots__ = '_attr_info', '_model', 'tagName'
def __init__(self, tagName, model=None):
self.tagName = tagName
self._attr_info = []
self._model = model
def __getstate__(self)... | f.tagName
def __setstate__(self, state):
self._attr_info, self._model, self.tagName = state
def getAttributeType(self, aname):
for info in self._attr_info:
if info[1] == aname:
t = info[-2]
if t[0] == "(":
return _typeinfo_map["EN... |
c3nav/c3nav | src/c3nav/mapdata/migrations/0048_ramp.py | Python | apache-2.0 | 1,532 | 0.004569 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-17 19:24
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0047_remove_mapupdat... | name='ramps', to='mapdata.Space', verbose_name='space')),
],
options={
'verbose_name': 'Ramp',
| 'verbose_name_plural': 'Ramps',
'default_related_name': 'ramps',
},
),
]
|
feroda/lessons-python4beginners | students/2016-09-04/simone-cosma/fibonacci.py | Python | agpl-3.0 | 1,035 | 0.016425 | def _checkInput(index):
if index < 0:
raise ValueError("Indice negativo non supportato [{}]".format(index))
elif type(index) != int:
raise TypeError("Inserire un intero [tipo input {}]".format(type(index).__name__))
def fib_from_string(index):
_checkInput(index)
serie = "0 1 1 2 3 5 8".... | se = current_number
current_number = current_number + base
base = old_base
current_index += 1
pass
return current_number
def recursion(index):
if index <= 1:
return index
return recursion(index - 1) + recursion(index - | 2)
def fib_from_recursion_func(index):
_checkInput(index)
return recursion(index)
calculate = fib_from_recursion_func |
ubccr/tacc_stats | analyze/process_pickles/miss_vs_stall.py | Python | lgpl-2.1 | 5,040 | 0.044048 | #!/usr/bin/env python
import analyze_conf
import sys
import datetime, glob, job_stats, os, subprocess, time
import operator
import matplotlib
# Set the matplotlib output mode from config if it exists
if not 'matplotlib.pyplot' in sys.modules:
try:
matplotlib.use(analyze_conf.matplotlib_output_mode)
except NameE... | pl_utils, lariat_utils, plot
import math
import multiprocessing, functools, itertools
import cPickle as pickle
def do_work(file,mintime,wayness,lariat_dict):
retval=(None,None,None,None,None)
res=plot.get_data(file,mintime,wayness,lariat_dict)
if (res is None):
return retval
(ts, ld, tmid,
read_rate... | e, l2_rate, l3_rate, load_rate, read_frac, stall_frac) = res
# return (scipy.stats.tmean(stall_frac),
# scipy.stats.tmean((load_rate - (l1_rate + l2_rate +
# l3_rate))/load_rate))
mean_mem_rate=scipy.stats.tmean(read_rate+write_rate)*64.0
ename=ld.exc.split('/')[-1]
ename=tspl_utils.str... |
nomel/beaglebone | pru-gpio/templates.py | Python | unlicense | 3,354 | 0.004472 | def populate(template, values):
# template is a string containing tags. the tags get replaced with the entries from the values dictionary.
# example:
# > template = "hello there <<your name>>!"
# > values = {"your name": "bukaroo banzai"}
# > populateTemplate( template, values)
# "hello ... | label = "leds:<<header name>>";
gpios = <&gpio<<gpio bank + 1>> <<gpio pin>> 0>;
linux,default-trigger = "none";
default-state = "off";
};
};
};
};
"""
}
prussHelper = {
"parameters": ("st... | ;
__overlay__ {
status = "<<status>>";
};
};
"""
} |
sidzan/netforce | netforce_mfg/netforce_mfg/models/bom_line.py | Python | mit | 1,967 | 0.004575 | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publ... |
from netforce.model import Model, fields
class BomLine(Model):
_name = "bom.line"
_fields = {
"bom_id": fields.Many2One("bom", "BoM", required=True, on_delete="cascade"),
"produc | t_id": fields.Many2One("product", "Product", required=True),
"qty": fields.Decimal("Qty", required=True, scale=6),
"uom_id": fields.Many2One("uom", "UoM", required=True),
"location_id": fields.Many2One("stock.location", "RM Warehouse"),
"container": fields.Selection([["sale", "From Sales... |
dominickhera/PosaRepo | cis3250labs/parseTest.py | Python | apache-2.0 | 2,244 | 0.039661 | #!/usr/bin/python
import re
userInput = raw_input("input equation\n")
numCount = 0
operandCount = 0
entryBracketCount = 0
exitBracketCount = 0
charCount = 0
endOfLine = len(userInput) - 1
for i in range(len(userInput)):
if (re.search('[\s*a-z\s*A-Z]+', userInput[i])):
charCount = charCount + 1
print operandCoun... | andCount = operandCount + 1
print operandCount, " 3"
# if( | re.search('[\s*\+|\s*\-|\s*\/]+', userInput[endOfLine])):
if(re.search('[+-/*]+', userInput[endOfLine])):
print "invalid expression"
print "1"
exit(0)
else:
if((re.search('[\s*a-zA-Z]+', userInput[i - 1])) or (re.search('[\s*\d]+', userInput[i - 1]))):
continue
else:
print 'invalid expression... |
leppa/home-assistant | homeassistant/components/zigbee/switch.py | Python | apache-2.0 | 669 | 0.001495 | """Support for Zigbee switches."""
import voluptuous as vol
from homeassistant.components.switch import SwitchDevice
from . import PLATFORM_S | CHEMA, ZigBeeDigitalOut, ZigBeeDigitalOutConfig
CONF_ON_STATE = "on_state"
DEFAULT_ON_STATE = "high"
STATES = ["high", "low"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_ON_STATE): vol.In(STATES)})
def setup_platform(hass, conf | ig, add_entities, discovery_info=None):
"""Set up the Zigbee switch platform."""
add_entities([ZigBeeSwitch(hass, ZigBeeDigitalOutConfig(config))])
class ZigBeeSwitch(ZigBeeDigitalOut, SwitchDevice):
"""Representation of a Zigbee Digital Out device."""
pass
|
Apanatshka/C3P | c3p/tests/all_tests.py | Python | gpl-3.0 | 1,036 | 0.003861 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Python 3.2 code
#
# Copyright (c) 2012 Jeff Smits
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your opt... | c License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# C3P - C-compatible code preprocessor
# This commandline tool reads a file and expands macro's.
#
# This | file is a utility file and doesn't contain the whole tool.
# Also it does not run standalone.
#
# This file imports all the tests
from .acceptance_tests import Acc_test
from .unit_tests import * |
vladpopovici/WSItk | WSItk/tools/wsi_bot_codebook3.py | Python | mit | 7,170 | 0.007671 | #!/usr/bin/env python2
#
# wsi_bot_codebook3
#
# Version 3 of codebook construction:
#
# -uses OpenCV for faster operation - but different local descriptors than in the 1st version;
# -uses annotation files for defining the regions from where the descriptors are to be
# extracted
# - try to optimize the codebook with... | print("\nK-means clustering (k = | ", str(k), ")")
print("\t...with", str(X.shape[0]), "points")
#-codebook and re-coding
vq = MiniBatchKMeans(n_clusters=k, random_state=rng,
batch_size=500, compute_labels=True, verbose=False) # vector quantizer
vq.fit(X)
vqs.append(vq)
... |
icereval/modular-file-renderer | mfr/ext/image/render.py | Python | apache-2.0 | 481 | 0 | """Ima | ge renderer module."""
from mfr.core import RenderResult
def render_img_tag(fp, src=None, alt=''):
"""A simple image tag renderer.
:param fp: File pointer
:param src: Path to file
:param alt: Alternate text for the image
:return: RenderResult object containing the content html
"""
# Defau... | |
ZeitOnline/zeit.push | src/zeit/push/message.py | Python | bsd-3-clause | 13,344 | 0 | from zeit.cms.i18n import MessageFactory as _
import grokcore.component as grok
import logging
import zeit.cms.interfaces
import zeit.objectlog.interfaces
import zeit.push.interfaces
import zope.cachedescriptors.property
import zope.component
log = logging.getLogger(__name__)
class Message(grok.Adapter):
grok.... | )
@property
def facebook_magazin_enabled(self):
source = zeit.push.interfaces.facebookAccountSource(None)
service = self.push.get(
type='facebook', account=source.MAGAZIN_ACCOUNT)
return service and service.get('enabled')
@facebook_magazin_enabled.setter
def faceboo... | e(None)
self.push.set(dict(
type='facebook', account=source.MAGAZIN_ACCOUNT),
enabled=value)
@property
def facebook_magazin_text(self):
source = zeit.push.interfaces.facebookAccountSource(None)
service = self.push.get(
type='facebook', account=source.... |
wigginslab/lean-workbench | lean_workbench/scale/scale_resource.py | Python | mit | 1,731 | 0.005777 | import sys
import os
from scale_model import StartupDataModel, VCModel
from flask.ext.restful import Resource, reqparse
from flask import Flask, jsonify, request, make_response
import os
from database import db
from flask.ext.security import current_user
from json import dumps
class Scale_DAO(object):
def __init_... | (self, **kwargs):
"""
TODO: get old data to render in form as default
"""
#check= request.args.get('check')
if current_user.is_anonymous():
return jsonify(status=400)
scale = Scale_DAO()
if scale.user_scale:
return make_response(dumps(scale... | ef post(self):
"""
TODO: add update instead of just creating whole new record
"""
if current_user.is_anonymous():
return jsonify(msg="You are no longer logged in",status=400)
try:
data = request.json
cb_url = data.get('crunchbase_url')
... |
tangyaohua/dl4mt | session2/train_nmt.py | Python | bsd-3-clause | 1,646 | 0.003645 | im | port numpy
# from nmt import train
# from nmtlm import train
from nmt import train
def main(job_id, params):
print params
trainerr, validerr, testerr = train(saveto=params['model'][0],
reload_=params['reload'][0],
| dim_word=params['dim_word'][0],
dim=params['dim'][0],
n_words=params['n-words'][0],
n_words_src=params['n-words'][0],
decay_c=params['decay-c'][0],
... |
e-gob/plataforma-kioscos-autoatencion | scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/monitoring/bigpanda.py | Python | bsd-3-clause | 5,763 | 0.002776 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
... | age = module.params['message']
if message is not None:
body['errorMessage'] = message
if state == 'finished':
body['status'] = 'success'
else:
body['status'] = 'failure'
request_url = url + '/data/events/deployments/end'
# Build the deployment o... | if 'errorMessage' in deployment:
message = deployment.pop('errorMessage')
deployment['message'] = message
# If we're in check mode, just exit pretending like we succeeded
if module.check_mode:
module.exit_json(changed=True, **deployment)
# Send the data to bigpanda
data = js... |
jargij/led-pomper-sha2017 | img_to_queue.py | Python | mit | 3,094 | 0.005171 | import socket
import random
from PIL import Image
import json
import sys, getopt
import math
import pika
# Screen VARS
offset_x = 80
offset_y = 24
screen_width = 240
screen_height = 240
# Internal options
queueAddress = ''
fileName = ''
workers = 36
Matrix = []
def main(argv):
global fileName, workers
inp... | print('The square root of amount of workers is not a whole numbers. GTFO!')
sys.exit()
print("Amount of available workers: " + str(workers))
pompImage()
def addPixelToWorkFile(x, y, r, g, b, index_x, index_y, Matrix):
#print("Current index x:" + str(index_x) + " y: " ... | print("Processiong image to JSON")
im = Image.open(fileName).convert('RGB')
im.thumbnail((240, 240), Image.ANTIALIAS)
_, _, width, height = im.getbbox()
# start with x and y index 1
slice_size = int(screen_width / int(math.sqrt(workers)))
amount_of_keys = int(screen_width / slice_size)
prin... |
sileht/deb-openstack-nova | nova/db/sqlalchemy/migrate_repo/versions/035_secondary_dns.py | Python | apache-2.0 | 1,198 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache... | .create_column(dns2)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
networks = Table('networks', meta, autoload=True)
networks.c.dns1.alter(name='dns')
networks.drop_co | lumn('dns2')
|
linea-it/dri | api/comment/migrations/0006_auto_20191001_1943.py | Python | gpl-3.0 | 596 | 0.001678 | # Generated by Django 2.1.5 on 2019-10-01 19:43
from django.db import migrations, models
class M | igration(migrations.Migration):
dependencies = [
('comment', '0005_auto_20191001_1559'),
]
operations = [
migrations.AlterField(
model_name='dataset',
name='dts_type',
field=models.CharField(choices=[('0', 'User Comment'), ('1', 'Validation History'), ('... | ]
|
hirobert/svgwrite | tests/test_clipping.py | Python | gpl-3.0 | 800 | 0.0075 | #!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozman@gmx.at>
# Purpose: test mixin Clipping
# Created: 31.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
import unittest
from svgwrite.mixins import Clipping
from svgwrite.base import BaseElement
|
class SVGMock(BaseElement, Clipping):
elementname = 'svg'
class TestClipping(unittest.TestCase):
def test_clip_rect_numbers(self):
obj = SVGMock(debug=True)
obj.clip_rect(1, 2, 3, 4)
self.assertEqual(obj['clip | '], 'rect(1,2,3,4)')
def test_clip_rect_auto(self):
obj = SVGMock(debug=True)
obj.clip_rect('auto', 'auto', 'auto', 'auto')
self.assertEqual(obj['clip'], 'rect(auto,auto,auto,auto)')
if __name__=='__main__':
unittest.main() |
yellcorp/dupescan | dupescan/fs/_walker.py | Python | mit | 4,089 | 0.005625 | from typing import Iterable, Callable, Optional, Any, List, Iterator
from dupescan.fs._fileentry import FileEntry
from dupescan.fs._root import Root
from dupescan.types import AnyPath
FSPredicate = Callable[[FileEntry], bool]
ErrorHandler = Callable[[EnvironmentError], Any]
def catch_filter(inner_filter: FSPredicat... | error_handler_func (if provided) and return false
def wrapped_func(*args, **kwargs):
try:
return inner_filter(*args, **kwargs)
except EnvironmentError as env_error:
if error_handler_func is not None:
error_handler_func(env_error)
return False
... | dir_object_filter: Optional[FSPredicate]=None,
file_object_filter: Optional[FSPredicate]=None,
onerror: Optional[ErrorHandler]=None
):
self._recursive = bool(recursive)
self._onerror = noerror if onerror is None else onerror
self._dir_filter = catch_filter(dir_... |
Vierkantor/PyServ | run.py | Python | gpl-3.0 | 67 | 0 | #!/ | usr/bin/env python3
from pyserv.databrowse import main
main( | )
|
viswimmer1/PythonGenerator | data/python_files/30585323/ravebot.py | Python | gpl-2.0 | 7,909 | 0.013529 | import os, sys
up_path = os.path.abspath('..')
sys.path.append(up_path)
from numpy import *
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib import rc
from objects import SimObject
from utils import scalar
from covar import draw_ellipsoid, vec2cov, cov2vec,\
pr... | urn 0 #Todo: FI | XME
def camera_obj_state(self,x):
#Returns the transform of the camera and object
camera_transform = self.bot.camera_transform(x[0:self.bot.NX])
obj_pos = x[self.bot.NX:]
z = mat(zeros((10,1)))
z[0:7] = camera_transform
z[7:10] = obj_pos
return z
"""
... |
yleo77/leetcode | To_Lower_Case/answer.py | Python | mit | 372 | 0.002688 |
class Solution:
def toLowerCase(self, str: str) -> str:
rs = ""
# 32
section = ord("a") - or | d("A")
| for s in str:
if ord(s) >= ord("A") and ord(s) <= ord("Z"):
rs = rs + chr(ord(s) + section)
else:
rs = rs + s
return rs
sol = Solution()
print(sol.toLowerCase("Hello"))
|
scholer/cadnano2.5 | cadnano/controllers/__init__.py | Python | mit | 298 | 0.003356 | from .nucleicacidpartitemcontroller import NucleicAcidPartItemController
from . | oligoitemcontroller import OligoItemController
from .stranditemcontroller import StrandItemController
from .viewroo | tcontroller import ViewRootController
from .virtualhelixitemcontroller import VirtualHelixItemController |
tensorflow/deepmath | deepmath/deephol/deephol_loop/report.py | Python | apache-2.0 | 8,295 | 0.005907 | r""""DeepHOL large scale reporting in Apache Beam."""
from __future__ import absolute_import
from __future__ import division
# Import Type Annotations
from __future__ import print_function
import io
import os
import apache_beam as beam
from apache_beam.metrics import Metrics
import matplotlib.pyplot as plot
import ten... | open_goals = | file_lines_set(self.open_goals_filename +
'-00000-of-00001.txt')
proven_goals = file_lines_set(self.proven_goals_filename +
'-00000-of-00001.txt')
never_proven = open_goals - proven_goals
num_open_goals = len(never_proven)
num_proven_goal... |
shedskin/shedskin | tests/28.py | Python | gpl-3.0 | 210 | 0.009524 |
def propagate(la): # | la: [list(int)]
print la, la | # [str], [str]
propagate([1]) # []
propagate([2]) # []
|
maurobaraldi/brms | brms/_settings/production.py | Python | mit | 491 | 0.002037 | from b | rms.settings.base import *
import dj_database_url
DEBUG = False
ALLOWED_HOSTS = ['.example.com']
# Use the cached template loader so template is compiled once and read from
# memory instead of reading from disk on each load.
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
... | abase_url.config() |
rodriguesrl/reddit-clone-udemy | posts/migrations/0002_auto_20170307_1920.py | Python | mit | 419 | 0 | # -* | - coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 19:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0001_initial'),
| ]
operations = [
migrations.AlterField(
model_name='post',
name='url',
field=models.URLField(),
),
]
|
UdK-VPT/Open_eQuarter | mole/extensions/eval_enev/oeq_AHDE.py | Python | gpl-2.0 | 1,413 | 0.008499 | # -*- coding: utf-8 -*-
import os,math
from qgis.core import NULL
from mole import oeq_global
from mole.project import config
from mole.extensions import OeQExtension
from mole.stat_corr import rb_contemporary_base_uvalue_by_building_age_lookup
def calculation(self=None, parameters={},feature = None):
from math i... | ger Lüftung im Wohnungsbau
# 20% of the Total Area are used for stairs and floors
return {'AHDE': {'type': QVariant.Double, 'value': ahde}}
extension = OeQExtension(
extension_id=__name__,
category='Evaluation',
subcategory='Building',
extension_name='AHD Building per Livig Area EnEV',
... | extension_filepath=os.path.join(__file__),
colortable = os.path.join(os.path.splitext(__file__)[0] + '.qml'),
field_id='AHDE',
source_type='none',
par_in=['HLAE'],
sourcelayer_name=config.data_layer_name,
targetlayer_name=config.data_layer_name,
active=True,
show_results=['AHDE'],
... |
GbalsaC/bitnamiP | edx-val/edxval/serializers.py | Python | agpl-3.0 | 4,408 | 0.001361 | """
Serializers for Video Abstraction Layer
Serialization is usually sent through the VideoSerializer which uses the
EncodedVideoSerializer which uses the profile_name as it's profile field.
"""
from rest_framework import serializers
from django.core.exceptions import ValidationError
from edxval.models import Profile... | ct format
"""
value = attrs[source]
if attrs.get('fmt') == | 'sjson':
import json
try:
loaded = json.loads(value)
except ValueError:
raise serializers.ValidationError("Not in JSON format")
else:
attrs[source] = json.dumps(loaded)
return attrs
class Meta: # pylint: disab... |
praekelt/jmbo-janrain | janrain/urls.py | Python | bsd-3-clause | 70 | 0 | fro | m django.conf.urls.defaults import patterns, url
urlpattern | s = ()
|
jstammers/EDMSuite | EDMScripts/EDMLoop_neg_slope.py | Python | mit | 14,423 | 0.026555 | # Import a whole load of stuff
from System.IO import *
from System.Drawing import *
from System.Runtime.Remoting import *
from System.Threading import *
from System.Windows.Forms import *
from System.Xml.Serialization import *
from System import *
from Analysis.EDM import *
from DAQ.Environment import *
fro... | es.GetChannelIndex(("LF1",))
lf1Value = pmtChannelValues.GetValue(lf1Index)
#lf1Error = pmtChannelValues.GetError(lf1Index)
lf1dbIndex = pmtChannelValues.GetChannelIndex(("LF1","DB") | )
lf1dbValue = pmtChannelValues.GetValue(lf1dbIndex)
print "SIG: " + str(sigValue)
print "B: " + str(bValue) + " DB: " + str(dbValue)
print "RF1A: " + str(rf1aValue) + " RF2A: " + str(rf |
Lemma1/MAC-POSTS | doc_builder/sphinx-contrib/_template/setup.py | Python | mit | 1,194 | 0.000838 | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the ${name} Sphinx extension.
.. add description here ..
'''
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-${name}',
version='0.1',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
... | aut | hor='${author}',
author_email='${author_email}',
description='Sphinx "${name}" extension',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Dev... |
varepsilon/clickmodels | clickmodels/inference.py | Python | bsd-3-clause | 36,839 | 0.005076 | from collections import defaultdict
from datetime import datetime
import gc
import json
import math
import random
import sys
from .config_sample import MAX_ITERATIONS, DEBUG, PRETTY_LOG, MAX_DOCS_PER_QUERY, SERP_SIZE, TRANSFORM_LOG, QUERY_INDEPENDENT_PAGER, DEFAULT_REL
class NotImplementedError(Exception):
pass
... | # P(C_k | C_1, ..., C_{k-1}) = \sum_I P(C_1, ..., C_k | I) P(I) / \sum_I P(C_1, ..., C_{k-1} | I) P(I)
curClick = dict((i, clickProbs[i][k]) for i in possibleIntents)
prevClick = dict((i, clickProbs[i][k - 1]) for i in possibleIntents) if k > 0 else dict((i, 1.0) for i in p... | sibleIntents), 2) - math.log(sum(prevClick[i] * intentWeight[i] for i in possibleIntents), 2)
positionPerplexity[correctedRank] += logProb
positionPerplexityClickSkip[correctedRank][click] += logProb
counts[correctedRank] += 1
countsClickSkip[correctedRank... |
ywcui1990/nupic | src/nupic/data/stats.py | Python | agpl-3.0 | 6,351 | 0.013384 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This progra... | _addFirst(value)
return
value = float(value)
if value < self.min:
self.min = value
if value > self.max:
self.max = value
self.sum += value
self.n += 1
def getStats(self):
return dict(min = self.min,
max = | self.max,
sum = self.sum,
n = self.n,
average = self.sum / self.n)
class CategoryStatsCollector(object):
def __init__(self):
self.categories = dict()
def add(self, value):
self.categories[value] = self.categories.get(value, 0) + 1
def getStats(self):
... |
scattermagic/django-wizard-builder | wizard_builder/migrations/0009_pagebase_to_questionpage.py | Python | bsd-3-clause | 1,476 | 0.001355 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-08-02 21:54
from __future__ import unicode_literals
from django.db import migrations, models
def copy_to_question_page(apps, schema_editor):
current_database = schema_editor.connection.alias
QuestionPage = apps.get_model('wizard_builder.QuestionPag... | in QuestionPage.objects.using(current_database):
page.new_position = page.position
page.new_section = page.section
for site in page.sites.all():
page.new_sites.add(site)
page.save()
class Migration(migrations.Migration):
dependencies = [
('sites', '0002_alter_d... | ('wizard_builder', '0008_remove_textpage'),
]
operations = [
migrations.AddField(
model_name='questionpage',
name='new_position',
field=models.PositiveSmallIntegerField(default=0, verbose_name='position'),
),
migrations.AddField(
mode... |
chrys87/fenrir | src/fenrirscreenreader/screenDriver/ptyDriver.py | Python | lgpl-3.0 | 9,221 | 0.010845 | #!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributers.
import os, struct, sys, pty, tty, termios, shlex, signal, pyte, time, fcntl ,getpass
from select import select
from fenrirscreenreader.core import debug
from fenrirscreenreader.core.eventData import fenrirEven... | self.attributes[y]) < self.screen.columns:
diff = self.screen.columns - len(self.attributes[y])
self.attributes[y] += [['default', 'default', False, False, False, False, False, False, 'default', 'default']] * diff
def resize(self, lines, columns):
self.screen.resize(lines,... | lumns)
self.setCursor()
self.updateAttributes(True)
def setCursor(self, x = -1, y = -1):
xPos = x
yPos = y
if xPos == -1:
xPos = self.screen.cursor.x
if yPos == -1:
yPos = self.screen.cursor.y
self.screen.cursor.x = min(self.screen.curs... |
ortoloco/jordbruksmark | jordbruksmark/migrations/0003_auto_20161217_2150.py | Python | gpl-3.0 | 472 | 0.002119 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-17 20:50
from __future__ import unicode_ | literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('jordbruksmark', '0002_auto_20161217_2140'),
]
operations = [
migrations.AlterModelOptions(
| name='wochen_menge',
options={'verbose_name': 'Wochen Menge', 'verbose_name_plural': 'Wochen Mengen'},
),
]
|
stackforge/cloudbase-init | cloudbaseinit/metadata/services/nocloudservice.py | Python | apache-2.0 | 11,949 | 0 | # Copyright 2020 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable l... | oes not have a name.")
return
link = network_model.Link(
id=item.get('name'),
name=item.get('name'),
type=network_model.LINK_TYPE_PHYSICAL,
enabled=True,
mac_address=item.get('mac_address'),
mtu=item.get('mtu'),
bon... | networks=self._parse_subnets(item.get("subnets"), link.name),
services=[]
)
def _parse_bond_config_item(self, item):
if not item.get('name'):
LOG.warning("Bond does not have a name.")
return
bond_params = item.get('params')
if not bond_pa... |
hlzz/dotfiles | graphics/cgal/Segment_Delaunay_graph_Linf_2/developer_scripts/lsprotate90.py | Python | bsd-3-clause | 636 | 0.023585 | #!/usr/bin/env pyt | hon
import sys
def inv(s):
if s[0] == '-':
return s[1:]
elif s[0] == '+':
return '-' + s[1:]
else: # plain number
return '-' + s
if len(sys.argv) != 1:
print 'Usage:', sys.argv[0]
sys.exit(1)
for line in sys.stdin:
linesplit = line.strip().split()
if len(linesplit) == 3:
assert(linespl... | == 's')
print('s ' + \
inv(linesplit[2]) + ' ' + linesplit[1] + ' ' + \
inv(linesplit[4]) + ' ' + linesplit[3] )
elif len(linesplit) == 0:
print
|
anparser/anparser | anparser/plugins/other_plugins/yara_parser.py | Python | gpl-3.0 | 3,312 | 0.001208 | # -*- coding: utf-8 -*-
"""
anparser - an Open Source Android Artifact Parser
Copyright (C) 2015 Preston Miller
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(a... | else:
yara_data['Author'] = ''
yara_data['Description'] = ''
yara_data['Flag'] = string['flags']
yara_data['Identifier'] = string['identifier']
yara_data['Data'] = string['data']
yara_data['Offset'] = string['offset']
if tag... | eredDict() |
RetailMeNotSandbox/dart | src/python/dart/model/api_key.py | Python | mit | 370 | 0.002703 | from dart.model.ba | se imp | ort BaseModel, dictable
@dictable
class ApiKey(BaseModel):
def __init__(self, id, user_id, api_key, api_secret):
"""
:type user_id: str
:type api_key: str
:type api_secret: str
"""
self.id = id
self.user_id = user_id
self.api_key = api_key
sel... |
benjaminhabbel/motion_recorder | old/button_loop.py | Python | gpl-3.0 | 770 | 0 | import time
import recordlib
if __name__ == "__main__":
recordlib.initialize()
print("waiting for input")
recordlib.logging.info("waiting for input")
try:
# define interrupt, get rising signal, debounce pin
recordlib.GPIO.add_event_detect(
recordlib.TASTER_1,
rec... | bouncetime=1000
)
recordlib.GPIO.add_event_detect(
recordlib.TASTER_2,
recordlib.GPIO.RISING,
callback=recordlib.stop_recording,
bouncetime=1000
)
# keep script running
while True:
time | .sleep(0.5)
finally:
recordlib.GPIO.cleanup()
print("\nQuit\n")
|
VDBWRAIR/bactpipeline | test/test_fix_fastq.py | Python | gpl-2.0 | 2,468 | 0.040519 | from __future__ import print_function
from imports import *
import common
class Base( common.Base ):
pass
class TestUnitMiSeqToNewbler( Base ):
def _C( self, *args, **kwargs ):
from bactpipeline.fix_fastq import miseq_to_newbler_id
return miseq_to_newbler_id( *args, **kwargs )
def test_r1... | e.fix_fastq import mod_fq_read
return mod_fq_read( *args, **kwargs )
def test_mods_correctly( s | elf ):
from bactpipeline.fix_fastq import miseq_to_newbler_id as mtni
id = 'abcd 1'
seq = 'ATGC'
qual = 'IIII'
r = self._C( id, seq, qual )
read = '{0}\n{1}\n+\n{2}\n'.format(mtni(id),seq,qual)
eq_( read, r )
class TestUnitParseFq( Base ):
def _C( self, *args... |
sdpython/ensae_teaching_cs | _unittests/ut_special/test_LONG_image2.py | Python | mit | 2,586 | 0.001933 | """
@brief test log(time=200s)
"""
import os
import unittest
import math
import warnings
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, is_travis_or_appveyor
from ensae_teaching_cs.special.image.image_synthese_base import Vecteur, Couleur, Source, Repere
from ensae_teach... | Vecteur(-12.6, 0.99, 6.1), None, Couleur(0, 0, 0)))
if is_travis_or_appveyor() == "travis":
warnings.warn("pygame is not available")
return
import pygame
| s.ajoute_objet(RectangleImage(Vecteur(8, -3.5, 9), Vecteur(2, -3.5, 8),
Vecteur(2, 3.8, 8), None, image, invertx=True, pygame=pygame))
from ensae_teaching_cs.helpers.pygame_helper import wait_event
screen = pygame.display.set_mode(s.dim)
screen.fill... |
bchiroma/dreamproject | dream/simulation/Frame.py | Python | gpl-3.0 | 1,515 | 0.014521 | # ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Founda... | ttp://www.gnu.org/licenses/>.
# ===========================================================================
'''
Created on 18 Feb 2013
@author: | George
'''
'''
models a frame entity. This can flow through the system and carry parts
'''
from simpy import Resource
from Globals import G
from Entity import Entity
#The entity object
class Frame(Entity):
type="Frame"
capacity=4 #the number of parts that the frame can take
def __init_... |
aarestad/gradschool-stuff | xml-class/python-xml/JobMarkupLanguage/xparser.py | Python | gpl-2.0 | 2,877 | 0.014599 | #
# This is a parser that generates the document tree for you.
#
# To use this parser, create an instance of XElementParser:
# parser = saxexts.make_parser()
# xp = XElementParser(parser)
#
# If you have defined classes in the current environment, you might want ot
# pass this environment *to* the parser, so your... | _error_handler.reset()
self.parser.parse(document_uri)
if self.parser_error_handler.has_errors():
raise "va | lidation failed"
return self.xth.getDocument().getChild()
except IOError,e:
print "\nI/O Error: " + document_uri + ": " + str(e)
except saxlib.SAXException,e:
print "\nParse Error: " + document_uri + ": " + str(e)
class ErrorPrinter:
"A simple class that just prints error m... |
susurrant-audio/scdown | scdown/celeryconfig.py | Python | mit | 747 | 0 | import os
import re
BROKER_URL = os.getenv("CLOUDAMQP_URL", 'amqp://')
# BROKER_POOL_LIMIT = None
MONGOLAB_URI = None
MONGOLAB_DB = None
URI_WITH_AUTH = None
mongolab = os.getenv("MONGOLAB_UR | I")
if mongolab is not None:
uri_pat = r"mongodb://([^:]+):([^@]+)@([^:]+):(\d+)/(.+)"
user, passwd, host, port, db = re.match(uri_pat, mongolab).groups()
uri = "mongodb://{}:{}".format(host, port)
MONGOLAB_URI = uri
MONGOLAB_DB = db
# CELERY_RESULT_BACKEND = uri
# CELERY_MONGODB_BACKEND_SET... | _SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
|
kewisch/bedrock | bedrock/firefox/views.py | Python | mpl-2.0 | 18,915 | 0.000212 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import re
from django.conf import settings
from django.db.models import Q
from dja... | product='firefox', channel='release'):
if channel == 'organizations':
channel = 'esr'
if product == 'mobile':
return mobile_details.latest_version(channel)
else:
return firefox_details.latest_version(channe | l)
def installer_help(request):
installer_lang = request.GET.get('installer_lang', None)
installer_channel = request.GET.get('channel', None)
context = {
'installer_lang': None,
'installer_channel': None,
}
if installer_lang and installer_lang in firefox_details.languages:
... |
diegodelemos/reana-job-controller | reana_job_controller/job_manager.py | Python | mit | 4,466 | 0.000224 | # -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2019 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Job Manager."""
import json
import shlex
from flask import current_app
from reana_commons... | xecution(self):
"""After job submission hook."""
pass
@execution_hook
def execute(self):
"""Execute a job.
:returns: Job ID.
:rtype: str
"""
raise NotImplementedError
def get_status(self):
"""Get job status.
:returns: j | ob status.
:rtype: str
"""
raise NotImplementedError
def get_logs(self):
"""Get job log.
:returns: stderr, stdout of a job.
:rtype: dict
"""
raise NotImplementedError
def stop(self):
"""Stop a job."""
raise NotImplementedError
... |
homhei/glance | glance/db/js.py | Python | apache-2.0 | 1,449 | 0.014493 | #!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-6-24
#Copyright 2013 nuoqingyun xuqifeng
from bson.code import Code
traffic_map = Code("function () {"
"emit(this.domain, this.bytes);"
"}")
traffic_reduce = Code("function (key, values) {"
... | " count += vals.hits;"
" visits += vals.visit;"
| "});"
" return {bytes:sum, visit:visits, hits:count};"
"}")
|
ksmit799/Toontown-Source | toontown/parties/PartyCatchActivityToonSD.py | Python | mit | 9,299 | 0.003979 | from pandac.PandaModules import Vec3
from direct.interval.IntervalGlobal import Sequence, Parallel, Wait, Func
from direct.interval.IntervalGlobal import LerpScaleInterval
from direct.interval.IntervalGlobal import WaitInterval, ActorInterval, FunctionInterval
from direct.task.Task import Task
from direct.directnotify ... | def exitFallBack(self):
self.fal | lBackIval.pause()
del self.fallBackIval
def enterFallForward(self):
self.notify.debug('enterFallForward')
if self.isLocal:
base.playSfx(self.activity.sndOof)
duration = 2.0
animName = self.FallFwdAnim
startFrame = 12
totalFrames = self.toon.getNum... |
soad241/django-notification | notification/models.py | Python | mit | 14,929 | 0.002612 | import datetime
try:
import cPickle as pickle
except ImportError:
import pickle
from django.db import models
from django.db.models.query import QuerySet
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import Context
from django.template.loader import render_t... | get_absolute_url = models.permalink(get_absolute_ | url)
class NoticeQueueBatch(models.Model):
"""
A queued notice.
Denormalized data for a notice.
"""
pickled_data = models.TextField()
def create_notice_type(label, display, description, default=2, verbosity=1):
"""
Creates a new NoticeType.
This is intended to be used by other apps as... |
google-research/policy-learning-landscape | eager_pg/trajectory_batch_stats_test.py | Python | apache-2.0 | 5,906 | 0.003725 | # coding=utf-8
# Copyright 2018 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you m | ay not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to i | n writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for eager_pg.trajectory_batch_stats.
Note t... |
nexdatas/configtool | test/DefinitionDlg_test.py | Python | gpl-3.0 | 85,762 | 0 | #!/usr/bin/env python
# This file is part of nexdatas - Tango Server for NeXus data writer
#
# Copyright (C) 2012-2017 DESY, Jan Kotanski <jkotan@mail.desy.de>
#
# nexdatas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the ... | lf.aname)
QTest.keyClicks(mb.ui.valueLineEdit, self.avalue)
self.assertEqual(mb.ui.valueLineEdit.text(), self.avalue)
mb.accept()
def attributeWidgetClose(self):
# aw =
QApplication.activeWindow()
mb = QApplication.activeModalWidget()
self.assertTrue(isinsta... | me)
QTest.keyClicks(mb.ui.valueLineEdit, self.avalue)
self.assertEqual(mb.ui.valueLineEdit.text(), self.avalue)
# mb.close()
mb.reject()
# mb.accept()
# constructor test
# \brief It tests default settings
def test_constructor(self):
fun = sys._getframe().f_co... |
dionbosschieter/numatuned | numatuned.py | Python | mit | 167 | 0 | #!/usr/bin/env python3
import sys
import | numatuned
dryrun = False
if len(sys.argv) > 1:
if sys.argv[1] == '-n':
dryrun = Tr | ue
numatuned.fire(60, dryrun)
|
sethuiyer/mlhub | Deep Sentiment Analysis/build_sentiment_model.py | Python | mit | 1,105 | 0.000905 | import tflearn
from tflearn.data_utils import to_categorical, pad_sequences
from tflearn.datasets import imdb
# IMDB Dataset loading
train, test, _ = imdb.load_data(path='imdb.pkl', n_words=10000,
valid_portion=0.1)
trainX, trainY = train
testX, testY = test
# Data preprocessing
# Sequ... | rainY = to_categorical(trainY, nb_classes=2)
testY = to_categorical(testY, nb_classes=2)
# Network building
net = tflearn.input_data([None, 100])
net = tflearn.embedding(net, input_dim=10000, output_dim=128)
net = tflearn.lstm(net, 128, dropout=0.8)
net = tflearn.fully_connected(net, 2, activation='softmax')
net = tfl... | l = tflearn.DNN(net, tensorboard_verbose=0)
model.fit(trainX, trainY, validation_set=(testX, testY), show_metric=True,
batch_size=32)
model.save('sentiment.tflearn') |
djaodjin/djaodjin-signup | signup/docs.py | Python | bsd-2-clause | 2,317 | 0.001295 | # Copyright (c) 2020, Djaodjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and t... | ED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY ... | EN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pylint:disable=unused-argument,unused-import
try:
from drf_yasg.openapi import Response as OpenAPIResponse
from drf_yasg.utils import no_body, swagger_auto_schema
except ImportError:
from functools import wraps
from .compat import available_attrs
... |
alex/readthedocs.org | deploy/fabfile.py | Python | mit | 940 | 0.004255 | from fabric.api import env, local, run, sudo
env.user = 'root'
env.hosts = ['204.232.205.6']
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
env.chef_executable = '/va | r/lib/gems/1.8/bin/chef-solo'
def install_chef():
sudo('apt-get update', pty=True)
sudo('apt-get install -y git-core rubygems ruby ruby-dev', pty=True)
sudo('gem install chef --no-ri --no-rdoc', pty | =True)
def sync_config():
local('rsync -av . %s@%s:/etc/chef' % (env.user, env.hosts[0]))
def update():
sync_config()
sudo('cd /etc/chef && %s' % env.chef_executable, pty=True)
def reload():
"Reload the server."
env.user = "docs"
run("kill -HUP `cat %s/gunicorn.pid`" % env.rundir, pty=True)
... |
Tanych/CodeTracking | 121-Best-Time-to-Buy-and-Sell-Stock/solution.py | Python | mit | 316 | 0.025316 | class Solution(object):
def maxProfit(self, prices | ):
"""
:type prices: List[int]
:rtype: int
"""
low=1<<31
profit=0
for p in prices:
if p<low:
low=p
if p-low>profit:
| profit=p-low
return profit |
willowd878/nca47 | nca47/db/sqlalchemy/models/dns.py | Python | apache-2.0 | 1,680 | 0 | import sqlalchemy as sa
from oslo_db.sqlalchemy import types as db_types
from nca47.db.sqlalchemy.models import base as model_base
from nca47.objects import attributes as attr
HasTenant = model_base.HasTenant
HasId = model_base.HasId
HasStatus = model_base.HasStatus
HasOperationMode = model_base.HasOperationMode
c... | .UUID_LEN))
rrs_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
rrs_name = sa.Column(sa.String(attr.NAME_MAX_LEN))
type = sa.Column(sa.String(attr.NAME_MAX_LEN))
klass = sa.Column(sa.String(att | r.NAME_MAX_LEN))
ttl = sa.Column(sa.String(attr.NAME_MAX_LEN))
rdata = sa.Column(sa.String(attr.NAME_MAX_LEN))
|
marwano/utile | testsuite/test_xml.py | Python | bsd-3-clause | 709 | 0 |
from utile import pretty_xml, xml_to_dict, element_to_dict
from tests | uite.support import etree, TestCase
import unittest
XML_DATA = "<html><body><h1>test1</h1><h2>test2</h2></body></html>"
XML_PRETTY = """\
<html>
<body>
<h1>test1</h1>
<h2>test2</h2>
</body>
</html>
"""
XML_DICT = {'body': {'h2': 'test2', 'h1': 'test1'}}
@unittest.skipUnless(etree, 'lxml not installed')
c... | XML_PRETTY)
def test_element_to_dict(self):
self.assertEqual(element_to_dict(etree.XML(XML_DATA)), XML_DICT)
def test_xml_to_dict(self):
self.assertEqual(xml_to_dict(XML_DATA), XML_DICT)
|
t-brandt/acorns-adi | utils/config.py | Python | bsd-2-clause | 2,628 | 0.002664 | #!/usr/bin/env python
#
# Original filename: config.py
#
# Author: Tim Brandt
# Email: tbrandt@astro.princeton.edu
# Date: August 2011
#
# Summary: Set configuration parameters to sensible values.
#
import re
from subprocess import *
import multiprocessing
import numpy as np
def config(nframes, framesize):
####... | ver = Popen(["uname", "-a"], stdout=PIPE).stdout.read()
if osver.startswith("Linux"):
print "You are running Linux."
elif osver.startswith("Darwin"):
print "You are running Mac OS-X."
else:
print "Your operating system is not recognized."
if osver.startswith("Linux"):
| mem = Popen(["free", "-b"], stdout=PIPE).stdout.read()
mem = int(mem.split('\n')[1].split()[1])
elif osver.startswith("Darwin"):
mem = Popen(["vm_stat"], stdout=PIPE).stdout.read().split('\n')
blocksize = re.search('.*size of ([0-9]+) bytes.*', mem[0]).group(1)
totmem = 0.
... |
romain-dartigues/ansible | lib/ansible/plugins/httpapi/nxos.py | Python | gpl-3.0 | 5,290 | 0.001323 | # (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connectio... | ults = []
if response['ins_api'].get('outputs'):
for output in to_list(response['ins_api'] | ['outputs']['output']):
if output['code'] != '200':
raise ConnectionError('%s: %s' % (output['input'], output['msg']))
elif 'body' in output:
result = output['body']
if isinstance(result, dict):
result = json.dumps(result)
... |
thunsaker/cloudpebble | ide/utils/sdk/manifest.py | Python | mit | 11,537 | 0.001127 | import json
import re
import uuid
from django.utils.translation import ugettext as _
from ide.utils.project import APPINFO_MANIFEST, PACKAGE_MANIFEST, InvalidProjectArchiveException
__author__ = 'katharine'
def manifest_name_for_project(project):
if project.is_standard_project_type and project.sdk_version == '... | resource_dict(project, resources),
'projectType': project.project_type
}
}
if project.app_capabilities:
manifest['pebble']['capabilities'] = project.app_capabilities.split(',')
if project.project_type == 'package':
manifest['files'] = ['dist.zip']
else:
manife... | = project.app_modern_multi_js
manifest['pebble']['displayName'] = project.app_long_name
if project.app_is_hidden:
manifest['pebble']['watchapp']['hiddenApp'] = project.app_is_hidden
if project.app_platforms:
manifest['pebble']['targetPlatforms'] = project.app_platform_list
re... |
maxwward/SCOPEBak | askbot/migrations/0021_auto__add_field_comment_score.py | Python | gpl-3.0 | 25,917 | 0.008604 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Comment.score'
db.add_column(u'comment', 'score', self.gf('django.db.models.fields.Int... | sproblem': {
'Meta': {'object_name': 'AnonymousProblem'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('dja... | ax_length': '15'}),
'exercise': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_problems'", 'to': "orm['askbot.Exercise']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField... |
LivingOn/xbmc-script.youtube2kodi | resources/lib/MediaTypes.py | Python | gpl-2.0 | 5,517 | 0.005982 | # -*- coding=utf8 -*-
#******************************************************************************
# MediaTypes.py
#------------------------------------------------------------------------------
#
# Copyright (c) 2015 LivingOn <LivingOn@xmail.net>
#
# This program is free software; you can redistribute it and/or mod... | e License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FIT | NESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#****************************************... |
SUSE/ceph-deploy | ceph_deploy/lib/__init__.py | Python | mit | 817 | 0 | """
This module is meant for vendorizing Python libraries. Most libraries will need
to have some `` | sys.path`` alterations done unless they are doing relative
imports.
Do **not** add anything to this module that does not represent a vendorized
library.
Vendored libraries should go into the ``vendor`` directory and imported from
there. This is so we allow libraries that are installed normally to be imported
if the v... | e import dance here is done so that all other imports throught ceph-deploy
are kept the same regardless of where the module comes from.
The expected way to import remoto would look like this::
from ceph_deploy.lib import remoto
"""
try:
# vendored
from .vendor import remoto
except ImportError:
# nor... |
LLNL/spack | var/spack/repos/builtin/packages/r-pbdzmq/package.py | Python | lgpl-2.1 | 1,667 | 0.002999 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPbdzmq(RPackage):
"""Programming with Big Data -- Interface to 'ZeroMQ'
'ZeroMQ' is... | n of 'ZeroMQ' is available. A few wrapper
functions compatible with 'rzmq' are also provided."""
homepage = "http://r-pbd.org/"
url = "https://cloud.r-project.org/src/contrib/pbdZMQ_0.2-4.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/pbdZMQ"
version('0.3-4', sha256='0779... | , sha256='ae26c13400e2acfb6463ff9b67156847a22ec79f3b53baf65119efaba1636eca')
version('0.3-2', sha256='ece2a2881c662f77126e4801ba4e01c991331842b0d636ce5a2b591b9de3fc37')
version('0.2-4', sha256='bfacac88b0d4156c70cf63fc4cb9969a950693996901a4fa3dcd59949ec065f6')
depends_on('r@3.0.0:', type=('build', 'run'))
... |
sorenh/cc | vendor/boto/boto/sns/__init__.py | Python | apache-2.0 | 13,553 | 0.003394 | # Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, m... | cess_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
self.region.endpoint, debug, https_connection_factory | , path)
def get_all_topics(self, next_token=None):
"""
:type next_token: string
:param next_token: Token returned by the previous call to
this method.
"""
params = {'ContentType' : 'JSON'}
if next_token:
params['NextToken'] = n... |
mph55/lanstation13 | tools/bot/vgstation/common/config.py | Python | gpl-3.0 | 1,605 | 0.011838 | '''
Created on Jul 28, 2013
@author: Rob
'''
import os, yaml
config = {
'names': [
'NT',
'VGTestServer'
],
'servers':{
'irc.server.tld': {
'port':6667,
'password':None,
'channels':{
'#vgstati... | ):
with open(config_file, 'w') as cw:
yaml.dump(config, cw, default_flow_style=False)
|
with open(config_file, 'r') as cr:
config = yaml.load(cr)
# if config['database']['username'] == '' or config['database']['password'] == '' or config['database']['schema'] == '':
# print('!!! Default config.yml detected. Please edit it before continuing.')
# sys.exit(1)
... |
vially/googlemusic-xbmc | resources/Lib/gmusicapi/clients/__init__.py | Python | gpl-3.0 | 323 | 0.01548 | # -*- coding: utf-8 -*-
#from __future__ import print_function, division, absolute_import | , unicode_literals
#from gmusicapi.clients.webclient import Webclient
#from gmusicapi.clients.musicmanager import Musicmanager
from gmusicapi.clients. | mobileclient import Mobileclient
#(Webclient, Musicmanager, Mobileclient) # noqa
|
NendoTaka/CodeForReference | Codingame/Python/Clash/SortHighLowReverse.py | Python | mit | 112 | 0.017857 | l = []
for x in range(int(input())):
l.append(int(input()))
l.sort()
print(' '.join(str(x) for x in l[ | ::-1]))
| |
globocom/database-as-a-service | dbaas/maintenance/migrations/0051_auto__add_removeinstancedatabase.py | Python | bsd-3-clause | 78,775 | 0.007553 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RemoveInstanceDatabase'
db.create_table(u'maintenance_rem... | go.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'backup.snapshot':... | ated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end_at': ('django.db.models.fields.DateTimeField', [], {'nu... |
IAlwaysBeCoding/mrq | tests/tasks/mongodb.py | Python | mit | 381 | 0 | from mrq.task import Task
from mrq.context import connections
class MongoTimeout(Task):
def run(self, params):
res = connections.mongodb_jobs.eval("""
function() {
var a;
for (i=0;i<10000000;i++) { |
for (y=0;y<10000000;y++) {
a = Math.max(y);
}
}
return a;
}
""")
return res
| |
OCA/vertical-abbey | mass/base_config_settings.py | Python | agpl-3.0 | 793 | 0 | # -*- coding: utf-8 -*-
# Copyright 2017-2019 Barroux Abbey (www.barroux.org)
# Copyright 2017-2019 Akretion France (www.akretion.com)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields
class Ba | seConfigSettings(models.TransientModel):
_inherit = 'base.config.settings'
mass_validation_account_id = fields.Many2one(
related='company_id.mass_validation_account_id')
mass_validation_analytic_account_id = fields.Many2one(
related='company_id.mass_validation_analytic_account_id')
mass... | (
related='company_id.mass_validation_journal_id')
mass_post_move = fields.Boolean(related='company_id.mass_post_move')
|
mkalte666/Dragonflame | IrcClient.py | Python | mit | 3,540 | 0.05113 | # coding=utf-8
import socket
import thread
import time
import Queue
import re
import random
class IrcClient:
def __init__(self, host, port, nick, realname, printAll=True, isMibbitBot=False):
self.nick = nick
self.realname = realname
self.host = host
self.port = port
self.sock = socket.socket()
self.RecvQ... | self.Send("PONG"+event[4:])
def SendMessage(self, destination, message):
self.Send("PRIVMSG "+destination+" :"+message)
def BroadcastMessage(self, message):
for channel in self.channels:
self.SendMessage(channel, message)
def SetNick(self, nickn | ame):
self.Send("NICK "+nickname)
def JoinChannel(self, channelname, channelpassword=""):
self.Send("JOIN "+channelname+" "+channelpassword)
self.channels.append(channelname)
def LeaveChannel(self, channelname):
self.Send("PART "+channelname)
try:
self.channels.remove(channelname)
except:
prin... |
#!/usr/bin/env python
"""Packaging script for the WHALETEQ AECG100 Linux SDK Python bindings."""
import setuptools

if __name__ == "__main__":
    setuptools.setup(
        name="aecg100",
        version="1.1.0.18",
        author="WHALETEQ Co., LTD",
        description="WHALETEQ Co., LTD AECG100 Linux SDK",
        url="https://www.whaleteq.com/en/Support/Download/7/Linux%20SDK",
        include_package_data=True,
        # Ship the native SDK libraries/headers and sample data with the package.
        package_data={
            '': ['sdk/*.so', 'sdk/*.h', 'sample/python/*.txt'],
        },
    )
|
lhupfeldt/jenkinsflow | test/prefix_test.py | Python | bsd-3-clause | 1,934 | 0.003619 | # Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import ap | i_select
prefixed_jobs = """
serial flow: [
job: 'top_quick1'
serial flow: [
job: 'top_x_quick2-1'
]
serial flow: [
job: 'top_x_quick2-2'
]
serial flow: [
job: 'top_x_quick2-3'
]
job: 'top_quick3'
parallel flow: (
serial flow: [
job: 'top_y | _z_quick4a'
]
serial flow: [
job: 'quick4b'
]
job: 'top_y_quick5'
)
]
"""
def test_prefix(api_type, capsys):
with api_select.api(__file__, api_type) as api:
def job(name):
api.job(name, max_fails=0, expect_invocations=0, expect_order=None, params=None)
... |
Jimdo/unattended-upgrades | test/test_logdir.py | Python | gpl-2.0 | 1,396 | 0.002149 | #!/usr/bin/python
import apt_pkg
import logging
import os
import mock
import sys
import tempfile
import unittest
sys.path.insert(0, "..")
from | unattended_upgrade import _setup_logging
class MockOptions:
dry_run = False
debug = False
class TestLogdir(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
apt_pkg.init()
self.mock_options = MockOptions()
def test_logdir(self):
# test log
... | h.join(self.tempdir, "mylog")
apt_pkg.config.set("Unattended-Upgrade::LogDir", logdir)
logging.root.handlers = []
_setup_logging(self.mock_options)
self.assertTrue(os.path.exists(logdir))
def test_logdir_depreated(self):
# test if the deprecated APT::UnattendedUpgrades dir i... |
def countingsort(sortablelist):
    """Sort a list of non-negative integers in place using counting sort.

    Runs in O(n + k) time, where k = max(sortablelist). Values must be
    integers >= 0; a negative value raises IndexError.
    """
    if not sortablelist:
        # max() raises ValueError on an empty sequence; nothing to sort.
        return
    maxval = max(sortablelist)
    count = [0] * (maxval + 1)          # histogram, initialised to zeros
    for value in sortablelist:
        count[value] += 1               # count occurrences of each value
    i = 0
    for value, occurrences in enumerate(count):
        for _ in range(occurrences):    # emit 'occurrences' copies of 'value'
            sortablelist[i] = value
            i += 1
def main():
    """Demo: counting-sort 100 random integers in [0, 1000] and print them."""
    import random
    sample = [random.randint(0, 1000) for _ in range(100)]
    countingsort(sample)
    print(sample)


main()
soerendip42/rdkit | Code/GraphMol/ReducedGraphs/Wrap/testReducedGraphs.py | Python | bsd-3-clause | 1,470 | 0.014286 | # $Id$
#
from rdkit import Chem
from rdkit.Chem import rdReducedGraphs as rdRG
from rdkit import RDConfig
import numpy
import unittest
class TestCase(unittest.TestCase) :
def setUp(self):
pass
def test1(self):
m = Chem.MolFromSmiles('OCCc1ccccc1')
mrg = rdRG.GenerateMolExtendedReducedGraph(m)
mrg.... | ReducedGraph(m)
mrg.UpdatePropertyCache(False)
self.failUnlessEq | ual('[*]cCCO',Chem.MolToSmiles(mrg))
fp1 = rdRG.GenerateErGFingerprintForReducedGraph(mrg)
fp2 = rdRG.GetErGFingerprint(m)
md = max(abs(fp1-fp2))
self.failUnless(md<1e-4)
def test3(self):
m = Chem.MolFromSmiles('OCCc1ccccc1')
fp1 = rdRG.GetErGFingerprint(m)
m = Chem.MolFromSmiles('OCCC1C... |
# -*- coding: utf-8 -*-
# OpenERP/Odoo addon manifest: declares metadata, dependencies and data
# files for the "Export Inventory Costs" warehouse module.
{
    'name': 'Export Inventory Costs',
    'version': '3.0.0.0',
    'category': 'Warehouse Management',
    'description': """
    Export Inventory Costs
    """,
    'author': 'Didotech SRL',
    'website': 'http://www.didotech.com',
    'license': 'AGPL-3',
    'depends': [
        'base',
        'stock',
    ],
    'data': [
        'wizard/wizard_inventory_costs_view.xml',
        'views/stock_view.xml',
    ],
    'demo': [],
    'active': False,
    'installable': True,
    'application': True,
}
|
graik/biskit | archive_biskit2/Biskit/AmberEntropyMaster.py | Python | gpl-3.0 | 25,638 | 0.017396 | ## numpy-oldnumeric calls replaced by custom script; 09/06/2016
## Automatically adapted for numpy-oldnumeric Mar 26, 2007 by alter_code1.py
##
## Biskit, a toolkit for the manipulation of macromolecular structures
## Copyright (C) 2004-2018 Raik Gruenberg & Johan Leckner
##
## This program is free software; you can r... | MA 02139, USA.
##
##
"""
Parallellized AmberEntropist calculation.
"""
import os.path, copy
import Biskit.oldnumeric as N0
import Biskit.tools as T
import Biskit.settings as settings
import Biskit.mathUtils as MU
from Biskit.PVM.TrackingJobMaster import TrackingJobMaster
from Biskit.PVM.hosts import cpus_all, nice_... | , EHandler, StdLog
from Biskit.Dock import Complex
slave_path = T.projectRoot()+"/Biskit/AmberEntropySlave.py"
class AmberEntropyMaster(TrackingJobMaster):
"""
Run many AmberEntropist calculations on many nodes. The Master has
a standard set of 13 protocols to run on rec, lig, and com
trajectories, a... |
SmileEric/SEIMS | preprocess/config.py | Python | gpl-2.0 | 6,003 | 0.01266 | #! /usr/bin/env python
#coding=utf-8
## @Configuration of Preprocessing for SEIMS
#
# TODO, give more detailed description here.
import os,platform
## Directionaries
if platform.system() == "Windows":
DATA_BASE_DIR = r'E:\github-zlj\model_data\model_dianbu_30m_longterm\data_prepare'
PREPROC_SCRIPT_DIR = r'E... | ir.tif"
streamLinkOut = "stream_link.tif"
## masked and output to mongoDB file names
slopeM = "slope.tif"
filldemM = "dem.tif"
accM = "acc.tif"
streamOrderM = "stream_order.tif"
flowDirDinfM = "flow_dir_angle_dinf.tif"
dirCodeDinfM = "flow_dir_dinf.tif"
slopeD | infM = "slope_dinf.tif"
weightDinfM = "weight_dinf.tif"
subbasinVec = "subbasin.shp"
basinVec = "basin.shp"
chwidthName = "chwidth.tif"
landuseMFile = "landuse.tif"
soilTexture = "soil_texture.tif"
hydroGroup = "hydro_group.tif"
usleK = "usle_k.tif"
initSoilMoist = "moist_in.tif"
depressionFile = "depression.tif"
... |
leapcode/bitmask-dev | src/leap/bitmask/mail/outgoing/service.py | Python | gpl-3.0 | 17,108 | 0 | # -*- coding: utf-8 -*-
# outgoing/service.py
# Copyright (C) 2013-2017 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later ve... | n
the original message is not altered. For any other content-type, the
method attempts to fetch the recipient's public key. If the
recipient's public key is available, the message is encrypted and
signed; otherwise it is only signed.
Note that, if the C{encrypted_only} configura... | table summarizes the overall behaviour of the gateway:
+---------------------------------------------------+----------------+
| content-type | rcpt pubkey | enforce encr. | action |
+---------------------+-------------+---------------+----------------+
| multipart/encryp... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.