code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def run(edges, iterations=1000, force_strength=5.0, dampening=0.01, max_velocity=2.0, max_distance=50, is_3d=True): """Runs a force-directed-layout algorithm on the input graph. iterations - Number of FDL iterations to run in coordinate generation force_strength - Strength of Coulomb and Hooke forc...
Runs a force-directed-layout algorithm on the input graph. iterations - Number of FDL iterations to run in coordinate generation force_strength - Strength of Coulomb and Hooke forces (edit this to scale the distance between nodes) dampening - Multiplier to reduce force applied to nodes...
def get_event_attendees(self, id, **data): """ GET /events/:id/attendees/ Returns a :ref:`paginated <pagination>` response with a key of ``attendees``, containing a list of :format:`attendee`. """ return self.get("/events/{0}/attendees/".format(id), data=data)
GET /events/:id/attendees/ Returns a :ref:`paginated <pagination>` response with a key of ``attendees``, containing a list of :format:`attendee`.
def get_context_data(self, **kwargs): """ checks if there is SocialFrind model record for the user if not attempt to create one if all fail, redirects to the next page """ context = super(FriendListView, self).get_context_data(**kwargs) friends = [] for f...
checks if there is SocialFrind model record for the user if not attempt to create one if all fail, redirects to the next page
def _paths_must_exists(path): """ Raises error if path doesn't exist. :param path: str path to check :return: str same path passed in """ path = to_unicode(path) if not os.path.exists(path): raise argparse.ArgumentTypeError("{} is not a valid file/folder.".format(path)) return pa...
Raises error if path doesn't exist. :param path: str path to check :return: str same path passed in
def column_spec_path(cls, project, location, dataset, table_spec, column_spec): """Return a fully-qualified column_spec string.""" return google.api_core.path_template.expand( "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}/columnSpecs/{column_spec}", ...
Return a fully-qualified column_spec string.
def check_update(): """ Return True if an update is available on pypi """ r = requests.get("https://pypi.python.org/pypi/prof/json") data = r.json() if versiontuple(data['info']['version']) > versiontuple(__version__): return True return False
Return True if an update is available on pypi
def _update_assignment_email_status(offer_assignment_id, send_id, status, site_code=None): """ Update the offer_assignment and offer_assignment_email model using the Ecommerce assignmentemail api. Arguments: offer_assignment_id (str): Key of the entry in the offer_assignment model. send_id (...
Update the offer_assignment and offer_assignment_email model using the Ecommerce assignmentemail api. Arguments: offer_assignment_id (str): Key of the entry in the offer_assignment model. send_id (str): Unique message id from Sailthru status (str): status to be sent to the api site_c...
def msg_curse(self, args=None, max_width=None): """Return the dict to display in the curse interface.""" # Init the return message ret = [] # Only process if stats exist and display plugin enable... if not self.stats or self.is_disable(): return ret # Max si...
Return the dict to display in the curse interface.
def list_available_tools(self): """ Lists all the Benchmarks configuration files found in the configuration folders :return: """ benchmarks = [] if self.alternative_config_dir: for n in glob.glob(os.path.join(self.alternative_config_dir, self.BENCHMARKS_DIR,...
Lists all the Benchmarks configuration files found in the configuration folders :return:
def inDignities(self, idA, idB): """ Returns the dignities of A which belong to B. """ objA = self.chart.get(idA) info = essential.getInfo(objA.sign, objA.signlon) # Should we ignore exile and fall? return [dign for (dign, ID) in info.items() if ID == idB]
Returns the dignities of A which belong to B.
def _find_key_cols(df): """Identify columns in a DataFrame that could be a unique key""" keys = [] for col in df: if len(df[col].unique()) == len(df[col]): keys.append(col) return keys
Identify columns in a DataFrame that could be a unique key
def is_seq_of(seq, expected_type, seq_type=None): """Check whether it is a sequence of some type. Args: seq (Sequence): The sequence to be checked. expected_type (type): Expected type of sequence items. seq_type (type, optional): Expected sequence type. Returns: bool: Wheth...
Check whether it is a sequence of some type. Args: seq (Sequence): The sequence to be checked. expected_type (type): Expected type of sequence items. seq_type (type, optional): Expected sequence type. Returns: bool: Whether the sequence is valid.
def _publish_queue_grpc(self): """ send the messages in the tx queue to the GRPC manager :return: None """ messages = EventHub_pb2.Messages(msg=self._tx_queue) publish_request = EventHub_pb2.PublishRequest(messages=messages) self.grpc_manager.send_message(publish_...
send the messages in the tx queue to the GRPC manager :return: None
def build_api_struct(self): """ Calls the clean method of the class and returns the info in a structure that Atlas API is accepting. """ self.clean() data = {"type": self.measurement_type} # add all options for option in self.used_options: opt...
Calls the clean method of the class and returns the info in a structure that Atlas API is accepting.
def attach_attachment(self, analysis, attachment): """ Attach a file or a given set of files to an analysis :param analysis: analysis where the files are to be attached :param attachment: files to be attached. This can be either a single file or a list of files :return: ...
Attach a file or a given set of files to an analysis :param analysis: analysis where the files are to be attached :param attachment: files to be attached. This can be either a single file or a list of files :return: None
def setup(self, phase=None, quantity='', conductance='', **kwargs): r""" This method takes several arguments that are essential to running the algorithm and adds them to the settings. Parameters ---------- phase : OpenPNM Phase object The phase on which the a...
r""" This method takes several arguments that are essential to running the algorithm and adds them to the settings. Parameters ---------- phase : OpenPNM Phase object The phase on which the algorithm is to be run. quantity : string The name of th...
def _clip_line( self, line_pt_1, line_pt_2 ): """ clip line to canvas """ x_min = min(line_pt_1[0], line_pt_2[0]) x_max = max(line_pt_1[0], line_pt_2[0]) y_min = min(line_pt_1[1], line_pt_2[1]) y_max = max(line_pt_1[1], line_pt_2[...
clip line to canvas
def get_stack_index(self, stack_index, plugin_index): """Get the real index of the selected item.""" other_plugins_count = sum([other_tabs[0].count() \ for other_tabs in \ self.plugins_tabs[:plugin_index]]) real_index = stack_...
Get the real index of the selected item.
async def analog_write(self, command): """ This method writes a value to an analog pin. It is used to set the output of a PWM pin or the angle of a Servo. :param command: {"method": "analog_write", "params": [PIN, WRITE_VALUE]} :returns: No return message. """ p...
This method writes a value to an analog pin. It is used to set the output of a PWM pin or the angle of a Servo. :param command: {"method": "analog_write", "params": [PIN, WRITE_VALUE]} :returns: No return message.
def multiget(client, keys, **options): """Executes a parallel-fetch across multiple threads. Returns a list containing :class:`~riak.riak_object.RiakObject` or :class:`~riak.datatypes.Datatype` instances, or 4-tuples of bucket-type, bucket, key, and the exception raised. If a ``pool`` option is inc...
Executes a parallel-fetch across multiple threads. Returns a list containing :class:`~riak.riak_object.RiakObject` or :class:`~riak.datatypes.Datatype` instances, or 4-tuples of bucket-type, bucket, key, and the exception raised. If a ``pool`` option is included, the request will use the given worker ...
def info(self, message, domain=None): """ Shortcut function for `utils.loggable.info` Args: message: see `utils.loggable.info` domain: see `utils.loggable.info` """ if domain is None: domain = self.extension_name info(message, domain)
Shortcut function for `utils.loggable.info` Args: message: see `utils.loggable.info` domain: see `utils.loggable.info`
def add_pyspark_path(): """Add PySpark to the library path based on the value of SPARK_HOME. """ try: spark_home = os.environ['SPARK_HOME'] sys.path.append(os.path.join(spark_home, 'python')) py4j_src_zip = glob(os.path.join(spark_home, 'python', ...
Add PySpark to the library path based on the value of SPARK_HOME.
def result(self, wait=False): """ Gets the result of the method call. If the call was successful, return the result, otherwise, reraise the exception. :param wait: Block until the result is available, or just get the result. :raises: RuntimeError when called and the result is no...
Gets the result of the method call. If the call was successful, return the result, otherwise, reraise the exception. :param wait: Block until the result is available, or just get the result. :raises: RuntimeError when called and the result is not yet available.
def cmd_follow(self, args): '''control following of vehicle''' if len(args) < 2: print("map follow 0|1") return follow = int(args[1]) self.map.set_follow(follow)
control following of vehicle
def _serialize_uint(value, size=32, padding=0): """ Translates a python integral or a BitVec into a 32 byte string, MSB first """ if size <= 0 or size > 32: raise ValueError from .account import EVMAccount # because of circular import if not isinstance(value...
Translates a python integral or a BitVec into a 32 byte string, MSB first
def invalidate(self, key): """Remove the given data item along with all items that depend on it in the graph.""" if key not in self.data: return del self.data[key] # Find all components that used it and invalidate their results for cname in self.components: ...
Remove the given data item along with all items that depend on it in the graph.
def from_parmed(cls, path, *args, **kwargs): """ Try to load a file automatically with ParmEd. Not guaranteed to work, but might be useful if it succeeds. Arguments --------- path : str Path to file that ParmEd can load """ st = parmed.load_fi...
Try to load a file automatically with ParmEd. Not guaranteed to work, but might be useful if it succeeds. Arguments --------- path : str Path to file that ParmEd can load
def create_textfile_with_contents(filename, contents, encoding='utf-8'): """ Creates a textual file with the provided contents in the workdir. Overwrites an existing file. """ ensure_directory_exists(os.path.dirname(filename)) if os.path.exists(filename): os.remove(filename) outstrea...
Creates a textual file with the provided contents in the workdir. Overwrites an existing file.
def get_new_document(self, cursor_pos=None): """ Create a `Document` instance that contains the resulting text. """ lines = [] # Original text, before cursor. if self.original_document.text_before_cursor: lines.append(self.original_document.text_before_cursor...
Create a `Document` instance that contains the resulting text.
def volume_present(name, bricks, stripe=False, replica=False, device_vg=False, transport='tcp', start=False, force=False, arbiter=False): ''' Ensure that the volume exists name name of the volume bricks list of brick paths replica replica count for volum...
Ensure that the volume exists name name of the volume bricks list of brick paths replica replica count for volume arbiter use every third brick as arbiter (metadata only) .. versionadded:: 2019.2.0 start ensure that the volume is also started ...
def get_user(self, login): """ http://confluence.jetbrains.net/display/YTD2/GET+user """ return youtrack.User(self._get("/admin/user/" + urlquote(login.encode('utf8'))), self)
http://confluence.jetbrains.net/display/YTD2/GET+user
def get_converter(rule): """ Parse rule will extract the converter from the rule as a generator We iterate through the parse_rule results to find the converter parse_url returns the static rule part in the first iteration parse_url returns the dynamic rule part in the second iteration if its dynami...
Parse rule will extract the converter from the rule as a generator We iterate through the parse_rule results to find the converter parse_url returns the static rule part in the first iteration parse_url returns the dynamic rule part in the second iteration if its dynamic
def _name_to_index(self, channels): """ Return the channel indices for the specified channel names. Integers contained in `channel` are returned unmodified, if they are within the range of ``self.channels``. Parameters ---------- channels : int or str or list of...
Return the channel indices for the specified channel names. Integers contained in `channel` are returned unmodified, if they are within the range of ``self.channels``. Parameters ---------- channels : int or str or list of int or list of str Name(s) of the channel(s...
def on_proposal(self, proposal, proto): "called to inform about synced peers" assert isinstance(proto, HDCProtocol) assert isinstance(proposal, Proposal) if proposal.height >= self.cm.height: assert proposal.lockset.is_valid self.last_active_protocol = proto
called to inform about synced peers
def buffer(self, frame): """Enable buffering for the frame from that point onwards.""" frame.buffer = self.temporary_identifier() self.writeline('%s = []' % frame.buffer)
Enable buffering for the frame from that point onwards.
def _create_variables(self, n_features, n_classes): """Create the TensorFlow variables for the model. :param n_features: number of features :param n_classes: number of classes :return: self """ self.W_ = tf.Variable( tf.zeros([n_features, n_classes]), name='w...
Create the TensorFlow variables for the model. :param n_features: number of features :param n_classes: number of classes :return: self
def list_cluster_role_binding(self, **kwargs): # noqa: E501 """list_cluster_role_binding # noqa: E501 list or watch objects of kind ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=Tru...
list_cluster_role_binding # noqa: E501 list or watch objects of kind ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_cluster_role_binding(async_req=True) ...
def _is_junction(arg): ''' Return True, if arg is a junction statement. ''' return isinstance(arg, dict) and len(arg) == 1 and next(six.iterkeys(arg)) == 'junction'
Return True, if arg is a junction statement.
def _save_function_initial_state(self, function_key, function_address, state): """ Save the initial state of a function, and merge it with existing ones if there are any. :param FunctionKey function_key: The key to this function. :param int function_address: Address of the function. ...
Save the initial state of a function, and merge it with existing ones if there are any. :param FunctionKey function_key: The key to this function. :param int function_address: Address of the function. :param SimState state: Initial state of the function. :return: None
def get_urlclass_from (scheme, assume_local_file=False): """Return checker class for given URL scheme. If the scheme cannot be matched and assume_local_file is True, assume a local file. """ if scheme in ("http", "https"): klass = httpurl.HttpUrl elif scheme == "ftp": klass = ftpurl....
Return checker class for given URL scheme. If the scheme cannot be matched and assume_local_file is True, assume a local file.
def set_local_address(ams_netid): # type: (Union[str, SAmsNetId]) -> None """Set the local NetID (**Linux only**). :param str: new AmsNetID :rtype: None **Usage:** >>> import pyads >>> pyads.open_port() >>> pyads.set_local_address('0.0.0.0.1.1') """ ...
Set the local NetID (**Linux only**). :param str: new AmsNetID :rtype: None **Usage:** >>> import pyads >>> pyads.open_port() >>> pyads.set_local_address('0.0.0.0.1.1')
def add_property(self, prop, objects=()): """Add a property to the definition and add ``objects`` as related.""" self._properties.add(prop) self._objects |= objects self._pairs.update((o, prop) for o in objects)
Add a property to the definition and add ``objects`` as related.
def pathparts(self): """A list of the parts of the path, with the root node returning an empty list. """ try: parts = self.parent.pathparts() parts.append(self.name) return parts except AttributeError: return []
A list of the parts of the path, with the root node returning an empty list.
def read_message_handler(stream): """ Send message to user if the opponent has read the message """ while True: packet = yield from stream.get() session_id = packet.get('session_key') user_opponent = packet.get('username') message_id = packet.get('message_id') ...
Send message to user if the opponent has read the message
def predict(self, temp_type): """ Transpile the predict method. Parameters ---------- :param temp_type : string The kind of export type (embedded, separated, exported). Returns ------- :return : string The transpiled predict metho...
Transpile the predict method. Parameters ---------- :param temp_type : string The kind of export type (embedded, separated, exported). Returns ------- :return : string The transpiled predict method as string.
def risk(self, domain, **kwargs): """Returns back the risk score for a given domain""" return self._results('risk', '/v1/risk', items_path=('components', ), domain=domain, cls=Reputation, **kwargs)
Returns back the risk score for a given domain
def _generate_non_lastnames_variations(non_lastnames): """Generate variations for all non-lastnames. E.g. For 'John Richard', this method generates: [ 'John', 'J', 'Richard', 'R', 'John Richard', 'John R', 'J Richard', 'J R', ] """ if not non_lastnames: return [] # Generate nam...
Generate variations for all non-lastnames. E.g. For 'John Richard', this method generates: [ 'John', 'J', 'Richard', 'R', 'John Richard', 'John R', 'J Richard', 'J R', ]
def api_auth(func): """ If the user is not logged in, this decorator looks for basic HTTP auth data in the request header. """ @wraps(func) def _decorator(request, *args, **kwargs): authentication = APIAuthentication(request) if authentication.authenticate(): return ...
If the user is not logged in, this decorator looks for basic HTTP auth data in the request header.
def exception_info(self, timeout=None): """Return a tuple of (exception, traceback) raised by the call that the future represents. Args: timeout: The number of seconds to wait for the exception if the future isn't done. If None, then there is no limit on the wait ...
Return a tuple of (exception, traceback) raised by the call that the future represents. Args: timeout: The number of seconds to wait for the exception if the future isn't done. If None, then there is no limit on the wait time. Returns: Th...
def insert(self, i, arg): r"""Insert whitespace, an unparsed argument string, or an argument object. :param int i: Index to insert argument into :param Arg arg: Argument to insert >>> arguments = TexArgs(['\n', RArg('arg0'), '[arg2]']) >>> arguments.insert(1, '[arg1]') ...
r"""Insert whitespace, an unparsed argument string, or an argument object. :param int i: Index to insert argument into :param Arg arg: Argument to insert >>> arguments = TexArgs(['\n', RArg('arg0'), '[arg2]']) >>> arguments.insert(1, '[arg1]') >>> len(arguments) ...
def _mkdirs(d): """ Make all directories up to d. No exception is raised if d exists. """ try: os.makedirs(d) except OSError as e: if e.errno != errno.EEXIST: raise
Make all directories up to d. No exception is raised if d exists.
def normalize(u): ''' normalize(u) yields a vetor with the same direction as u but unit length, or, if u has zero length, yields u. ''' u = np.asarray(u) unorm = np.sqrt(np.sum(u**2, axis=0)) z = np.isclose(unorm, 0) c = np.logical_not(z) / (unorm + z) return u * c
normalize(u) yields a vetor with the same direction as u but unit length, or, if u has zero length, yields u.
def fetch(self, minutes=values.unset, start_date=values.unset, end_date=values.unset, task_queue_sid=values.unset, task_queue_name=values.unset, friendly_name=values.unset, task_channel=values.unset): """ Fetch a WorkersStatisticsInstance :param unicode...
Fetch a WorkersStatisticsInstance :param unicode minutes: Filter cumulative statistics by up to 'x' minutes in the past. :param datetime start_date: Filter cumulative statistics by a start date. :param datetime end_date: Filter cumulative statistics by a end date. :param unicode task_qu...
def _create_and_add_parameters(params): ''' Parses the configuration and creates Parameter instances. ''' global _current_parameter if _is_simple_type(params): _current_parameter = SimpleParameter(params) _current_option.add_parameter(_current_parameter) else: # must be a...
Parses the configuration and creates Parameter instances.
def _load(self, scale=1.0): """Load the SLSTR relative spectral responses """ LOG.debug("File: %s", str(self.requested_band_filename)) ncf = Dataset(self.requested_band_filename, 'r') wvl = ncf.variables['wavelength'][:] * scale resp = ncf.variables['response'][:] ...
Load the SLSTR relative spectral responses
def update(self, auth_payload=values.unset): """ Update the ChallengeInstance :param unicode auth_payload: Optional payload to verify the Challenge :returns: Updated ChallengeInstance :rtype: twilio.rest.authy.v1.service.entity.factor.challenge.ChallengeInstance """ ...
Update the ChallengeInstance :param unicode auth_payload: Optional payload to verify the Challenge :returns: Updated ChallengeInstance :rtype: twilio.rest.authy.v1.service.entity.factor.challenge.ChallengeInstance
def as_url(self): ''' Reverse object converted to `web.URL`. If Reverse is bound to env: * try to build relative URL, * use current domain name, port and scheme as default ''' if '' in self._scope: return self._finalize().as_url if no...
Reverse object converted to `web.URL`. If Reverse is bound to env: * try to build relative URL, * use current domain name, port and scheme as default
def pool(data, batch_size, key, batch_size_fn=lambda new, count, sofar: count, random_shuffler=None, shuffle=False, sort_within_batch=False): """Sort within buckets, then batch, then shuffle batches. Partitions data into chunks of size 100*batch_size, sorts examples within each chunk using sort_ke...
Sort within buckets, then batch, then shuffle batches. Partitions data into chunks of size 100*batch_size, sorts examples within each chunk using sort_key, then batch these examples and shuffle the batches.
def ellipsemode(self, mode=None): ''' Set the current ellipse drawing mode. :param mode: CORNER, CENTER, CORNERS :return: ellipsemode if mode is None or valid. ''' if mode in (self.CORNER, self.CENTER, self.CORNERS): self.ellipsemode = mode return...
Set the current ellipse drawing mode. :param mode: CORNER, CENTER, CORNERS :return: ellipsemode if mode is None or valid.
def read_data(self,variable_instance): """ read values from the device """ if self.inst is None: return if variable_instance.visavariable.device_property.upper() == 'PRESENT_VALUE': return self.parse_value(self.inst.query('?U6P0')) elif variable_in...
read values from the device
def load(self, path=None): """ Loads the XML-file (with sentiment annotations) from the given path. By default, Sentiment.path is lazily loaded. """ # <word form="great" wordnet_id="a-01123879" pos="JJ" polarity="1.0" subjectivity="1.0" intensity="1.0" /> # <word form="damnmi...
Loads the XML-file (with sentiment annotations) from the given path. By default, Sentiment.path is lazily loaded.
def add_subnet(self, subnet_type, quantity=None, vlan_id=None, version=4, test_order=False): """Orders a new subnet :param str subnet_type: Type of subnet to add: private, public, global :param int quantity: Number of IPs in the subnet :param int vlan_id: VLAN id for ...
Orders a new subnet :param str subnet_type: Type of subnet to add: private, public, global :param int quantity: Number of IPs in the subnet :param int vlan_id: VLAN id for the subnet to be placed into :param int version: 4 for IPv4, 6 for IPv6 :param bool test_order: If true, th...
def create_event_handler(event_type, handler): """Register a comm and return a serializable object with target name""" target_name = '{hash}_{event_type}'.format(hash=hash(handler), event_type=event_type) def handle_comm_opened(comm, msg): @comm.on_msg def _handle_msg(msg): dat...
Register a comm and return a serializable object with target name
def disconnect(self, connection): """The other side has asked us to disconnect. """ proto = self.getLocalProtocol(connection) proto.transport.loseConnection() return {}
The other side has asked us to disconnect.
def camera_position(self, camera_location): """ Set camera position of all active render windows """ if camera_location is None: return if isinstance(camera_location, str): camera_location = camera_location.lower() if camera_location == 'xy': ...
Set camera position of all active render windows
def get_unit_id(unit_name): """ Return the unit id to the unit 'unit_name' """ unit_name = unit_name.lower() attribute = 'uniqueIdentifier' response = LDAP_search( pattern_search='(cn={})'.format(unit_name), attribute=attribute ) unit_id = "" try: for element...
Return the unit id to the unit 'unit_name'
def clone(self, snapshot_name_or_id=None, mode=library.CloneMode.machine_state, options=None, name=None, uuid=None, groups=None, basefolder='', register=True): """Clone this Machine Options: snapshot_name_or_id - value can be either ISnapshot, name,...
Clone this Machine Options: snapshot_name_or_id - value can be either ISnapshot, name, or id mode - set the CloneMode value options - define the CloneOptions options name - define a name of the new VM uuid - set the uuid of the new VM grou...
def is_stop(self): ''' has either of the stop processing flags been set ''' if len(self._processed_coordinators) > 0: self.free_processed_queue() return self._cancel_called or self._processing_stop
has either of the stop processing flags been set
def get_transition(self, # suppress(too-many-arguments) line, line_index, column, is_escaped, comment_system_transitions, eof=False): """Get transition from InTextParser."""...
Get transition from InTextParser.
def write_summary(all_procs, summary_file): """ Write a summary of all run processes to summary_file in tab-delimited format. """ if not summary_file: return with summary_file: writer = csv.writer(summary_file, delimiter='\t', lineterminator='\n') writer.writerow(('direc...
Write a summary of all run processes to summary_file in tab-delimited format.
def check_theta(self): """Validate the computed theta against the copula specification. This method is used to assert the computed theta is in the valid range for the copula.""" lower, upper = self.theta_interval if (not lower <= self.theta <= upper) or (self.theta in self.invalid_theta...
Validate the computed theta against the copula specification. This method is used to assert the computed theta is in the valid range for the copula.
def stacked_node_layout(self,EdgeAttribute=None,network=None,NodeAttribute=None,\ nodeList=None,x_position=None,y_start_position=None,verbose=None): """ Execute the Stacked Node Layout on a network. :param EdgeAttribute (string, optional): The name of the edge column contai ning numeric values that will be ...
Execute the Stacked Node Layout on a network. :param EdgeAttribute (string, optional): The name of the edge column contai ning numeric values that will be used as weights in the layout algor ithm. Only columns containing numeric values are shown :param network (string, optional): Specifies a network by name,...
def create_api_stage(restApiId, stageName, deploymentId, description='', cacheClusterEnabled=False, cacheClusterSize='0.5', variables=None, region=None, key=None, keyid=None, profile=None): ''' Creates a new API stage for a given restApiId and deploymentId. CLI Exa...
Creates a new API stage for a given restApiId and deploymentId. CLI Example: .. code-block:: bash salt myminion boto_apigateway.create_api_stage restApiId stagename deploymentId \\ description='' cacheClusterEnabled=True|False cacheClusterSize='0.5' variables='{"name": "value"}'
def get_all_service_user_objects(self, include_machine = False): """ Fetches all service user objects from the AD, and returns MSADUser object. Service user refers to an user whith SPN (servicePrincipalName) attribute set """ logger.debug('Polling AD for all user objects, machine accounts included: %s'% inclu...
Fetches all service user objects from the AD, and returns MSADUser object. Service user refers to an user whith SPN (servicePrincipalName) attribute set
def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of...
Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers().
def run_prepare(*data): """ Run seqcluster prepare to merge all samples in one file """ out_dir = os.path.join(dd.get_work_dir(data[0][0]), "seqcluster", "prepare") out_dir = os.path.abspath(safe_makedir(out_dir)) prepare_dir = os.path.join(out_dir, "prepare") tools = dd.get_expression_calle...
Run seqcluster prepare to merge all samples in one file
def _set_properties(self): """Setup title, icon, size, scale, statusbar, main grid""" self.set_icon(icons["PyspreadLogo"]) # Without minimum size, initial size is minimum size in wxGTK self.minSizeSet = False # Leave save mode post_command_event(self, self.SafeModeExit...
Setup title, icon, size, scale, statusbar, main grid
def _validate_checksum(self): """Given a mnemonic word string, confirm seed checksum (last word) matches the computed checksum. :rtype: bool """ phrase = self.phrase.split(" ") if self.word_list.get_checksum(self.phrase) == phrase[-1]: return True raise Value...
Given a mnemonic word string, confirm seed checksum (last word) matches the computed checksum. :rtype: bool
def deleteAllStyles(self, verbose=None): """ Deletes all vision styles except for default style :param verbose: print more :returns: default: successful operation """ response=api(url=self.___url+'styles', method="DELETE", verbose=verbose) return response
Deletes all vision styles except for default style :param verbose: print more :returns: default: successful operation
def _setuie(self, i): """Initialise bitstring with unsigned interleaved exponential-Golomb code for integer i. Raises CreationError if i < 0. """ if i < 0: raise CreationError("Cannot use negative initialiser for unsigned " "interleaved expon...
Initialise bitstring with unsigned interleaved exponential-Golomb code for integer i. Raises CreationError if i < 0.
def is_done(self): """ Returns True if the read stream is done (either it's returned EOF or the pump doesn't have wait_for_output set), and the write side does not have pending bytes to send. """ return (not self.wait_for_output or self.eof) and \ not (ha...
Returns True if the read stream is done (either it's returned EOF or the pump doesn't have wait_for_output set), and the write side does not have pending bytes to send.
def POST(self):
    """Add a new entry.

    On a valid form, create the todo and redirect to the index;
    otherwise re-render the index with the form's validation errors.
    """
    form = self.form()
    if form.validates():
        model.new_todo(form.d.title)
        raise web.seeother('/')
    # Invalid input: show the list again with the partially-filled form.
    todos = model.get_todos()
    return render.index(todos, form)
Add new entry
def upgrade(): """Upgrade database.""" op.create_table( 'pidrelations_pidrelation', sa.Column('created', sa.DateTime(), nullable=False), sa.Column('updated', sa.DateTime(), nullable=False), sa.Column('parent_id', sa.Integer(), nullable=False), sa.Column('child_id', sa.Int...
Upgrade database.
def parse(cls, value, record_bytes): """Parses the pointer label. Parameters ---------- pointer_data Supported values for `pointer_data` are:: ^PTR = nnn ^PTR = nnn <BYTES> ^PTR = "filename" ^PTR = ("filename")...
Parses the pointer label. Parameters ---------- pointer_data Supported values for `pointer_data` are:: ^PTR = nnn ^PTR = nnn <BYTES> ^PTR = "filename" ^PTR = ("filename") ^PTR = ("filename", nnn) ...
def make_key_url(self, key): """Gets a URL for a key.""" if type(key) is bytes: key = key.decode('utf-8') buf = io.StringIO() buf.write(u'keys') if not key.startswith(u'/'): buf.write(u'/') buf.write(key) return self.make_url(buf.getvalue()...
Gets a URL for a key.
def add(self, properties): """ Add a faked HBA resource. Parameters: properties (dict): Resource properties. Special handling and requirements for certain properties: * 'element-id' will be auto-generated with a unique value across ...
Add a faked HBA resource. Parameters: properties (dict): Resource properties. Special handling and requirements for certain properties: * 'element-id' will be auto-generated with a unique value across all instances of this resource type, if not spe...
def stop(self):
    """Stop this process.

    Once closed, it should not, and cannot be used again.

    :return: :py:attr:`~exitcode`.
    """
    # Terminate first, then release our resources; read the exit code
    # only after cleanup so we report the final state.
    self.child.terminate()
    self._cleanup()
    return self.child.exitcode
Stop this process. Once closed, it should not, and cannot be used again. :return: :py:attr:`~exitcode`.
def ping(self, params=None):
    """Return True if the cluster is up, False otherwise.

    `<http://www.elastic.co/guide/>`_
    """
    # A transport-level failure (connection refused, timeout, ...) simply
    # means the cluster is unreachable — report that as False.
    try:
        return self.transport.perform_request("HEAD", "/", params=params)
    except TransportError:
        return False
Returns True if the cluster is up, False otherwise. `<http://www.elastic.co/guide/>`_
def ntp_authentication_key_encryption_type_md5_type_md5(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") ntp = ET.SubElement(config, "ntp", xmlns="urn:brocade.com:mgmt:brocade-ntp") authentication_key = ET.SubElement(ntp, "authentication-key") key...
Auto Generated Code
def parse(self):
    """Convert the line to a shape object."""
    log.debug(self)
    # Each stage mutates self in place; order matters.
    stages = (
        self.parse_composite,
        self.split_line,
        self.convert_coordinates,
        self.convert_meta,
        self.make_shape,
    )
    for stage in stages:
        stage()
    log.debug(self)
Convert line to shape object
def save_images(self): """Save selected images. This uses Astropy FITS package to save the outputs no matter what user chose to load the images. """ res_dict = self.treeview.get_selected() clobber = self.settings.get('clobber', False) self.treeview.clear_selecti...
Save selected images. This uses Astropy FITS package to save the outputs no matter what user chose to load the images.
def get_custom_value(self, field_name): """ Get a value for a specified custom field field_name - Name of the custom field you want. """ custom_field = self.get_custom_field(field_name) return CustomFieldValue.objects.get_or_create( field=custom_field, object_id=self....
Get a value for a specified custom field field_name - Name of the custom field you want.
def contingency_table(y, z): """Note: if y and z are not rounded to 0 or 1, they are ignored """ y = K.cast(K.round(y), K.floatx()) z = K.cast(K.round(z), K.floatx()) def count_matches(y, z): return K.sum(K.cast(y, K.floatx()) * K.cast(z, K.floatx())) ones = K.ones_like(y) zeros =...
Note: if y and z are not rounded to 0 or 1, they are ignored
def add_variable(self, name): """Add a variable to the problem""" if name in self._variables: raise ValueError( "A variable named " + name + " already exists." ) self._variables[name] = len(self._variables) self.bounds[name] = (0, None) ne...
Add a variable to the problem
def matrix_to_marching_cubes(matrix, pitch, origin): """ Convert an (n,m,p) matrix into a mesh, using marching_cubes. Parameters ----------- matrix: (n,m,p) bool, voxel matrix pitch: float, what pitch was the voxel matrix computed with origin: (3,) float, what is the origin of the voxel mat...
Convert an (n,m,p) matrix into a mesh, using marching_cubes. Parameters ----------- matrix: (n,m,p) bool, voxel matrix pitch: float, what pitch was the voxel matrix computed with origin: (3,) float, what is the origin of the voxel matrix Returns ---------- mesh: Trimesh object, generat...
def find(self,cell_designation,cell_filter=lambda x,c: 'c' in x and x['c'] == c): """ finds spike containers in a multi spike containers collection """ res = [i for i,sc in enumerate(self.spike_containers) if cell_filter(sc.meta,cell_designation)] if len(res) > 0: ...
finds spike containers in a multi spike containers collection
def rosen_nesterov(self, x, rho=100): """needs exponential number of steps in a non-increasing f-sequence. x_0 = (-1,1,...,1) See Jarre (2011) "On Nesterov's Smooth Chebyshev-Rosenbrock Function" """ f = 0.25 * (x[0] - 1)**2 f += rho * sum((x[1:] - 2 * x[:-1]**2 + 1)**2...
needs exponential number of steps in a non-increasing f-sequence. x_0 = (-1,1,...,1) See Jarre (2011) "On Nesterov's Smooth Chebyshev-Rosenbrock Function"
def load(self, modules): """Load Python modules and check their usability :param modules: list of the modules that must be loaded :return: """ self.modules_assoc = [] for module in modules: if not module.enabled: logger.info("Module %s is decl...
Load Python modules and check their usability :param modules: list of the modules that must be loaded :return:
def interact(self, **local):
    """Drop the user into an interactive Python session.

    The ``sess`` variable is bound to the current session instance.
    If keyword arguments are supplied, these names will also be
    available within the session.
    """
    import code
    # dict(sess=self, **local) deliberately raises if callers pass
    # their own 'sess' keyword, rather than silently overriding it.
    namespace = dict(sess=self, **local)
    code.interact(local=namespace)
Drops the user into an interactive Python session with the ``sess`` variable set to the current session instance. If keyword arguments are supplied, these names will also be available within the session.
def find_distinct(self, collection, key): """ Search a collection for the distinct key values provided. Args: collection: The db collection. See main class documentation. key: The name of the key to find distinct values. For example with the indicators c...
Search a collection for the distinct key values provided. Args: collection: The db collection. See main class documentation. key: The name of the key to find distinct values. For example with the indicators collection, the key could be "type". Returns: ...