code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def autozoom(self, points): '''Fit the current view to the correct zoom level to display all *points*. The camera viewing direction and rotation pivot match the geometric center of the points and the distance from that point is calculated in order for all points to be in the fie...
Fit the current view to the correct zoom level to display all *points*. The camera viewing direction and rotation pivot match the geometric center of the points and the distance from that point is calculated in order for all points to be in the field of view. This is currently u...
def prebinned_hist(counts, binlims, ax=None, *args, **kwargs): """Plot a histogram with counts, binlims already given. Example ======= >>> gaus = np.random.normal(size=100) >>> counts, binlims = np.histogram(gaus, bins='auto') >>> prebinned_hist(countsl binlims) """ ax = get_ax(ax) ...
Plot a histogram with counts, binlims already given. Example ======= >>> gaus = np.random.normal(size=100) >>> counts, binlims = np.histogram(gaus, bins='auto') >>> prebinned_hist(counts, binlims)
def entails(self, other):
    """Inverse of ``is_entailed_by``: coerce *other* and delegate with the operands swapped."""
    coerced = BoolCell.coerce(other)
    return coerced.is_entailed_by(self)
Inverse is_entailed_by
def irregular_sampling(T, N, rseed=None): """ Generates an irregularly sampled time vector by perturbating a linearly spaced vector and latter deleting a certain number of points Parameters ---------- T: float Time span of the vector, i.e. how long it is in time N: positiv...
Generates an irregularly sampled time vector by perturbing a linearly spaced vector and later deleting a certain number of points Parameters ---------- T: float Time span of the vector, i.e. how long it is in time N: positive integer Number of samples of the resulting t...
def fit(self, X, y=None): """Compute mixture of von Mises Fisher clustering. Parameters ---------- X : array-like or sparse matrix, shape=(n_samples, n_features) """ if self.normalize: X = normalize(X) self._check_force_weights() random_state...
Compute mixture of von Mises Fisher clustering. Parameters ---------- X : array-like or sparse matrix, shape=(n_samples, n_features)
def _fingerprint_target_specs(self, specs): """Returns a fingerprint of the targets resolved from given target specs.""" assert self._build_graph is not None, ( 'cannot fingerprint specs `{}` without a `BuildGraph`'.format(specs) ) hasher = sha1() for spec in sorted(specs): for target in...
Returns a fingerprint of the targets resolved from given target specs.
def parse_option(self, option, block_name, *values): """ Parse status, end_status, timer_status and status_msg options. """ if option.endswith('status'): status = values[0] if status not in self.VALID_STATUSES: raise ValueError(u'Invalid IM status "{0...
Parse status, end_status, timer_status and status_msg options.
def from_config(cls, cp, model, nprocesses=1, use_mpi=False): """Loads the sampler from the given config file.""" section = "sampler" # check name assert cp.get(section, "name") == cls.name, ( "name in section [sampler] must match mine") # get the number of walkers to...
Loads the sampler from the given config file.
def get_cgi_parameter_str_or_none(form: cgi.FieldStorage, key: str) -> Optional[str]: """ Extracts a string parameter from a CGI form, or ``None`` if the key doesn't exist or the string is zero-length. """ s = get_cgi_parameter_str(form, key) if s is None or len...
Extracts a string parameter from a CGI form, or ``None`` if the key doesn't exist or the string is zero-length.
def sort(self, column, order=Qt.AscendingOrder):
    """Overridden sort: delegate to the model on the fixed column index.

    ``order`` is translated into an ``ascending`` boolean for the model.
    """
    self.model.sort(self.COLUMN_INDEX, order=(order == Qt.AscendingOrder))
    return True
Overriding sort method.
def push(self, line): """Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as...
Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as source. If this indicat...
def _FormatMessage(self, event): """Formats the message. Args: event (EventObject): event. Returns: str: message field. Raises: NoFormatterFound: if no event formatter can be found to match the data type in the event. """ message, _ = self._output_mediator.GetForma...
Formats the message. Args: event (EventObject): event. Returns: str: message field. Raises: NoFormatterFound: if no event formatter can be found to match the data type in the event.
def getSingle(self, type_uri, default=None): """Get a single value for an attribute. If no value was sent for this attribute, use the supplied default. If there is more than one value for this attribute, this method will fail. @type type_uri: str @param type_uri: The URI for the...
Get a single value for an attribute. If no value was sent for this attribute, use the supplied default. If there is more than one value for this attribute, this method will fail. @type type_uri: str @param type_uri: The URI for the attribute @param default: The value to return ...
async def eat(self, philosopher): '''The ``philosopher`` performs one of these two actions: * eat, if he has both forks and then :meth:`release_forks`. * try to :meth:`pickup_fork`, if he has fewer than 2 forks. ''' loop = philosopher._loop while True: forks ...
The ``philosopher`` performs one of these two actions: * eat, if he has both forks and then :meth:`release_forks`. * try to :meth:`pickup_fork`, if he has fewer than 2 forks.
def _hm_read_address(self): """Reads from the DCB and maps to yaml config file.""" response = self._hm_send_address(self.address, 0, 0, 0) lookup = self.config['keys'] offset = self.config['offset'] keydata = {} for i in lookup: try: kd...
Reads from the DCB and maps to yaml config file.
def internal_name(self):
    """Return the unique internal name for this function type.

    The base name is prefixed with ``f_`` and extended with the type
    parameters and return type when they are present.
    """
    parts = ['f_' + super().internal_name()]
    if self.tparams is not None:
        parts.append("_".join(self.tparams))
    if self.tret is not None:
        parts.append(self.tret)
    return "_".join(parts)
Return the unique internal name
def stream(self, date_created_from=values.unset, date_created_to=values.unset, limit=None, page_size=None): """ Streams ExecutionInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. ...
Streams ExecutionInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient. :param datetime date_created_from: Only show E...
async def _send_loop(self): """ This loop is responsible for popping items off the send queue, encrypting them, and sending them over the network. Besides `connect`, only this method ever sends data. """ while self._user_connected and not self._reconnecting: ...
This loop is responsible for popping items off the send queue, encrypting them, and sending them over the network. Besides `connect`, only this method ever sends data.
def fix_lines(source_lines, options, filename=''): """Return fixed source code.""" # Transform everything to line feed. Then change them back to original # before returning fixed source code. original_newline = find_newline(source_lines) tmp_source = ''.join(normalize_line_endings(source_lines, '\n'...
Return fixed source code.
def __gen_token_anno_file(self, top_level_layer): """ creates an etree representation of a <multiFeat> file that describes all the annotations that only span one token (e.g. POS, lemma etc.). Note: discoursegraphs will create one token annotation file for each top level layer (e...
creates an etree representation of a <multiFeat> file that describes all the annotations that only span one token (e.g. POS, lemma etc.). Note: discoursegraphs will create one token annotation file for each top level layer (e.g. conano, tiger etc.).
def process_form(self, instance, field, form, empty_marker=None, emptyReturnsMarker=False, validating=True):
    """Return the UIDs of the selected services from the submitted form.

    Returns a 2-tuple ``(uids, {})``; ``uids`` defaults to an empty list
    when the form carries no ``uids`` entry.
    """
    selected = form.get("uids", [])
    return selected, {}
Return UIDs of the selected services
def _extra_trust_root_validation(self): """ Manually invoked windows certificate chain builder and verification step when there are extra trust roots to include in the search process """ store = None cert_chain_context_pointer = None try: # We set up...
Manually invoked windows certificate chain builder and verification step when there are extra trust roots to include in the search process
def _get_init_args(self): """Creates dict with properties marked as readonly""" args = {} for rop in self.ro_properties: if rop in self.properties: args[rop] = self.properties[rop] return args
Creates dict with properties marked as readonly
def process_flagged_blocks(self, content: str) -> str: '''Replace flagged blocks either with their contents or nothing, depending on the value of ``FOLIANT_FLAGS`` environment variable and ``flags`` config value. :param content: Markdown content :returns: Markdown content without flagg...
Replace flagged blocks either with their contents or nothing, depending on the value of ``FOLIANT_FLAGS`` environment variable and ``flags`` config value. :param content: Markdown content :returns: Markdown content without flagged blocks
def getProjectAreas(self, archived=False, returned_properties=None): """Get all :class:`rtcclient.project_area.ProjectArea` objects If no :class:`rtcclient.project_area.ProjectArea` objects are retrieved, `None` is returned. :param archived: (default is False) whether the project area ...
Get all :class:`rtcclient.project_area.ProjectArea` objects If no :class:`rtcclient.project_area.ProjectArea` objects are retrieved, `None` is returned. :param archived: (default is False) whether the project area is archived :param returned_properties: the returned propert...
def names(self):
    """Return the frozenset of names appearing in this expression."""
    terms = self.terms
    if is_term(terms):
        return frozenset((terms.name,))
    return frozenset(t.name for t in com.flatten(terms))
Get the names in an expression
def path_wrapper(func): """return the given infer function wrapped to handle the path Used to stop inference if the node has already been looked at for a given `InferenceContext` to prevent infinite recursion """ @functools.wraps(func) def wrapped(node, context=None, _func=func, **kwargs): ...
return the given infer function wrapped to handle the path Used to stop inference if the node has already been looked at for a given `InferenceContext` to prevent infinite recursion
def days(self):
    """Return this week's seven days as ``datetime.date`` objects, Monday first."""
    start = self.day(0)
    week = []
    for offset in range(7):
        week.append(start + timedelta(days=offset))
    return week
Return the 7 days of the week as a list (of datetime.date objects)
def _press_special_key(self, key, down): """ Helper method for special keys. Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac """ key_code = special_key_translate_table[key] ev = NSEvent.otherEventWithType_location_modifierFlags_timestamp_wind...
Helper method for special keys. Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac
def _validate_alias_command(alias_command): """ Check if the alias command is valid. Args: alias_command: The command to validate. """ if not alias_command: raise CLIError(EMPTY_ALIAS_ERROR) split_command = shlex.split(alias_command) boundary_index = len(split_command) ...
Check if the alias command is valid. Args: alias_command: The command to validate.
def insert(self): """Insert the object into the database""" if not self.curs: raise LIGOLwDBError, "Database connection not initalized" if len(self.table) == 0: raise LIGOLwDBError, 'attempt to insert empty table' for tab in self.table.keys(): # find and add any missing unique ids ...
Insert the object into the database
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0): """ Command blink(1) to fade to RGB color, no color correction applied. """ action = ord('c') fade_time = int(fade_milliseconds / 10) th = (fade_time & 0xff00) >> 8 tl = fade_tim...
Command blink(1) to fade to RGB color, no color correction applied.
def predict(self): """ Returns ------- proba : ndarray, shape=(n_clusters, ) The probability of given cluster being label 1. """ if self.w_ is not None: sigmoid = lambda t: 1. / (1. + np.exp(-t)) return sigmoid(np.dot(self.centers, sel...
Returns ------- proba : ndarray, shape=(n_clusters, ) The probability of given cluster being label 1.
def temp_output_file(prefix="tmp", suffix="", dir=None, make_parents=False, always_clean=False): """ A context manager for convenience in creating a temporary file, which is deleted when exiting the context. Usage: with temp_output_file() as (fd, path): ... """ return _temp_output(False, prefix=p...
A context manager for convenience in creating a temporary file, which is deleted when exiting the context. Usage: with temp_output_file() as (fd, path): ...
def get_families_by_ids(self, *args, **kwargs): """Pass through to provider FamilyLookupSession.get_families_by_ids""" # Implemented from kitosid template for - # osid.resource.BinLookupSession.get_bins_by_ids catalogs = self._get_provider_session('family_lookup_session').get_families_by...
Pass through to provider FamilyLookupSession.get_families_by_ids
def set_proxy(self, proxy, update=True): """ Set proxy for requests session """ update_web_driver = False if self.current_proxy != proxy: # Did we change proxies? update_web_driver = True self.current_proxy = proxy if proxy is None: ...
Set proxy for requests session
def debug(self):
    """Return True when a ``DEBUG`` marker file exists in the TcEx temp path."""
    marker = os.path.join(self.tcex.args.tc_temp_path, 'DEBUG')
    return os.path.isfile(marker)
Return debug setting
def _apply_over_vars_with_dim(func, self, dim=None, **kwargs): '''wrapper for datasets''' ds = type(self)(coords=self.coords, attrs=self.attrs) for name, var in self.data_vars.items(): if dim in var.dims: ds[name] = func(var, dim=dim, **kwargs) else: ds[name] = var ...
wrapper for datasets
def mode_yubikey_otp(self, private_uid, aes_key): """ Set the YubiKey up for standard OTP validation. """ if not self.capabilities.have_yubico_OTP(): raise yubikey_base.YubiKeyVersionError('Yubico OTP not available in %s version %d.%d' \ ...
Set the YubiKey up for standard OTP validation.
def commit(self):
    """Commit this change set to Route53.

    Lazily opens a Route53 connection when none is attached yet.
    """
    connection = self.connection
    if not connection:
        import boto
        connection = self.connection = boto.connect_route53()
    return connection.change_rrsets(self.hosted_zone_id, self.to_xml())
Commit this change
def check_content(content, **kwargs): '''check content for "active" urls''' # valid html root tag try: # render elements tree from content tree = fragment_fromstring(content) # flag for prevent content rerendering, when no "active" urls found processed = False # djang...
check content for "active" urls
def _parse_function_return_types_from_doc(cls, doc): """ This will extract the return type for list of lists so that the repr can display the header. :param doc: str of the function doc :return dict of {func.__name__:{'api_type':'type','col_name':[], ...
This will extract the return type for list of lists so that the repr can display the header. :param doc: str of the function doc :return dict of {func.__name__:{'api_type':'type','col_name':[], 'col_type':[],'repr_type':None}}
def get_http_authentication(private_key: RsaKey, private_key_id: str) -> HTTPSignatureHeaderAuth: """ Get HTTP signature authentication for a request. """ key = private_key.exportKey() return HTTPSignatureHeaderAuth( headers=["(request-target)", "user-agent", "host", "date"], algorit...
Get HTTP signature authentication for a request.
def nii_ones_like(in_file, value, dtype, newpath=None): """Create a NIfTI file filled with ``value``, matching properties of ``in_file``""" import os import numpy as np import nibabel as nb nii = nb.load(in_file) data = np.ones(nii.shape, dtype=float) * value out_file = os.path.join(newpat...
Create a NIfTI file filled with ``value``, matching properties of ``in_file``
def enum_check(*args, func=None): """Check if arguments are of protocol type.""" func = func or inspect.stack()[2][3] for var in args: if not isinstance(var, (enum.EnumMeta, aenum.EnumMeta)): name = type(var).__name__ raise EnumError( f'Function {func} expecte...
Check if arguments are of protocol type.
def _query(self, path, args=None, skip_cache=False, skip_sleep=False): """return results for a NCBI query, possibly from the cache :param: path: relative query path (e.g., 'einfo.fcgi') :param: args: dictionary of query args :param: skip_cache: whether to bypass the cache on reading ...
return results for a NCBI query, possibly from the cache :param: path: relative query path (e.g., 'einfo.fcgi') :param: args: dictionary of query args :param: skip_cache: whether to bypass the cache on reading :param: skip_sleep: whether to bypass query throttling :rtype: xml st...
def exit_standby(name, instance_ids, should_decrement_desired_capacity=False, region=None, key=None, keyid=None, profile=None): ''' Exit desired instances from StandBy mode .. versionadded:: 2016.11.0 CLI example:: salt-call boto_asg.exit_standby my_autoscale_group_name '["i...
Exit desired instances from StandBy mode .. versionadded:: 2016.11.0 CLI example:: salt-call boto_asg.exit_standby my_autoscale_group_name '["i-xxxxxx"]'
def get_all_handleable_leaves(self): """ Get list of all handleable devices, return only those that represent leaf nodes within the filtered device tree. """ nodes = self.get_device_tree() return [node.device for node in sorted(nodes.values(), key=DevNode....
Get list of all handleable devices, return only those that represent leaf nodes within the filtered device tree.
def multidict(D): '''creates a multidictionary''' keys = list(D.keys()) if len(keys) == 0: return [[]] try: N = len(D[keys[0]]) islist = True except: N = 1 islist = False dlist = [dict() for d in range(N)] for k in keys: if islist: ...
creates a multidictionary
def create_from_hdu(cls, hdu, ebins): """ Creates and returns an HpxMap object from a FITS HDU. hdu : The FITS ebins : Energy bin edges [optional] """ hpx = HPX.create_from_hdu(hdu, ebins) colnames = hdu.columns.names cnames = [] if hpx.conv.convname ...
Creates and returns an HpxMap object from a FITS HDU. hdu : The FITS ebins : Energy bin edges [optional]
def print_item_callback(item):
    """Print a single item for &listen in the ``&listen [cmd, id=data]`` form.

    Missing keys fall back to empty strings.
    """
    cmd = item.get('cmd', '')
    ident = item.get('id', '')
    data = item.get('data', '')
    print('&listen [{}, {}={}]'.format(cmd, ident, data))
Print an item callback, used by &listen.
def update_stats(self, stats, value, _type, sample_rate=1): """ Pipeline function that formats data, samples it and passes to send() >>> client = StatsdClient() >>> client.update_stats('example.update_stats', 73, "c", 0.9) """ stats = self.format(stats, value, _type, sel...
Pipeline function that formats data, samples it and passes to send() >>> client = StatsdClient() >>> client.update_stats('example.update_stats', 73, "c", 0.9)
def _macs2_cmd(method="chip"): """Main command for macs2 tool.""" if method.lower() == "chip": cmd = ("{macs2} callpeak -t {chip_bam} -c {input_bam} {paired} " " {genome_size} -n {name} -B {options}") elif method.lower() == "atac": cmd = ("{macs2} callpeak -t {chip_bam} --nom...
Main command for macs2 tool.
def RgbToHsl(r, g, b): '''Convert the color from RGB coordinates to HSL. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (h, s, l) tuple in the range: h...
Convert the color from RGB coordinates to HSL. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (h, s, l) tuple in the range: h[0...360], s[0...1], ...
def strip_rts_retries(self, idx):
    """Strip the 1-byte ``rts_retries`` field from the radiotap buffer.

    :idx: int offset into the buffer
    :return: (offset advanced past the field, rts_retries value)
    """
    (rts_retries,) = struct.unpack_from('<B', self._rtap, idx)
    next_idx = idx + 1
    return next_idx, rts_retries
strip(1 byte) rts_retries :idx: int :return: int idx :return: int
def fetch_items(self, category, **kwargs): """Fetch the commits :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items """ from_date = kwargs['from_date'] to_date = kwargs['to_date'] branches = kwa...
Fetch the commits :param category: the category of items to fetch :param kwargs: backend arguments :returns: a generator of items
def generate_dummy_graph(network): """Generate a dummy graph to feed to the FIAS libraries. It adds the "pos" attribute and removes the 380 kV duplicate buses when the buses have been split, so that all load and generation is attached to the 220kV bus.""" graph = pypsa.descriptors.OrderedGraph(...
Generate a dummy graph to feed to the FIAS libraries. It adds the "pos" attribute and removes the 380 kV duplicate buses when the buses have been split, so that all load and generation is attached to the 220kV bus.
def reset(self): """Reset emulator. All registers and memory are reset. """ self.__mem.reset() self.__cpu.reset() self.__tainter.reset() # Instructions pre and post handlers. self.__instr_handler_pre = None, None self.__instr_handler_post = None, None ...
Reset emulator. All registers and memory are reset.
def diff_lines(self): """A diff between the original BUILD file and the resulting BUILD file.""" start_lines = self._build_file_source_lines[:] end_lines = self.build_file_lines() diff_generator = unified_diff(start_lines, end_lines, fr...
A diff between the original BUILD file and the resulting BUILD file.
def parse(self, sentence): """Parse raw sentence into ConllSentence Parameters ---------- sentence : list a list of (word, tag) tuples Returns ------- ConllSentence ConllSentence object """ words = np.zeros((len(sentence) ...
Parse raw sentence into ConllSentence Parameters ---------- sentence : list a list of (word, tag) tuples Returns ------- ConllSentence ConllSentence object
def get_members(cls, member_class=None, is_member=None, sort_key=None, _parameter=None): """ Collect all class level attributes matching the given criteria. :param class member_class: Class(es) to collect :param is_member: Function to determine if an object should be collected :para...
Collect all class level attributes matching the given criteria. :param class member_class: Class(es) to collect :param is_member: Function to determine if an object should be collected :param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_o...
def set_permissions(filename, uid=None, gid=None, mode=0775): """ Set pemissions for given `filename`. Args: filename (str): name of the file/directory uid (int, default proftpd): user ID - if not set, user ID of `proftpd` is used gid (int): group...
Set permissions for given `filename`. Args: filename (str): name of the file/directory uid (int, default proftpd): user ID - if not set, user ID of `proftpd` is used gid (int): group ID, if not set, it is not changed mode (int, default 0775): unix ...
def dump(data, abspath, indent_format=False, float_precision=None, ensure_ascii=True, overwrite=False, enable_verbose=True): """Dump Json serializable object to file. Provides multiple choice to customize the behavior. :param data: Serializable python object. ...
Dump Json serializable object to file. Provides multiple choice to customize the behavior. :param data: Serializable python object. :type data: dict or list :param abspath: ``save as`` path, file extension has to be ``.json`` or ``.gz`` (for compressed Json) :type abspath: string :par...
def _get_choices(self, gandi):
    """Internal helper: unique middle (``_``-separated) components of the package names."""
    packages = super(CertificatePackageType, self)._get_choices(gandi)
    unique = {pack.split('_')[1] for pack in packages}
    return list(unique)
Internal method to get choices list
def benchmark_setup(self):
    """Benchmark the setup step and store the timing stats in ``setup_stat``."""
    def run_setup():
        self._setup()
        self.mod_ext.synchronize(**self.ext_kwargs)
    # Warm-up invocation; its timing is deliberately discarded.
    run_setup()
    self.setup_stat = self._calc_benchmark_stat(run_setup)
Benchmark setup execution.
def p_gate_op_5(self, program):
    """
    gate_op : BARRIER id_list ';'
    """
    # NOTE(review): the docstring above appears to be a yacc-style grammar
    # rule (PLY convention for p_* methods) — the parser generator reads it,
    # so it must not be reworded.
    # Build a Barrier node wrapping the parsed id_list, then validate it.
    program[0] = node.Barrier([program[2]])
    self.verify_bit_list(program[2])
    self.verify_distinct([program[2]])
gate_op : BARRIER id_list ';'
def get_devicecore_api(self): """Returns a :class:`.DeviceCoreAPI` bound to this device cloud instance This provides access to the same API as :attr:`.DeviceCloud.devicecore` but will create a new object (with a new cache) each time called. :return: devicecore API object bound to this ...
Returns a :class:`.DeviceCoreAPI` bound to this device cloud instance This provides access to the same API as :attr:`.DeviceCloud.devicecore` but will create a new object (with a new cache) each time called. :return: devicecore API object bound to this device cloud account :rtype: :cla...
def _ConvertValueMessage(value, message): """Convert a JSON representation into Value message.""" if isinstance(value, dict): _ConvertStructMessage(value, message.struct_value) elif isinstance(value, list): _ConvertListValueMessage(value, message.list_value) elif value is None: message.null_value = ...
Convert a JSON representation into Value message.
def stem_singular_word(self, word):
    """Return the common stem form of a single (non-plural) word."""
    ctx = Context(word, self.dictionary, self.visitor_provider)
    ctx.execute()
    return ctx.result
Stem a singular word to its common stem form.
def to_array_with_default(value, default_value): """ Converts value into array object with specified default. Single values are converted into arrays with single element. :param value: the value to convert. :param default_value: default array object. :return: array obj...
Converts value into array object with specified default. Single values are converted into arrays with single element. :param value: the value to convert. :param default_value: default array object. :return: array object or default array when value is None.
async def delete_shade_from_scene(self, shade_id, scene_id):
    """Remove the given shade from the given scene via a DELETE request."""
    params = {ATTR_SCENE_ID: scene_id, ATTR_SHADE_ID: shade_id}
    return await self.request.delete(self._base_path, params=params)
Delete a shade from a scene.
def next(self, times=1):
    """Return a new Range immediately following this one.

    ``times`` is accepted for interface compatibility but is not
    supported yet.
    """
    new_start = copy(self.end)
    new_end = self.end + self.elapse
    return Range(new_start, new_end, tz=self.start.tz)
Returns a new instance of self times is not supported yet.
def randkey(bits, keyspace=string.ascii_letters + string.digits + '#/.', rng=None): """ Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable...
Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom @bits: (#int) minimum bits of entropy @keyspace: (#str) or iterable allowed output chars @rng: the random number generator to use. Defaults to :class:r...
def local_check (self): """Local check function can be overridden in subclasses.""" log.debug(LOG_CHECK, "Checking %s", unicode(self)) # strict extern URLs should not be checked assert not self.extern[1], 'checking strict extern URL' # check connection log.debug(LOG_CHECK...
Local check function can be overridden in subclasses.
def discard(self, element):
    """Remove *element* from the RangeSet if it is a member.

    Non-integer elements are silently ignored, as are non-members.
    """
    try:
        index = int(element)
    except ValueError:
        return
    set.discard(self, index)
Remove element from the RangeSet if it is a member. If the element is not a member, do nothing.
def for_category(self, category, live_only=False): """ Returns queryset of EntryTag instances for specified category. :param category: the Category instance. :param live_only: flag to include only "live" entries. :rtype: django.db.models.query.QuerySet. """ filte...
Returns queryset of EntryTag instances for specified category. :param category: the Category instance. :param live_only: flag to include only "live" entries. :rtype: django.db.models.query.QuerySet.
def binary(self): """ return encoded representation """ if isinstance(self.value, bytes): length = len(self.value) if length > 4294967295: raise OutputException('uint32 overflow') elif self.bits != 8: return ( ...
return encoded representation
def _free(self, ptr): """ Handler for any libc `free` SimProcedure call. If the heap has faithful support for `free`, it ought to be implemented in a `free` function (as opposed to the `_free` function). :param ptr: the location in memory to be freed """ raise NotImpleme...
Handler for any libc `free` SimProcedure call. If the heap has faithful support for `free`, it ought to be implemented in a `free` function (as opposed to the `_free` function). :param ptr: the location in memory to be freed
def getDocFactory(self, fragmentName, default=None): """ Retrieve a Nevow document factory for the given name. @param fragmentName: a short string that names a fragment template. @param default: value to be returned if the named template is not found. """ themes...
Retrieve a Nevow document factory for the given name. @param fragmentName: a short string that names a fragment template. @param default: value to be returned if the named template is not found.
def send_mail( subject, sender, to, message, html_message=None, cc=None, bcc=None, attachments=None, host=None, port=None, auth_user=None, auth_password=None, use_tls=False, fail_silently=False, ): """Send a single email to a recipient list. ...
Send a single email to a recipient list. All members of the recipient list will see the other recipients in the 'To' field. Note: The API for this method is frozen. New code wanting to extend the functionality should use the EmailMessage class directly.
def _ProcessTask(self, task): """Processes a task. Args: task (Task): task. """ logger.debug('Started processing task: {0:s}.'.format(task.identifier)) if self._tasks_profiler: self._tasks_profiler.Sample(task, 'processing_started') self._task = task storage_writer = self._st...
Processes a task. Args: task (Task): task.
def report_numbers2marc(self, key, value): """Populate the ``037`` MARC field.""" def _get_mangled_source(source): if source == 'arXiv': return 'arXiv:reportnumber' return source source = _get_mangled_source(value.get('source')) if value.get('hidden'): return { ...
Populate the ``037`` MARC field.
def read(self, line, f, data): """See :meth:`PunchParser.read`""" data["energy"] = float(f.readline().split()[1]) N = len(data["symbols"]) # if the data are already read before, just overwrite them gradient = data.get("gradient") if gradient is None: gradient ...
See :meth:`PunchParser.read`
def is_valid(self): """ Error reporting is triggered when a form is checked for validity """ is_valid = super(GAErrorReportingMixin, self).is_valid() if self.is_bound and not is_valid: try: self.report_errors_to_ga(self.errors) except: # n...
Error reporting is triggered when a form is checked for validity
def power_spectrum(self, input_filepath): '''Calculates the power spectrum (4096 point DFT). This method internally invokes the stat command with the -freq option. Note: The file is downmixed to mono prior to computation. Parameters ---------- input_filepath : str ...
Calculates the power spectrum (4096 point DFT). This method internally invokes the stat command with the -freq option. Note: The file is downmixed to mono prior to computation. Parameters ---------- input_filepath : str Path to input file to compute stats on. ...
def getLocation(self): """ Return the latitude+longitutde of the picture. Returns None if no location given for this pic. """ method = 'flickr.photos.geo.getLocation' try: data = _doget(method, photo_id=self.id) except FlickrError: # Some other error m...
Return the latitude+longitude of the picture. Returns None if no location is given for this pic.
def cli(env): """List Reserved Capacity groups.""" manager = CapacityManager(env.client) result = manager.list() table = formatting.Table( ["ID", "Name", "Capacity", "Flavor", "Location", "Created"], title="Reserved Capacity" ) for r_c in result: occupied_string = "#" * i...
List Reserved Capacity groups.
def properties_strict(instance): """Ensure that no custom properties are used, but only the official ones from the specification. """ if instance['type'] not in enums.TYPES: return # only check properties for official objects defined_props = enums.PROPERTIES.get(instance['type'], []) f...
Ensure that no custom properties are used, but only the official ones from the specification.
def reduce_min(attrs, inputs, proto_obj):
    """Reduce the array along a given axis by minimum value.

    The ``axes`` attribute is renamed to the ``axis`` name expected by the
    ``min`` operator; ``inputs`` are forwarded unchanged.
    """
    renamed_attrs = translation_utils._fix_attribute_names(attrs, {'axes': 'axis'})
    return 'min', renamed_attrs, inputs
Reduce the array along a given axis by minimum value
def get_task_fs(self, courseid, taskid): """ :param courseid: the course id of the course :param taskid: the task id of the task :raise InvalidNameException :return: A FileSystemProvider to the folder containing the task files """ if not id_checker(courseid): ...
:param courseid: the course id of the course :param taskid: the task id of the task :raise InvalidNameException :return: A FileSystemProvider to the folder containing the task files
def update(self, max_norm=None): """Updates parameters according to the installed optimizer and the gradients computed in the previous forward-backward batch. Gradients are clipped by their global norm if `max_norm` is set. Parameters ---------- max_norm: float, optional...
Updates parameters according to the installed optimizer and the gradients computed in the previous forward-backward batch. Gradients are clipped by their global norm if `max_norm` is set. Parameters ---------- max_norm: float, optional If set, clip values of all grad...
def get(self): """ get method """ try: cluster = self.get_argument_cluster() role = self.get_argument_role() environ = self.get_argument_environ() topology_name = self.get_argument_topology() component = self.get_argument_component() metric_names = self.get_required_arguments...
get method
def incr(self, key, to_add=1):
    """Increment the counter stored under ``key`` by ``to_add``.

    A fresh ``CountMetric`` is created the first time ``key`` is seen.
    """
    try:
        metric = self.value[key]
    except KeyError:
        metric = self.value[key] = CountMetric()
    metric.incr(to_add)
Increments the value of a given key by ``to_add``
def summary(self, solution=None, threshold=1E-06, fva=None, names=False, floatfmt='.3g'): """ Print a summary of the input and output fluxes of the model. Parameters ---------- solution: cobra.Solution, optional A previously solved model solution to u...
Print a summary of the input and output fluxes of the model. Parameters ---------- solution: cobra.Solution, optional A previously solved model solution to use for generating the summary. If none provided (default), the summary method will resolve the model. ...
def install_payment_instruction(self, instruction, token_type="Unrestricted", transaction_id=None): """ InstallPaymentInstruction instruction: The PaymentInstruction to send, for example: MyRole=='Calle...
InstallPaymentInstruction instruction: The PaymentInstruction to send, for example: MyRole=='Caller' orSay 'Roles do not match'; token_type: Defaults to "Unrestricted" transaction_id: Defaults to a new ID
def get_mass(chebi_id):
    '''Return the mass for ``chebi_id``, or ``float('NaN')`` if unknown.

    The module-level mass table is parsed lazily on first access.
    '''
    # Lazily populate the cache on first use; empty dict is falsy.
    if not __MASSES:
        __parse_chemical_data()
    # dict.get with a default avoids the double lookup of ``in`` + ``[]``.
    return __MASSES.get(chebi_id, float('NaN'))
Returns mass
def execute(self):
    """Execute the command.

    Intercepts the ``help`` subsubcommand: when the first argument is
    ``"help"``, the usage and help text are passed to ``self.error`` and
    ``False`` is returned; otherwise ``True`` is returned.
    """
    wants_help = bool(self.args) and self.argument(0) == "help"
    if not wants_help:
        return True
    message = "\n\n".join([self.usage(), self.help()])
    self.error(message)
    return False
Execute the command. Intercepts the help subsubcommand to show the help text.
def update_view(self, table, view): """Updates the SQL query for a view. If the output table exists, it is replaced with the supplied view query. Otherwise a new table is created with this view. :param table: The table to contain the view. :type table: BQTable :param vi...
Updates the SQL query for a view. If the output table exists, it is replaced with the supplied view query. Otherwise a new table is created with this view. :param table: The table to contain the view. :type table: BQTable :param view: The SQL query for the view. :type v...
def identifiers(self, identifiers): """ :type identifiers: subject_abcs.IdentifierCollection """ if (isinstance(identifiers, subject_abcs.IdentifierCollection) or identifiers is None): self._identifiers = identifiers else: raise ValueError...
:type identifiers: subject_abcs.IdentifierCollection
def deploy_snmp(snmp, host=None, admin_username=None, admin_password=None, module=None): ''' Change the QuickDeploy SNMP community string, used for switches as well CLI Example: .. code-block:: bash salt dell dracr.deploy_snmp SNMP_STRING host=<remote DRAC or CMC> ...
Change the QuickDeploy SNMP community string, used for switches as well CLI Example: .. code-block:: bash salt dell dracr.deploy_snmp SNMP_STRING host=<remote DRAC or CMC> admin_username=<DRAC user> admin_password=<DRAC PW> salt dell dracr.deploy_password diana secret