code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def get_response(self, method, endpoint, headers=None, json=None, params=None, data=None): # pylint: disable=too-many-arguments """ Returns the response from the requested endpoint with the requested method :param method: str. one of the methods accepted by Requests ('POST', 'GET', ...) ...
Returns the response from the requested endpoint with the requested method :param method: str. one of the methods accepted by Requests ('POST', 'GET', ...) :param endpoint: str. the relative endpoint to access :param params: (optional) Dictionary or bytes to be sent in the query string f...
def convert_to_ns(self, value): ''' converts a value to the prefixed rdf ns equivalent. If not found returns the value as is args: value: the value to convert ''' parsed = self.parse_uri(value) try: rtn_val = "%s_%s" % (self.uri_dict[parsed[0...
converts a value to the prefixed rdf ns equivalent. If not found returns the value as is args: value: the value to convert
def get_by_id(self, webhook, params={}, **options): """Returns the full record for the given webhook. Parameters ---------- webhook : {Id} The webhook to get. [params] : {Object} Parameters for the request """ path = "/webhooks/%s" % (webhook) return sel...
Returns the full record for the given webhook. Parameters ---------- webhook : {Id} The webhook to get. [params] : {Object} Parameters for the request
def get_huisnummer_by_id(self, id): ''' Retrieve a `huisnummer` by the Id. :param integer id: the Id of the `huisnummer` :rtype: :class:`Huisnummer` ''' def creator(): res = crab_gateway_request( self.client, 'GetHuisnummerWithStatusByHuisnumm...
Retrieve a `huisnummer` by the Id. :param integer id: the Id of the `huisnummer` :rtype: :class:`Huisnummer`
def flatter(x, k=1): ''' flatter(x) yields a numpy array equivalent to x but whose first dimension has been flattened. flatter(x, k) yields a numpy array whose first k dimensions have been flattened; if k is negative, the last k dimensions are flattened. If np.inf or -np.inf is passed, then this is ...
flatter(x) yields a numpy array equivalent to x but whose first dimension has been flattened. flatter(x, k) yields a numpy array whose first k dimensions have been flattened; if k is negative, the last k dimensions are flattened. If np.inf or -np.inf is passed, then this is equivalent to flattest(x). No...
def op_nodes(self, op=None): """Get the list of "op" nodes in the dag. Args: op (Type): Instruction subclass op nodes to return. if op=None, return all op nodes. Returns: list[DAGNode]: the list of node ids containing the given op. """ nod...
Get the list of "op" nodes in the dag. Args: op (Type): Instruction subclass op nodes to return. if op=None, return all op nodes. Returns: list[DAGNode]: the list of node ids containing the given op.
def raw(request): """shows untransformed hierarchical xml output""" foos = foobar_models.Foo.objects.all() return HttpResponse(tree.xml(foos), mimetype='text/xml')
shows untransformed hierarchical xml output
def is_valid(self, tree): """ returns true, iff the order of the tokens in the graph are the same as in the Conano file (converted to plain text). """ conano_plaintext = etree.tostring(tree, encoding='utf8', method='text') token_str_list = conano_plaintext.split() ...
returns true, iff the order of the tokens in the graph are the same as in the Conano file (converted to plain text).
def write_to_disk( manifest_root_dir: Optional[Path] = None, manifest_name: Optional[str] = None, prettify: Optional[bool] = False, ) -> Manifest: """ Write the active manifest to disk Defaults - Writes manifest to cwd unless Path is provided as manifest_root_dir. - Writes manifest with ...
Write the active manifest to disk Defaults - Writes manifest to cwd unless Path is provided as manifest_root_dir. - Writes manifest with a filename of Manifest[version].json unless a desired manifest name (which must end in json) is provided as manifest_name. - Writes the minified manifest version t...
def build(self, builder): """Build XML by appending to builder""" params = dict(OID=self.oid, Name=self.name, DataType=self.datatype.value) if self.sas_format_name is not None: params["SASFormatName"] = self.sas_format_name builder.start("CodeList", params) for item ...
Build XML by appending to builder
def get_imgid(self, img): """Obtain a unique identifier of the image. Parameters ---------- img : astropy.io.fits.HDUList Returns ------- str: Identification of the image """ imgid = img.filename() # More heuristics here......
Obtain a unique identifier of the image. Parameters ---------- img : astropy.io.fits.HDUList Returns ------- str: Identification of the image
def build_assert(cls: Type[_Block], nodes: List[ast.stmt], min_line_number: int) -> _Block: """ Assert block is all nodes that are after the Act node. Note: The filtering is *still* running off the line number of the Act node, when instead it should be using the last lin...
Assert block is all nodes that are after the Act node. Note: The filtering is *still* running off the line number of the Act node, when instead it should be using the last line of the Act block.
def from_indra_pickle(path: str, name: Optional[str] = None, version: Optional[str] = None, description: Optional[str] = None, authors: Optional[str] = None, contact: Optional[str] = None, ...
Import a model from :mod:`indra`. :param path: Path to pickled list of :class:`indra.statements.Statement` :param name: The name for the BEL graph :param version: The version of the BEL graph :param description: The description of the graph :param authors: The authors of this graph :param conta...
def write(self, oprot): ''' Write this object to the given output protocol and return self. :type oprot: thryft.protocol._output_protocol._OutputProtocol :rtype: pastpy.gen.database.impl.dbf.dbf_database_configuration.DbfDatabaseConfiguration ''' oprot.write_struct_begi...
Write this object to the given output protocol and return self. :type oprot: thryft.protocol._output_protocol._OutputProtocol :rtype: pastpy.gen.database.impl.dbf.dbf_database_configuration.DbfDatabaseConfiguration
def _get_value(self): """ Return two delegating variables. Each variable should contain a value attribute with the real value. """ x, y = self._point.x, self._point.y self._px, self._py = self._item_point.canvas.get_matrix_i2i(self._item_point, ...
Return two delegating variables. Each variable should contain a value attribute with the real value.
def prune_influence_map_subj_obj(self): """Prune influence map to include only edges where the object of the upstream rule matches the subject of the downstream rule.""" def get_rule_info(r): result = {} for ann in self.model.annotations: if ann.subject ==...
Prune influence map to include only edges where the object of the upstream rule matches the subject of the downstream rule.
def translation(language): """ Return a translation object in the default 'django' domain. """ global _translations if language not in _translations: _translations[language] = Translations(language) return _translations[language]
Return a translation object in the default 'django' domain.
def ServiceWorker_inspectWorker(self, versionId): """ Function path: ServiceWorker.inspectWorker Domain: ServiceWorker Method name: inspectWorker Parameters: Required arguments: 'versionId' (type: string) -> No description No return value. """ assert isinstance(versionId, (str,) ...
Function path: ServiceWorker.inspectWorker Domain: ServiceWorker Method name: inspectWorker Parameters: Required arguments: 'versionId' (type: string) -> No description No return value.
def RegisterParser(cls, parser_class): """Registers a parser class. The parser classes are identified based on their lower case name. Args: parser_class (type): parser class (subclass of BaseParser). Raises: KeyError: if parser class is already set for the corresponding name. """ ...
Registers a parser class. The parser classes are identified based on their lower case name. Args: parser_class (type): parser class (subclass of BaseParser). Raises: KeyError: if parser class is already set for the corresponding name.
def is_cnpj(numero, estrito=False): """Uma versão conveniente para usar em testes condicionais. Apenas retorna verdadeiro ou falso, conforme o argumento é validado. :param bool estrito: Padrão ``False``, indica se apenas os dígitos do número deverão ser considerados. Se verdadeiro, potenciais carac...
Uma versão conveniente para usar em testes condicionais. Apenas retorna verdadeiro ou falso, conforme o argumento é validado. :param bool estrito: Padrão ``False``, indica se apenas os dígitos do número deverão ser considerados. Se verdadeiro, potenciais caracteres que formam a máscara serão re...
def is_any_type_set(sett: Set[Type]) -> bool: """ Helper method to check if a set of types is the {AnyObject} singleton :param sett: :return: """ return len(sett) == 1 and is_any_type(min(sett))
Helper method to check if a set of types is the {AnyObject} singleton :param sett: :return:
def purge(**kwargs): ''' Purge all the jobs currently scheduled on the minion CLI Example: .. code-block:: bash salt '*' schedule.purge ''' ret = {'comment': [], 'result': True} for name in list_(show_all=True, return_yaml=False): if name == 'enabled': ...
Purge all the jobs currently scheduled on the minion CLI Example: .. code-block:: bash salt '*' schedule.purge
def _tokenize_latex(self, exp): """ Internal method to tokenize latex """ tokens = [] prevexp = "" while exp: t, exp = self._get_next_token(exp) if t.strip() != "": tokens.append(t) if prevexp == exp: bre...
Internal method to tokenize latex
def to_json(self): """ Writes the complete Morse-Smale merge hierarchy to a string object. @ Out, a string object storing the entire merge hierarchy of all minima and maxima. """ capsule = {} capsule["Hierarchy"] = [] for ( dying, ...
Writes the complete Morse-Smale merge hierarchy to a string object. @ Out, a string object storing the entire merge hierarchy of all minima and maxima.
def get_tagged_version(self): """ Get the version of the local working set as a StrictVersion or None if no viable tag exists. If the local working set is itself the tagged commit and the tip and there are no local modifications, use the tag on the parent changeset. """ tags = list(self.get_tags()) if '...
Get the version of the local working set as a StrictVersion or None if no viable tag exists. If the local working set is itself the tagged commit and the tip and there are no local modifications, use the tag on the parent changeset.
def sed(regexpr, repl, force=False, recursive=False, dpath_list=None, fpath_list=None, verbose=None, include_patterns=None, exclude_patterns=[]): """ Python implementation of sed. NOT FINISHED searches and replaces text in files Args: regexpr (str): regx patterns to find ...
Python implementation of sed. NOT FINISHED searches and replaces text in files Args: regexpr (str): regx patterns to find repl (str): text to replace force (bool): recursive (bool): dpath_list (list): directories to search (defaults to cwd)
def from_list(cls, l): """Return a Point instance from a given list""" if len(l) == 3: x, y, z = map(float, l) return cls(x, y, z) elif len(l) == 2: x, y = map(float, l) return cls(x, y) else: raise AttributeError
Return a Point instance from a given list
def add_formats_by_name(self, rfmt_list): """ adds formats by short label descriptors, such as 'txt', 'json', or 'html' """ for fmt in rfmt_list: if fmt == "json": self.add_report_format(JSONReportFormat) elif fmt in ("txt", "text"): ...
adds formats by short label descriptors, such as 'txt', 'json', or 'html'
def view_indexes(self, done=None): '''return a list waypoint indexes in view order''' ret = [] if done is None: done = set() idx = 0 # find first point not done yet while idx < self.count(): if not idx in done: break id...
return a list waypoint indexes in view order
def cell_fate(data, groupby='clusters', disconnected_groups=None, self_transitions=False, n_neighbors=None, copy=False): """Computes individual cell endpoints Arguments --------- data: :class:`~anndata.AnnData` Annotated data matrix. groupby: `str` (default: `'clusters'`) Key to whi...
Computes individual cell endpoints Arguments --------- data: :class:`~anndata.AnnData` Annotated data matrix. groupby: `str` (default: `'clusters'`) Key to which to assign the fates. disconnected_groups: list of `str` (default: `None`) Which groups to treat as disconnected f...
def _add_document(self, doc_id, conn=None, nosave=False, score=1.0, payload=None, replace=False, partial=False, language=None, **fields): """ Internal add_document used for both batch and single doc indexing """ if conn is None: conn = self.redis ...
Internal add_document used for both batch and single doc indexing
def info_authn(self): """Check to see if user if authenticated for info.json. Must have Authorization header with value that has the form "Bearer TOKEN", where TOKEN is an appropriate and valid access token. """ authz_header = request.headers.get('Authorization', '[none]...
Check to see if user if authenticated for info.json. Must have Authorization header with value that has the form "Bearer TOKEN", where TOKEN is an appropriate and valid access token.
def get_queryset(self): """ Returns a queryset of all states holding a non-special election on a date. """ try: date = ElectionDay.objects.get(date=self.kwargs["date"]) except Exception: raise APIException( "No elections on {}.".for...
Returns a queryset of all states holding a non-special election on a date.
def _iter_path(pointer): """Take a cairo_path_t * pointer and yield ``(path_operation, coordinates)`` tuples. See :meth:`Context.copy_path` for the data structure. """ _check_status(pointer.status) data = pointer.data num_data = pointer.num_data points_per_type = PATH_POINTS_PER_TYPE ...
Take a cairo_path_t * pointer and yield ``(path_operation, coordinates)`` tuples. See :meth:`Context.copy_path` for the data structure.
def store_text_cursor_anchor(self): """ Stores the document cursor anchor. :return: Method success. :rtype: bool """ self.__text_cursor_anchor = (self.textCursor(), self.horizontalScrollBar().sliderPosition(), ...
Stores the document cursor anchor. :return: Method success. :rtype: bool
def chi_squareds(self, p=None): """ Returns a list of chi squared for each data set. Also uses ydata_massaged. p=None means use the fit results """ if len(self._set_xdata)==0 or len(self._set_ydata)==0: return None if p is None: p = self.results[0] # ge...
Returns a list of chi squared for each data set. Also uses ydata_massaged. p=None means use the fit results
def _post_login_page(self, login_url): """Login to HydroQuebec website.""" data = {"login": self.username, "_58_password": self.password} try: raw_res = yield from self._session.post(login_url, data=data, ...
Login to HydroQuebec website.
def get_datastream_data(self, datastream, options): """ Get input data for the datastream :param datastream: string :param options: dict """ response_format=None if options and 'format' in options and options['format'] is not None: response_format = o...
Get input data for the datastream :param datastream: string :param options: dict
def get_source(self, name): """Concrete implementation of InspectLoader.get_source.""" path = self.get_filename(name) try: source_bytes = self.get_data(path) except OSError as exc: e = _ImportError('source not available through get_data()', ...
Concrete implementation of InspectLoader.get_source.
def get_active_services(): """ Retrieve a list of all active system services. @see: L{get_services}, L{start_service}, L{stop_service}, L{pause_service}, L{resume_service} @rtype: list( L{win32.ServiceStatusProcessEntry} ) @return: List of service statu...
Retrieve a list of all active system services. @see: L{get_services}, L{start_service}, L{stop_service}, L{pause_service}, L{resume_service} @rtype: list( L{win32.ServiceStatusProcessEntry} ) @return: List of service status descriptors.
def _decode_datetime(obj): """Decode a msgpack'ed datetime.""" if '__datetime__' in obj: obj = datetime.datetime.strptime(obj['as_str'].decode(), "%Y%m%dT%H:%M:%S.%f") return obj
Decode a msgpack'ed datetime.
def copy(self, extra=None): """ Creates a copy of this instance with the same uid and some extra params. The default implementation creates a shallow copy using :py:func:`copy.copy`, and then copies the embedded and extra parameters over and returns the copy. Subclasses s...
Creates a copy of this instance with the same uid and some extra params. The default implementation creates a shallow copy using :py:func:`copy.copy`, and then copies the embedded and extra parameters over and returns the copy. Subclasses should override this method if the default approa...
def _consolidate_auth(ssh_password=None, ssh_pkey=None, ssh_pkey_password=None, allow_agent=True, host_pkey_directories=None, logger=None): """ Get sure authentication inform...
Get sure authentication information is in place. ``ssh_pkey`` may be of classes: - ``str`` - in this case it represents a private key file; public key will be obtained from it - ``paramiko.Pkey`` - it will be transparently added to loaded keys
def get_mac_acl_for_intf_input_interface_type(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_mac_acl_for_intf = ET.Element("get_mac_acl_for_intf") config = get_mac_acl_for_intf input = ET.SubElement(get_mac_acl_for_intf, "input") int...
Auto Generated Code
def adjust_worker_number_by_load(self): """Try to create the minimum workers specified in the configuration :return: None """ if self.interrupted: logger.debug("Trying to adjust worker number. Ignoring because we are stopping.") return to_del = [] ...
Try to create the minimum workers specified in the configuration :return: None
def clone(self, into=None): """Clone this PEX environment into a new PEXBuilder. :keyword into: (optional) An optional destination directory to clone this PEXBuilder into. If not specified, a temporary directory will be created. Clones PEXBuilder into a new location. This is useful if the PEXBuild...
Clone this PEX environment into a new PEXBuilder. :keyword into: (optional) An optional destination directory to clone this PEXBuilder into. If not specified, a temporary directory will be created. Clones PEXBuilder into a new location. This is useful if the PEXBuilder has been frozen and rendered...
def pdf_case_report(institute_id, case_name): """Download a pdf report for a case""" institute_obj, case_obj = institute_and_case(store, institute_id, case_name) data = controllers.case_report_content(store, institute_obj, case_obj) # add coverage report on the bottom of this report if current_app...
Download a pdf report for a case
def get_user_shell(): """ For commands executed directly via an SSH command-line, SSH looks up the user's shell via getpwuid() and only defaults to /bin/sh if that field is missing or empty. """ try: pw_shell = pwd.getpwuid(os.geteuid()).pw_shell except KeyError: pw_shell = N...
For commands executed directly via an SSH command-line, SSH looks up the user's shell via getpwuid() and only defaults to /bin/sh if that field is missing or empty.
def _ige(message, key, iv, operation="decrypt"): """Given a key, given an iv, and message do whatever operation asked in the operation field. Operation will be checked for: "decrypt" and "encrypt" strings. Returns the message encrypted/decrypted. message must be a multiple by 16 bytes (for divis...
Given a key, given an iv, and message do whatever operation asked in the operation field. Operation will be checked for: "decrypt" and "encrypt" strings. Returns the message encrypted/decrypted. message must be a multiple by 16 bytes (for division in 16 byte blocks) key must be 32 byte iv ...
def add_line(self, p1, p2, char_length): """ Add a line to the list. Check if the nodes already exist, and add them if not. Return the line index (1-indixed, starting with 1) """ p1_id = self.get_point_id(p1, char_length) p2_id = self.get_point_id(p2, char_length...
Add a line to the list. Check if the nodes already exist, and add them if not. Return the line index (1-indixed, starting with 1)
def enumerate_device_serials(vid=FT232H_VID, pid=FT232H_PID): """Return a list of all FT232H device serial numbers connected to the machine. You can use these serial numbers to open a specific FT232H device by passing it to the FT232H initializer's serial parameter. """ try: # Create a libf...
Return a list of all FT232H device serial numbers connected to the machine. You can use these serial numbers to open a specific FT232H device by passing it to the FT232H initializer's serial parameter.
def install_napps(cls, napps): """Install local or remote NApps. This method is recursive, it will install each napps and your dependencies. """ mgr = NAppsManager() for napp in napps: mgr.set_napp(*napp) LOG.info(' NApp %s:', mgr.napp_id) ...
Install local or remote NApps. This method is recursive, it will install each napps and your dependencies.
def _get_vars(self): ''' load the vars section from a play, accounting for all sorts of variable features including loading from yaml files, prompting, and conditional includes of the first file found in a list. ''' if self.vars is None: self.vars = {} if type(self....
load the vars section from a play, accounting for all sorts of variable features including loading from yaml files, prompting, and conditional includes of the first file found in a list.
def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('...
Generate info line for GNTP Message :return string:
def as_text(self): '''Fetch and render all regions For search and test purposes just a prototype ''' from leonardo.templatetags.leonardo_tags import _render_content request = get_anonymous_request(self) content = '' try: for region in [re...
Fetch and render all regions For search and test purposes just a prototype
def get_index(table, field_name, op, value): ''' Returns the index of the first list entry that matches. If no matches are found, it returns None NOTE: it is not returning a list. It is returning an integer in range 0..LEN(target) NOTE: both 'None' and 0 evaluate as False in python. So, if you are ...
Returns the index of the first list entry that matches. If no matches are found, it returns None NOTE: it is not returning a list. It is returning an integer in range 0..LEN(target) NOTE: both 'None' and 0 evaluate as False in python. So, if you are checking for a None being returned, be explicit. "i...
def infer_shape(self, node, input_shapes): """Return a list of output shapes based on ``input_shapes``. This method is optional. It allows to compute the shape of the output without having to evaluate. Parameters ---------- node : `theano.gof.graph.Apply` Th...
Return a list of output shapes based on ``input_shapes``. This method is optional. It allows to compute the shape of the output without having to evaluate. Parameters ---------- node : `theano.gof.graph.Apply` The node of this Op in the computation graph. in...
def update_kwargs(kwargs, *updates): """ Utility function for merging multiple keyword arguments, depending on their type: * Non-existent keys are added. * Existing lists or tuples are extended, but not duplicating entries. The keywords ``command`` and ``entrypoint`` are however simply overwritte...
Utility function for merging multiple keyword arguments, depending on their type: * Non-existent keys are added. * Existing lists or tuples are extended, but not duplicating entries. The keywords ``command`` and ``entrypoint`` are however simply overwritten. * Nested dictionaries are updated, overrid...
def _get_audio_sample_bit(self, audio_abs_path): """ Parameters ---------- audio_abs_path : str Returns ------- sample_bit : int """ sample_bit = int( subprocess.check_output( ("""sox --i {} | grep "{}" | awk -F " : " '{{...
Parameters ---------- audio_abs_path : str Returns ------- sample_bit : int
def get_slack_channels(self, token): ''' Get all channel names from Slack ''' ret = salt.utils.slack.query( function='rooms', api_key=token, # These won't be honored until https://github.com/saltstack/salt/pull/41187/files is merged opts={...
Get all channel names from Slack
def duration(self, value): """The duration property. Args: value (string). the property value. """ if value == self._defaults['duration'] and 'duration' in self._values: del self._values['duration'] else: self._values['duration'] = val...
The duration property. Args: value (string). the property value.
def _on_cluster_discovery(self, future): """Invoked when the Redis server has responded to the ``CLUSTER_NODES`` command. :param future: The future containing the response from Redis :type future: tornado.concurrent.Future """ LOGGER.debug('_on_cluster_discovery(%r)', f...
Invoked when the Redis server has responded to the ``CLUSTER_NODES`` command. :param future: The future containing the response from Redis :type future: tornado.concurrent.Future
def get_plat_specifier(): """ Standard platform specifier used by distutils """ import setuptools # NOQA import distutils plat_name = distutils.util.get_platform() plat_specifier = ".%s-%s" % (plat_name, sys.version[0:3]) if hasattr(sys, 'gettotalrefcount'): plat_specifier += '-...
Standard platform specifier used by distutils
def _get_asset_content(self, asset_id, asset_content_type_str=None, asset_content_id=None): """stub""" rm = self.my_osid_object._get_provider_manager('REPOSITORY') if 'assignedBankIds' in self.my_osid_object._my_map: if self.my_osid_object._proxy is not None: als = rm...
stub
def ROC_AUC_analysis(adata,groupby,group=None, n_genes=100): """Calculate correlation matrix. Calculate a correlation matrix for genes strored in sample annotation using rank_genes_groups.py Parameters ---------- adata : :class:`~anndata.AnnData` Ann...
Calculate correlation matrix. Calculate a correlation matrix for genes strored in sample annotation using rank_genes_groups.py Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix. groupby : `str` The ...
def authorize(self, callback=None, state=None, **kwargs): """ Returns a redirect response to the remote authorization URL with the signed callback given. :param callback: a redirect url for the callback :param state: an optional value to embed in the OAuth request. ...
Returns a redirect response to the remote authorization URL with the signed callback given. :param callback: a redirect url for the callback :param state: an optional value to embed in the OAuth request. Use this if you want to pass around application ...
def flush(self): """flush() -> List of decoded messages. Decodes the packets in the internal buffer. This enables the continuation of the processing of received packets after a :exc:`ProtocolError` has been handled. :return: A (possibly empty) list of decoded me...
flush() -> List of decoded messages. Decodes the packets in the internal buffer. This enables the continuation of the processing of received packets after a :exc:`ProtocolError` has been handled. :return: A (possibly empty) list of decoded messages from the buffered pack...
def Close(self): """Closes the database file. Raises: RuntimeError: if the database is not opened. """ if not self._connection: raise RuntimeError('Cannot close database not opened.') # We need to run commit or not all data is stored in the database. self._connection.commit() s...
Closes the database file. Raises: RuntimeError: if the database is not opened.
def graphviz_parser(preprocessor, tag, markup): """ Simple Graphviz parser """ # Parse the markup string m = DOT_BLOCK_RE.search(markup) if m: # Get program and DOT code code = m.group('code') program = m.group('program').strip() # Run specified program with our markup ...
Simple Graphviz parser
def _set_minimum_links(self, v, load=False): """ Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_minimum_links is considered as a private method. Backends lookin...
Setter method for minimum_links, mapped from YANG variable /interface/port_channel/minimum_links (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_minimum_links is considered as a private method. Backends looking to populate this variable should do so via calling ...
def disconnect(self, si, logger, vcenter_data_model, vm_uuid, network_name=None, vm=None): """ disconnect network adapter of the vm. If 'network_name' = None - disconnect ALL interfaces :param <str> si: :param logger: :param VMwarevCenterResourceModel vcenter_data_model: ...
disconnect network adapter of the vm. If 'network_name' = None - disconnect ALL interfaces :param <str> si: :param logger: :param VMwarevCenterResourceModel vcenter_data_model: :param <str> vm_uuid: the uuid of the vm :param <str | None> network_name: the name of the specific net...
def traverse_pagination(response, endpoint, content_filter_query, query_params): """ Traverse a paginated API response and extracts and concatenates "results" returned by API. Arguments: response (dict): API response object. endpoint (Slumber.Resource): API endpoint obje...
Traverse a paginated API response and extracts and concatenates "results" returned by API. Arguments: response (dict): API response object. endpoint (Slumber.Resource): API endpoint object. content_filter_query (dict): query parameters used to filter catalog results. ...
def configure_api(app): """Configure API Endpoints. """ from heman.api.empowering import resources as empowering_resources from heman.api.cch import resources as cch_resources from heman.api.form import resources as form_resources from heman.api import ApiCatchall # Add Empowering resources...
Configure API Endpoints.
def end_of_month(val): """ Return a new datetime.datetime object with values that represent a end of a month. :param val: Date to ... :type val: datetime.datetime | datetime.date :rtype: datetime.datetime """ if type(val) == date: val = datetime.fromordinal(val.toordinal()) i...
Return a new datetime.datetime object with values that represent a end of a month. :param val: Date to ... :type val: datetime.datetime | datetime.date :rtype: datetime.datetime
def _get_labels(self, y): """ Construct pylearn2 dataset labels. Parameters ---------- y : array_like, optional Labels. """ y = np.asarray(y) assert y.ndim == 1 # convert to one-hot labels = np.unique(y).tolist() oh = n...
Construct pylearn2 dataset labels. Parameters ---------- y : array_like, optional Labels.
def decode(data_url): """ Decode DataURL data """ metadata, data = data_url.rsplit(',', 1) _, metadata = metadata.split('data:', 1) parts = metadata.split(';') if parts[-1] == 'base64': data = b64decode(data) else: data = unquote(data) for part in parts: if p...
Decode DataURL data
def http(self): """A thread local instance of httplib2.Http. Returns: httplib2.Http: An Http instance authorized by the credentials. """ if self._use_cached_http and hasattr(self._local, 'http'): return self._local.http if self._http_replay is not None: ...
A thread local instance of httplib2.Http. Returns: httplib2.Http: An Http instance authorized by the credentials.
def os_version(self, value): """The os_version property. Args: value (string). the property value. """ if value == self._defaults['ai.device.osVersion'] and 'ai.device.osVersion' in self._values: del self._values['ai.device.osVersion'] else: ...
The os_version property. Args: value (string). the property value.
def get_allowed_operations(resource, subresouce_path): """Helper function to get the HTTP allowed methods. :param resource: ResourceBase instance from which the path is loaded. :param subresource_path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested...
Helper function to get the HTTP allowed methods. :param resource: ResourceBase instance from which the path is loaded. :param subresource_path: JSON field to fetch the value from. Either a string, or a list of strings in case of a nested field. :returns: A list of allowed HTTP methods.
def convert_to_dataset(obj, *, group="posterior", coords=None, dims=None): """Convert a supported object to an xarray dataset. This function is idempotent, in that it will return xarray.Dataset functions unchanged. Raises `ValueError` if the desired group can not be extracted. Note this goes through a...
Convert a supported object to an xarray dataset. This function is idempotent, in that it will return xarray.Dataset objects unchanged. Raises `ValueError` if the desired group can not be extracted. Note this goes through a DataInference object. See `convert_to_inference_data` for more details. Raise...
def thresholdForIdentity(identity, colors): """ Get the best identity threshold for a specific identity value. @param identity: A C{float} nucleotide identity. @param colors: A C{list} of (threshold, color) tuples, where threshold is a C{float} and color is a C{str} to be used as a cell backgro...
Get the best identity threshold for a specific identity value. @param identity: A C{float} nucleotide identity. @param colors: A C{list} of (threshold, color) tuples, where threshold is a C{float} and color is a C{str} to be used as a cell background. This is as returned by C{parseColors}. ...
def datetime_to_time(date, time): """Take the date and time 4-tuples and return the time in seconds since the epoch as a floating point number.""" if (255 in date) or (255 in time): raise RuntimeError("specific date and time required") time_tuple = ( date[0]+1900, date[1], date[2], ...
Take the date and time 4-tuples and return the time in seconds since the epoch as a floating point number.
def del_character(self, name):
    """Permanently erase the named Character from the database.

    All of its history is deleted along with it, so be certain before
    calling this.
    """
    # Remove the persistent record, then the graph, then the cached entry.
    self.query.del_character(name)
    self.del_graph(name)
    del self.character[name]
Remove the Character from the database entirely. This also deletes all its history. You'd better be sure.
def truncate_table(self, tablename):
    """Empty the given table.

    SQLite3 has no direct TRUNCATE statement, so this emulates it by
    deleting every row and committing the transaction.
    """
    table = self.get(tablename)
    table.remove()
    self.db.commit()
SQLite3 doesn't support direct truncate, so we just use delete here
def lookup(parser, var, context, resolve=True, apply_filters=True): """ Try to resolve the varialbe in a context If ``resolve`` is ``False``, only string variables are returned """ if resolve: try: return Variable(var).resolve(context) except VariableDoesNotExist: ...
Try to resolve the variable in a context. If ``resolve`` is ``False``, only string variables are returned.
def from_value(value): """ Converts specified value into PagingParams. :param value: value to be converted :return: a newly created PagingParams. """ if isinstance(value, PagingParams): return value if isinstance(value, AnyValueMap): retu...
Converts specified value into PagingParams. :param value: value to be converted :return: a newly created PagingParams.
def try_log_part(self, context=None, with_start_message=True): """ Залогировать, если пришло время из part_log_time_minutes :return: boolean Возвращает True если лог был записан """ if context is None: context = {} self.__counter += 1 if time.time() - ...
Залогировать, если пришло время из part_log_time_minutes :return: boolean Возвращает True если лог был записан
def set_sample_weight(pipeline_steps, sample_weight=None): """Recursively iterates through all objects in the pipeline and sets sample weight. Parameters ---------- pipeline_steps: array-like List of (str, obj) tuples from a scikit-learn pipeline or related object sample_weight: array-like ...
Recursively iterates through all objects in the pipeline and sets sample weight. Parameters ---------- pipeline_steps: array-like List of (str, obj) tuples from a scikit-learn pipeline or related object sample_weight: array-like List of sample weight Returns ------- sample_w...
def File(self, name, directory = None, create = 1): """Look up or create a File node with the specified name. If the name is a relative path (begins with ./, ../, or a file name), then it is looked up relative to the supplied directory node, or to the top level directory of the FS (supp...
Look up or create a File node with the specified name. If the name is a relative path (begins with ./, ../, or a file name), then it is looked up relative to the supplied directory node, or to the top level directory of the FS (supplied at construction time) if no directory is supplied....
def mmatch(expr, delimiter, greedy, search_type, regex_match=False, exact_match=False, opts=None): ''' Helper function to search for minions in master caches If 'greedy' return accepted minions that matched by the condition or absent in the c...
Helper function to search for minions in master caches. If 'greedy', return accepted minions that matched the condition or are absent from the cache. If not 'greedy', return only the minions that have cache data and matched the condition.
def flavor_list(request):
    """Utility method to retrieve a list of flavors.

    Returns an empty list (after delegating error handling) when the
    flavors cannot be fetched.
    """
    try:
        flavors = api.nova.flavor_list(request)
    except Exception:
        exceptions.handle(request, _('Unable to retrieve instance flavors.'))
        return []
    return flavors
Utility method to retrieve a list of flavors.
def getDetailInfo(self, CorpNum, ItemCode, MgtKey): """ 전자명세서 상세정보 확인 args CorpNum : 팝빌회원 사업자번호 ItemCode : 명세서 종류 코드 [121 - 거래명세서], [122 - 청구서], [123 - 견적서], [124 - 발주서], [125 - 입금표], [126 - 영수증] MgtKey : ...
전자명세서 상세정보 확인 args CorpNum : 팝빌회원 사업자번호 ItemCode : 명세서 종류 코드 [121 - 거래명세서], [122 - 청구서], [123 - 견적서], [124 - 발주서], [125 - 입금표], [126 - 영수증] MgtKey : 파트너 문서관리번호 return 문서 상세정보 object ...
def find_file( self, folder_id, basename, limit = 500 ): ''' Finds a file based on a box path Returns a list of file IDs Returns multiple file IDs if the file was split into parts with the extension '.partN' (where N is an integer) ''' search_folder = self.client.folder( ...
Finds a file based on a box path Returns a list of file IDs Returns multiple file IDs if the file was split into parts with the extension '.partN' (where N is an integer)
def fetch_session_start_times(data_dir, pivot, session_dates): """ :param data_dir: (str) directory in which the output file will be saved :param pivot: (int) congressperson document to use as a pivot for scraping the data :param session_dates: (list) datetime objects to fetch the start times for ""...
:param data_dir: (str) directory in which the output file will be saved :param pivot: (int) congressperson document to use as a pivot for scraping the data :param session_dates: (list) datetime objects to fetch the start times for
def generate_sinusoidal_lightcurve( times, mags=None, errs=None, paramdists={ 'period':sps.uniform(loc=0.04,scale=500.0), 'fourierorder':[2,10], 'amplitude':sps.uniform(loc=0.1,scale=0.9), 'phioffset':0.0, }, magsarefluxes=F...
This generates fake sinusoidal light curves. This can be used for a variety of sinusoidal variables, e.g. RRab, RRc, Cepheids, Miras, etc. The functions that generate these model LCs below implement the following table:: ## FOURIER PARAMS FOR SINUSOIDAL VARIABLES # # type fo...
def get_clamav_conf(filename):
    """Initialize clamav configuration.

    Returns a ClamavConfig built from ``filename`` when the file exists;
    otherwise logs a warning and returns None.
    """
    # Guard clause: nothing to load if the config file is missing.
    if not os.path.isfile(filename):
        log.warn(LOG_PLUGIN, "No ClamAV config file found at %r.", filename)
        return None
    return ClamavConfig(filename)
Initialize clamav configuration.
def waitForSlotEvent(self, flags=0): """ C_WaitForSlotEvent :param flags: 0 (default) or `CKF_DONT_BLOCK` :type flags: integer :return: slot :rtype: integer """ tmp = 0 (rv, slot) = self.lib.C_WaitForSlotEvent(flags, tmp) if rv != CKR_OK: ...
C_WaitForSlotEvent :param flags: 0 (default) or `CKF_DONT_BLOCK` :type flags: integer :return: slot :rtype: integer
def unique_id(self): """Creates a unique ID for the `Atom` based on its parents. Returns ------- unique_id : (str, str, str) (polymer.id, residue.id, atom.id) """ chain = self.parent.parent.id residue = self.parent.id return chain, residue, se...
Creates a unique ID for the `Atom` based on its parents. Returns ------- unique_id : (str, str, str) (polymer.id, residue.id, atom.id)
def parse_ical(vcal): '''Parse Opencast schedule iCalendar file and return events as dict ''' vcal = vcal.replace('\r\n ', '').replace('\r\n\r\n', '\r\n') vevents = vcal.split('\r\nBEGIN:VEVENT\r\n') del(vevents[0]) events = [] for vevent in vevents: event = {} for line in ve...
Parse Opencast schedule iCalendar file and return events as dict
def stage_http_request(self, conn_id, version, url, target, method, headers, payload): """Set request HTTP information including url, headers, etc.""" # pylint: disable=attribute-defined-outside-init self._http_request_version = version self._http_request_conn_...
Set request HTTP information including url, headers, etc.