code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def write_file_to_zip_with_neutral_metadata(zfile, filename, content): """ Write the string `content` to `filename` in the open ZipFile `zfile`. Args: zfile (ZipFile): open ZipFile to write the content into filename (str): the file path within the zip file to write into content (str)...
Write the string `content` to `filename` in the open ZipFile `zfile`. Args: zfile (ZipFile): open ZipFile to write the content into filename (str): the file path within the zip file to write into content (str): the content to write into the zip Returns: None
def next(cls): """Return next available record identifier.""" try: with db.session.begin_nested(): obj = cls() db.session.add(obj) except IntegrityError: # pragma: no cover with db.session.begin_nested(): # Someone has like...
Return next available record identifier.
def upload_html(destination, html, name=None): """ Uploads the HTML to a file on the server """ [project, path, n] = parse_destination(destination) try: dxfile = dxpy.upload_string(html, media_type="text/html", project=project, folder=path, hidden=True, name=name or None) return dxfi...
Uploads the HTML to a file on the server
def call_for_each_tower( towers, func, devices=None, use_vs=None): """ Run `func` on all GPUs (towers) and return the results. Args: towers (list[int]): a list of GPU id. func: a lambda to be called inside each tower devices: a list of devices to ...
Run `func` on all GPUs (towers) and return the results. Args: towers (list[int]): a list of GPU id. func: a lambda to be called inside each tower devices: a list of devices to be used. By default will use '/gpu:{tower}' use_vs (list[bool]): list of use_vs to pass...
def split_and_strip_without(string, exclude, separator_regexp=None): """Split a string into items, and trim any excess spaces Any items in exclude are not in the returned list >>> split_and_strip_without('fred, was, here ', ['was']) ['fred', 'here'] """ result = split_and_strip(string, separa...
Split a string into items, and trim any excess spaces Any items in exclude are not in the returned list >>> split_and_strip_without('fred, was, here ', ['was']) ['fred', 'here']
def dfs_postorder(self, reverse=False): """Generator that returns each element of the tree in Postorder order. Keyword arguments: reverse -- if true, the search is done from right to left.""" stack = deque() stack.append(self) visited = set() while stack: ...
Generator that returns each element of the tree in Postorder order. Keyword arguments: reverse -- if true, the search is done from right to left.
def match(self, pattern):
    """Attempt a regex match anchored at the current index.

    On success the index is advanced past the matched text.

    :param pattern: a compiled regex pattern
    :return: the match object, or None if the pattern did not match
    """
    found = pattern.match(self._string, self._index)
    if found is not None:
        # consume the matched span
        self._index = found.end()
    return found
Perform regex match at index.
def tredparse(args): """ %prog tredparse Compare performances of various variant callers on simulated STR datasets. Adds coverage comparisons as panel C and D. """ p = OptionParser(tredparse.__doc__) p.add_option('--maxinsert', default=300, type="int", help="Maximum number ...
%prog tredparse Compare performances of various variant callers on simulated STR datasets. Adds coverage comparisons as panel C and D.
def _common_query_parameters(self, doc_type, includes, owner, promulgated_only, series, sort): ''' Extract common query parameters between search and list into slice. @param includes What metadata to return in results (e.g. charm-config). @param doc_type...
Extract common query parameters between search and list into slice. @param includes What metadata to return in results (e.g. charm-config). @param doc_type Filter to this type: bundle or charm. @param promulgated_only Whether to filter to only promulgated charms. @param sort Sorting the...
def divrank_scipy(G, alpha=0.25, d=0.85, personalization=None, max_iter=100, tol=1.0e-6, nstart=None, weight='weight', dangling=None): ''' Returns the DivRank (Diverse Rank) of the nodes in the graph. This code is based on networkx.pagerank_scipy ''' import scipy....
Returns the DivRank (Diverse Rank) of the nodes in the graph. This code is based on networkx.pagerank_scipy
def compare(self, statement_a, statement_b): """ Return the calculated similarity of two statements based on the Jaccard index. """ # Make both strings lowercase document_a = self.nlp(statement_a.text.lower()) document_b = self.nlp(statement_b.text.lower()) ...
Return the calculated similarity of two statements based on the Jaccard index.
def run(self, cmd, sudo=False, ignore_error=False, success_status=(0,), error_callback=None, custom_log=None, retry=0): """Run a command on the remote host. The command is run on the remote host, if there is a redirected host then the command will be run on that redirected host. See...
Run a command on the remote host. The command is run on the remote host, if there is a redirected host then the command will be run on that redirected host. See __init__. :param cmd: the command to run :type cmd: str :param sudo: True if the command should be run with sudo, thi...
def plot(result_pickle_file_path, show, plot_save_file):
    """
    [sys_analyser] draw result DataFrame
    """
    import pandas as pd

    from .plot import plot_result

    # Load the pickled result dict and hand it to the plotting helper.
    results = pd.read_pickle(result_pickle_file_path)
    plot_result(results, show, plot_save_file)
[sys_analyser] draw result DataFrame
def generate_config_set(self, config): ''' Generates a list of magnitude frequency distributions and renders as a tuple :param dict/list config: Configuration paramters of magnitude frequency distribution ''' if isinstance(config, dict): # Configu...
Generates a list of magnitude frequency distributions and renders as a tuple :param dict/list config: Configuration parameters of magnitude frequency distribution
def parse_on_condition(self, node): """ Parses <OnCondition> @param node: Node containing the <OnCondition> element @type node: xml.etree.Element """ try: test = node.lattrib['test'] except: self.raise_error('<OnCondition> must specify a ...
Parses <OnCondition> @param node: Node containing the <OnCondition> element @type node: xml.etree.Element
def timebinlc_worker(task): ''' This is a parallel worker for the function below. Parameters ---------- task : tuple This is of the form:: task[0] = lcfile task[1] = binsizesec task[3] = {'outdir','lcformat','lcformatdir', 'timeco...
This is a parallel worker for the function below. Parameters ---------- task : tuple This is of the form:: task[0] = lcfile task[1] = binsizesec task[3] = {'outdir','lcformat','lcformatdir', 'timecols','magcols','errcols','minbinelems'} ...
def T11(word, rules): '''If a VVV sequence contains a /u,y/-final diphthong, insert a syllable boundary between the diphthong and the third vowel.''' WORD = word offset = 0 for vvv in precedence_sequences(WORD): i = vvv.start(1) + (1 if vvv.group(1)[-1] in 'uyUY' else 2) + offset WO...
If a VVV sequence contains a /u,y/-final diphthong, insert a syllable boundary between the diphthong and the third vowel.
def asterisk_to_min_max(field, time_filter, search_engine_endpoint, actual_params=None): """ traduce [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE] :param field: map the stats to this field. :param time_filter: this is the value to be translated. think in "[* TO 2000]" :param sear...
traduce [* TO *] to something like [MIN-INDEXED-DATE TO MAX-INDEXED-DATE] :param field: map the stats to this field. :param time_filter: this is the value to be translated. think in "[* TO 2000]" :param search_engine_endpoint: solr core :param actual_params: (not implemented) to merge with other params....
def populate_branch(self, editor, root_item, tree_cache=None): """ Generates an outline of the editor's content and stores the result in a cache. """ if tree_cache is None: tree_cache = {} # Removing cached items for which line is > total line...
Generates an outline of the editor's content and stores the result in a cache.
def from_filename(cls, filename): """ Class constructor using the path to the corresponding mp3 file. The metadata will be read from this file to create the song object, so it must at least contain valid ID3 tags for artist and title. """ if not filename: logg...
Class constructor using the path to the corresponding mp3 file. The metadata will be read from this file to create the song object, so it must at least contain valid ID3 tags for artist and title.
def run(self, N=100): """ Parameter --------- N: int number of particles Returns ------- wgts: Weights object The importance weights (with attributes lw, W, and ESS) X: ThetaParticles object The N particles (wi...
Parameter --------- N: int number of particles Returns ------- wgts: Weights object The importance weights (with attributes lw, W, and ESS) X: ThetaParticles object The N particles (with attributes theta, logpost) norm_cst: flo...
def do_forceescape(value):
    """Enforce HTML escaping.

    This will probably double escape variables.
    """
    # Honor the __html__ protocol first, then escape the stringified result.
    raw = value.__html__() if hasattr(value, '__html__') else value
    return escape(text_type(raw))
Enforce HTML escaping. This will probably double escape variables.
async def rset(self, timeout: DefaultNumType = _default) -> SMTPResponse: """ Send an SMTP RSET command, which resets the server's envelope (the envelope contains the sender, recipient, and mail data). :raises SMTPResponseException: on unexpected server response code """ ...
Send an SMTP RSET command, which resets the server's envelope (the envelope contains the sender, recipient, and mail data). :raises SMTPResponseException: on unexpected server response code
def format_output(self, rendered_widgets): """ This output will yeild all widgets grouped in a un-ordered list """ ret = [u'<ul class="formfield">'] for i, field in enumerate(self.fields): label = self.format_label(field, i) help_text = self.format_help_te...
This output will yield all widgets grouped in an unordered list
def get_events(self, start_time, end_time, ignore_cancelled = True, get_recurring_events_as_instances = True, restrict_to_calendars = []): '''A wrapper for events().list. Returns the events from the calendar within the specified times. Some of the interesting fields are: description, end, htmlLi...
A wrapper for events().list. Returns the events from the calendar within the specified times. Some of the interesting fields are: description, end, htmlLink, location, organizer, start, summary Note: "Cancelled instances of recurring events (but not the underlying recurring event) will ...
def _auth_req_callback_func(self, context, internal_request): """ This function is called by a frontend module when an authorization request has been processed. :type context: satosa.context.Context :type internal_request: satosa.internal.InternalData :rtype: satosa.resp...
This function is called by a frontend module when an authorization request has been processed. :type context: satosa.context.Context :type internal_request: satosa.internal.InternalData :rtype: satosa.response.Response :param context: The request context :param internal...
def rename(self, name, **kwargs): """ :param name: New name for the object :type name: string Renames the remote object. The name is changed on the copy of the object in the project associated with the handler. """ return self._rename(self._dxid, {"pro...
:param name: New name for the object :type name: string Renames the remote object. The name is changed on the copy of the object in the project associated with the handler.
def getIncludeAndRuntime(): """ A function from distutils' build_ext.py that was updated and changed to ACTUALLY WORK """ include_dirs, library_dirs = [], [] py_include = distutils.sysconfig.get_python_inc() plat_py_include = distutils.sysconfig.get_python_inc(plat_specific=1) include_...
A function from distutils' build_ext.py that was updated and changed to ACTUALLY WORK
def versions_request(self):
    """List the REST API versions available on the server."""
    # An empty api_ver hits the version-listing root endpoint.
    response = self.handle_api_exceptions('GET', '', api_ver='')
    versions = response.json()
    return [str_dict(version) for version in versions]
List Available REST API Versions
def compare_digest(a, b):
    """Compare two hash digests, dispatching on the Python major version."""
    if sys.version_info[0] >= 3:
        return _compare_digest_py3(a, b)
    return _compare_digest_py2(a, b)
Compare 2 hash digest.
def extract_docs(): """ Parses the nano.rpc.Client for methods that have a __doc_meta__ attribute and saves generated docs """ methods = [] def _key(entry): return sorted_entries = sorted(Client.__dict__.items(), key=lambda x: x[0]) tree = {} meta_key = '__doc_meta__' ...
Parses the nano.rpc.Client for methods that have a __doc_meta__ attribute and saves generated docs
def parse_eggs_list(path): """Parse eggs list from the script at the given path """ with open(path, 'r') as script: data = script.readlines() start = 0 end = 0 for counter, line in enumerate(data): if not start: if 'sys.path[0:0]' in line: ...
Parse eggs list from the script at the given path
def run_powerflow_onthefly(components, components_data, grid, export_pypsa_dir=None, debug=False): """ Run powerflow to test grid stability Two cases are defined to be tested here: i) load case ii) feed-in case Parameters ---------- components: dict of pandas.DataFrame components...
Run powerflow to test grid stability Two cases are defined to be tested here: i) load case ii) feed-in case Parameters ---------- components: dict of pandas.DataFrame components_data: dict of pandas.DataFrame export_pypsa_dir: str Sub-directory in output/debug/grid/ where csv...
def is_spontaneous(gene, custom_id=None): """Input a COBRApy Gene object and check if the ID matches a spontaneous ID regex. Args: gene (Gene): COBRApy Gene custom_id (str): Optional custom spontaneous ID if it does not match the regular expression ``[Ss](_|)0001`` Returns: boo...
Input a COBRApy Gene object and check if the ID matches a spontaneous ID regex. Args: gene (Gene): COBRApy Gene custom_id (str): Optional custom spontaneous ID if it does not match the regular expression ``[Ss](_|)0001`` Returns: bool: If gene ID matches spontaneous ID
def run(self): """The actual event loop. Calls the ``owner``'s :py:meth:`~Component.start_event` method, then calls its :py:meth:`~Component.new_frame_event` and :py:meth:`~Component.new_config_event` methods as required until :py:meth:`~Component.stop` is called. Finally the ``...
The actual event loop. Calls the ``owner``'s :py:meth:`~Component.start_event` method, then calls its :py:meth:`~Component.new_frame_event` and :py:meth:`~Component.new_config_event` methods as required until :py:meth:`~Component.stop` is called. Finally the ``owner``'s :py:meth...
def home_mode_set_state(self, state, **kwargs): """Set the state of Home Mode""" # It appears that surveillance station needs lowercase text # true/false for the on switch if state not in (HOME_MODE_ON, HOME_MODE_OFF): raise ValueError('Invalid home mode state') api...
Set the state of Home Mode
def keys(self):
    """Return ids of all indexed documents from both underlying indexes."""
    ids = []
    # Either index may be absent; collect from whichever exists.
    for index in (self.fresh_index, self.opt_index):
        if index is not None:
            ids += index.keys()
    return ids
Return ids of all indexed documents.
def mean_return_by_quantile(factor_data, by_date=False, by_group=False, demeaned=True, group_adjust=False): """ Computes mean returns for factor quantiles across provided forward returns columns. ...
Computes mean returns for factor quantiles across provided forward returns columns. Parameters ---------- factor_data : pd.DataFrame - MultiIndex A MultiIndex DataFrame indexed by date (level 0) and asset (level 1), containing the values for a single alpha factor, forward returns for ...
async def unset_lock(self, resource, lock_identifier): """ Unlock this instance :param resource: redis key to set :param lock_identifier: uniquie id of lock :raises: LockError if the lock resource acquired with different lock_identifier """ try: with a...
Unlock this instance :param resource: redis key to set :param lock_identifier: unique id of lock :raises: LockError if the lock resource was acquired with a different lock_identifier
def registIssue(self, CorpNum, taxinvoice, writeSpecification=False, forceIssue=False, dealInvoiceMgtKey=None, memo=None, emailSubject=None, UserID=None): """ 즉시 발행 args CorpNum : 팝빌회원 사업자번호 taxinvoice : 세금계산서 객체 writeSpecification ...
즉시 발행 args CorpNum : 팝빌회원 사업자번호 taxinvoice : 세금계산서 객체 writeSpecification : 거래명세서 동시작성 여부 forceIssue : 지연발행 강제여부 dealInvoiceMgtKey : 거래명세서 문서관리번호 memo : 메모 emailSubject : 메일제목, 미기재시 기본제목으로 전송 ...
def get_border_phase(self, idn=0, idr=0): """Return one of nine border fields Parameters ---------- idn: int Index for refractive index. One of -1 (left), 0 (center), 1 (right) idr: int Index for radius. One of -1 (left), 0 (center...
Return one of nine border fields Parameters ---------- idn: int Index for refractive index. One of -1 (left), 0 (center), 1 (right) idr: int Index for radius. One of -1 (left), 0 (center), 1 (right)
def helical_turbulent_fd_Mori_Nakayama(Re, Di, Dc): r'''Calculates Darcy friction factor for a fluid flowing inside a curved pipe such as a helical coil under turbulent conditions, using the method of Mori and Nakayama [1]_, also shown in [2]_ and [3]_. .. math:: f_{curv} = 0.3\left(\...
r'''Calculates Darcy friction factor for a fluid flowing inside a curved pipe such as a helical coil under turbulent conditions, using the method of Mori and Nakayama [1]_, also shown in [2]_ and [3]_. .. math:: f_{curv} = 0.3\left(\frac{D_i}{D_c}\right)^{0.5} \left[Re\left(\frac{...
def verbose_message(self):
    """Return a fuller description of the value/threshold comparison."""
    if self.threshold is None:
        return 'No threshold'
    parts = (self.value, self.adjective, self.threshold)
    return '%.1f is %s than %.1f' % parts
return more complete message
def validate_path(ctx, param, value): """Detect a workflow path if it is not passed.""" client = ctx.obj if value is None: from renku.models.provenance import ProcessRun activity = client.process_commit() if not isinstance(activity, ProcessRun): raise click.BadParameter...
Detect a workflow path if it is not passed.
def macro_body(self, node, frame, children=None): """Dump the function def of a macro or call block.""" frame = self.function_scoping(node, frame, children) # macros are delayed, they never require output checks frame.require_output_check = False args = frame.arguments # ...
Dump the function def of a macro or call block.
def srcnode(self): """If this node is in a build path, return the node corresponding to its source file. Otherwise, return ourself. """ srcdir_list = self.dir.srcdir_list() if srcdir_list: srcnode = srcdir_list[0].Entry(self.name) srcnode.must_be_...
If this node is in a build path, return the node corresponding to its source file. Otherwise, return ourself.
def get(self, metric_id=None, **kwargs): """Get metrics :param int metric_id: Metric ID :return: Metrics data (:class:`dict`) Additional named arguments may be passed and are directly transmitted to API. It is useful to use the API search features. .. seealso:: https:/...
Get metrics :param int metric_id: Metric ID :return: Metrics data (:class:`dict`) Additional named arguments may be passed and are directly transmitted to API. It is useful to use the API search features. .. seealso:: https://docs.cachethq.io/reference#get-metrics .. s...
async def _deploy(self, charm_url, application, series, config, constraints, endpoint_bindings, resources, storage, channel=None, num_units=None, placement=None, devices=None): """Logic shared between `Model.deploy` and `BundleHandler.deploy`. ...
Logic shared between `Model.deploy` and `BundleHandler.deploy`.
def get_code(self, timestamp=None): """ :param timestamp: time to use for code generation :type timestamp: int :return: two factor code :rtype: str """ return generate_twofactor_code_for_time(b64decode(self.shared_secret), ...
:param timestamp: time to use for code generation :type timestamp: int :return: two factor code :rtype: str
def retrieve(self, session, lookup_keys, *args, **kwargs): """ Retrieves a model using the lookup keys provided. Only one model should be returned by the lookup_keys or else the manager will fail. :param Session session: The SQLAlchemy session to use :param dict lookup_k...
Retrieves a model using the lookup keys provided. Only one model should be returned by the lookup_keys or else the manager will fail. :param Session session: The SQLAlchemy session to use :param dict lookup_keys: A dictionary mapping the fields and their expected values ...
def share_file(comm, path): """ Copies the file from rank 0 to all other ranks Puts it in the same place on all machines """ localrank, _ = get_local_rank_size(comm) if comm.Get_rank() == 0: with open(path, 'rb') as fh: data = fh.read() comm.bcast(data) else: ...
Copies the file from rank 0 to all other ranks Puts it in the same place on all machines
def iter_islast(iterable): """Generate (item, islast) pairs for an iterable. Generates pairs where the first element is an item from the iterable source and the second element is a boolean flag indicating if it is the last item in the sequence. """ it = iter(iterable) prev = next(it) fo...
Generate (item, islast) pairs for an iterable. Generates pairs where the first element is an item from the iterable source and the second element is a boolean flag indicating if it is the last item in the sequence.
def path_size(path, total=False, ext='', level=None, verbosity=0): """Walk the file tree and query the file.stat object(s) to compute their total (or individual) size in bytes Returns: dict: {relative_path: file_size_in_bytes, ...} Examples: >>> all(d >= 0 for d in path_size(__file__).values()...
Walk the file tree and query the file.stat object(s) to compute their total (or individual) size in bytes Returns: dict: {relative_path: file_size_in_bytes, ...} Examples: >>> all(d >= 0 for d in path_size(__file__).values()) True >>> sum(path_size(os.path.dirname(__file__)).values()) ...
def _writeXputMaps(self, session, directory, mapCards, name=None, replaceParamFile=None): """ GSSHAPY Project Write Map Files to File Method """ if self.mapType in self.MAP_TYPES_SUPPORTED: for card in self.projectCards: if (card.name in...
GSSHAPY Project Write Map Files to File Method
def header(self, sheet, name): """ Write sheet header. Args: sheet: (xlwt.Worksheet.Worksheet) instance of xlwt sheet. name: (unicode) name of sheet. """ header = sheet.row(0) for i, column in enumerate(self.headers[name]): header.writ...
Write sheet header. Args: sheet: (xlwt.Worksheet.Worksheet) instance of xlwt sheet. name: (unicode) name of sheet.
def select(self, *features): """ selects the features given as string e.g passing 'hello' and 'world' will result in imports of 'hello' and 'world'. Then, if possible 'hello.feature' and 'world.feature' are imported and select is called in each feature module. ...
selects the features given as string e.g passing 'hello' and 'world' will result in imports of 'hello' and 'world'. Then, if possible 'hello.feature' and 'world.feature' are imported and select is called in each feature module.
def commit(name, repository, tag='latest', message=None, author=None): ''' .. versionchanged:: 2018.3.0 The repository and tag must now be passed separately using the ``repository`` and ``tag`` arguments, rather than together in the (now deprec...
.. versionchanged:: 2018.3.0 The repository and tag must now be passed separately using the ``repository`` and ``tag`` arguments, rather than together in the (now deprecated) ``image`` argument. Commits a container, thereby promoting it to an image. Equivalent to running the ``docker co...
def unindent_selection(self, cursor): """ Un-indents selected text :param cursor: QTextCursor """ doc = self.editor.document() tab_len = self.editor.tab_length nb_lines = len(cursor.selection().toPlainText().splitlines()) if nb_lines == 0: nb_...
Un-indents selected text :param cursor: QTextCursor
def get_table_list(self, cursor):
    """Return the table names (as TableInfo entries) in the current database."""
    # The table list is served from the cached 'sobjects' describe data.
    cached = self.table_list_cache['sobjects']
    return [TableInfo(SfProtectName(row['name']), 't') for row in cached]
Returns a list of table names in the current database.
def object_to_json(obj, indent=2):
    """Serialize *obj* to a JSON string using Django's JSON encoder.

    :param obj: object to serialize
    :param indent: indentation width for pretty-printing (default 2)
    :return: the JSON string (non-ASCII characters kept as-is)
    """
    return json.dumps(obj, indent=indent, ensure_ascii=False,
                      cls=DjangoJSONEncoder)
transform object to json
def _DecodeUnrecognizedFields(message, pair_type): """Process unrecognized fields in message.""" new_values = [] codec = _ProtoJsonApiTools.Get() for unknown_field in message.all_unrecognized_fields(): # TODO(craigcitro): Consider validating the variant if # the assignment below doesn't ...
Process unrecognized fields in message.
def get_salic_url(item, prefix, df_values=None): """ Mount a salic url for the given item. """ url_keys = { 'pronac': 'idPronac', 'uf': 'uf', 'product': 'produto', 'county': 'idmunicipio', 'item_id': 'idPlanilhaItem', 'stage': 'etapa', } if df_val...
Mount a salic url for the given item.
def diff(self, container): """ Inspect changes on a container's filesystem. Args: container (str): The container to diff Returns: (str) Raises: :py:class:`docker.errors.APIError` If the server returns an error. """ ...
Inspect changes on a container's filesystem. Args: container (str): The container to diff Returns: (str) Raises: :py:class:`docker.errors.APIError` If the server returns an error.
def RemoveMethod(self, function):
    """
    Removes the specified function's MethodWrapper from the
    added_methods list, so we don't re-bind it when making a clone.

    :param function: the underlying function whose wrapper to drop
    """
    # Idiom fix: `dm.method is not function` instead of `not dm.method is function`.
    # Identity comparison is intentional: we drop only the exact function object.
    self.added_methods = [dm for dm in self.added_methods
                          if dm.method is not function]
Removes the specified function's MethodWrapper from the added_methods list, so we don't re-bind it when making a clone.
def capture_heroku_database(self): """ Capture Heroku database backup. """ self.print_message("Capturing database backup for app '%s'" % self.args.source_app) args = [ "heroku", "pg:backups:capture", "--app=%s" % self.args.source_app, ] if self...
Capture Heroku database backup.
def trim_decimals(s, precision=-3): """ Convert from scientific notation using precision """ encoded = s.encode('ascii', 'ignore') str_val = "" if six.PY3: str_val = str(encoded, encoding='ascii', errors='ignore')[:precision] else: # If pre...
Convert from scientific notation using precision
def _compile_mapping(self, schema, invalid_msg=None): """Create validator for given mapping.""" invalid_msg = invalid_msg or 'mapping value' # Keys that may be required all_required_keys = set(key for key in schema if key is not Extra and ...
Create validator for given mapping.
def _parse_hextet(self, hextet_str): """Convert an IPv6 hextet string into an integer. Args: hextet_str: A string, the number to parse. Returns: The hextet as an integer. Raises: ValueError: if the input isn't strictly a hex number from [0..FFFF]. ...
Convert an IPv6 hextet string into an integer. Args: hextet_str: A string, the number to parse. Returns: The hextet as an integer. Raises: ValueError: if the input isn't strictly a hex number from [0..FFFF].
def code_almost_equal(a, b): """Return True if code is similar. Ignore whitespace when comparing specific line. """ split_a = split_and_strip_non_empty_lines(a) split_b = split_and_strip_non_empty_lines(b) if len(split_a) != len(split_b): return False for (index, _) in enumerate(...
Return True if code is similar. Ignore whitespace when comparing specific line.
def add_fields(self, fields=None, **kwargs):
    """
    Add the given fields to this object's list of fields.

    :param fields: iterable of fields to append; ignored when None
    :param kwargs: accepted for backward compatibility; currently unused
    """
    # Idiom fixes: `is not None` instead of `!= None`, and extend()
    # instead of a manual append loop.
    if fields is not None:
        self.fields.extend(fields)
Add the fields into the list of fields.
def get_keypair_name():
    """Return the keypair name derived from the prefix and current user."""
    username = get_username()
    # '-' is the separator between prefix and username below
    assert '-' not in username, "username must not contain -, change $USER"
    validate_aws_name(username)
    assert len(username) < 30  # to avoid exceeding AWS 127 char limit
    return '-'.join([get_prefix(), username])
Returns current keypair name.
def encode_network(root): """Yield ref-containing obj table entries from object network""" def fix_values(obj): if isinstance(obj, Container): obj.update((k, get_ref(v)) for (k, v) in obj.items() if k != 'class_name') fixed_obj = obj ...
Yield ref-containing obj table entries from object network
def Append(self, **kw): """Append values to existing construction variables in an Environment. """ kw = copy_non_reserved_keywords(kw) for key, val in kw.items(): # It would be easier on the eyes to write this using # "continue" statements whenever we fini...
Append values to existing construction variables in an Environment.
def patch(self, patched_value): """Set a new value for the attribute of the object.""" try: if self.getter: setattr(self.getter_class, self.attr_name, patched_value) else: setattr(self.orig_object, self.attr_name, patched_value) except Type...
Set a new value for the attribute of the object.
def validation_scatter(self, log_lam, b, masks, pre_v, gp, flux, time, med): ''' Computes the scatter in the validation set. ''' # Update the lambda matrix self.lam[b] = 10 ** log_lam # Validation set scatter scatter = [None for i in ...
Computes the scatter in the validation set.
def remove_connection(self, id_interface, back_or_front): """ Remove a connection between two interfaces :param id_interface: One side of relation :param back_or_front: This side of relation is back(0) or front(1) :return: None :raise InterfaceInvalidBackFrontError: Fr...
Remove a connection between two interfaces :param id_interface: One side of relation :param back_or_front: This side of relation is back(0) or front(1) :return: None :raise InterfaceInvalidBackFrontError: Front or Back of interfaces not match to remove connection :raise Invali...
def txn_data2schema_key(self, txn: dict) -> SchemaKey: """ Return schema key from ledger transaction data. :param txn: get-schema transaction (by sequence number) :return: schema key identified """ rv = None if self == Protocol.V_13: rv = SchemaKey(t...
Return schema key from ledger transaction data. :param txn: get-schema transaction (by sequence number) :return: schema key identified
def _assign_name(self, obj, name, shaders): """ Assign *name* to *obj* in *shaders*. """ if self._is_global(obj): assert name not in self._global_ns self._global_ns[name] = obj else: for shader in shaders: ns = self._shader_ns[shader] ...
Assign *name* to *obj* in *shaders*.
def authorize(self, ip_protocol=None, from_port=None, to_port=None, cidr_ip=None, src_group=None): """ Add a new rule to this security group. You need to pass in either src_group_name OR ip_protocol, from_port, to_port, and cidr_ip. In other words, either you a...
Add a new rule to this security group. You need to pass in either src_group_name OR ip_protocol, from_port, to_port, and cidr_ip. In other words, either you are authorizing another group or you are authorizing some ip-based rule. :type ip_protocol: string :param...
def push(self, *args, **kwargs): """ GitHub push Event When a GitHub push event is posted it will be broadcast on this exchange with the designated `organization` and `repository` in the routing-key along with event specific metadata in the payload. This exchange output...
GitHub push Event When a GitHub push event is posted it will be broadcast on this exchange with the designated `organization` and `repository` in the routing-key along with event specific metadata in the payload. This exchange outputs: ``v1/github-push-message.json#``This exchange take...
def textContent(self, text: str) -> None:  # type: ignore
    """Set textContent both on this node and related browser node."""
    self._set_text_content(text)
    if not self.connected:
        # No live browser node to mirror the change to.
        return
    self._set_text_content_web(text)
Set textContent both on this node and related browser node.
def get_instances(feature_name):
    """Return all instances that compute `feature_name`."""
    return [ft for ft in AncillaryFeature.features
            if ft.feature_name == feature_name]
Return all instances that compute `feature_name`
def call_fset(self, obj, value) -> None:
    """Invoke the user-supplied setter and cache its result on *obj*."""
    computed = self.fset(obj, value)
    # Write straight into the instance __dict__ so later reads hit the cache.
    vars(obj)[self.name] = computed
Store the given custom value and call the setter function.
def handle_stream(self, stream, address): ''' Handle incoming streams and add messages to the incoming queue ''' log.trace('Req client %s connected', address) self.clients.append((stream, address)) unpacker = msgpack.Unpacker() try: while True: ...
Handle incoming streams and add messages to the incoming queue
def open(self): """ Load topology elements """ if self._status == "opened": return self.reset() self._loading = True self._status = "opened" path = self._topology_file() if not os.path.exists(path): self._loading = False ...
Load topology elements
def get_previous_character(self): """ Returns the character before the cursor. :return: Previous cursor character. :rtype: QString """ cursor = self.textCursor() cursor.movePosition(QTextCursor.PreviousCharacter, QTextCursor.KeepAnchor) return cursor.sel...
Returns the character before the cursor. :return: Previous cursor character. :rtype: QString
def build(self): """ Iterates through the views pointed to by self.detail_views, runs build_object with `self`, and calls _build_extra() and _build_related(). """ for detail_view in self.detail_views: view = self._get_view(detail_view) view().build...
Iterates through the views pointed to by self.detail_views, runs build_object with `self`, and calls _build_extra() and _build_related().
def _nonzero_counter_hook(module, inputs, output): """ Module hook used to count the number of nonzero floating point values from all the tensors used by the given network during inference. This hook will be called every time before :func:`forward` is invoked. See :func:`torch.nn.Module.register_forward_hook...
Module hook used to count the number of nonzero floating point values from all the tensors used by the given network during inference. This hook will be called every time before :func:`forward` is invoked. See :func:`torch.nn.Module.register_forward_hook`
def histogram_info(self) -> dict:
    """Return extra information about the histogram."""
    info = {
        key: getattr(self, key)
        for key in ('support_atoms', 'atom_delta', 'vmin', 'vmax')
    }
    # ``atoms`` is exposed under the external key name ``num_atoms``.
    info['num_atoms'] = self.atoms
    return info
Return extra information about histogram
def list_member_topics(self, member_id): ''' a method to retrieve a list of topics member follows :param member_id: integer with meetup member id :return: dictionary with list of topic details inside [json] key topic_details = self.objects.topic.schema ''' # htt...
a method to retrieve a list of topics member follows :param member_id: integer with meetup member id :return: dictionary with list of topic details inside [json] key topic_details = self.objects.topic.schema
def animate(self, duration = None, easing = None, on_complete = None, on_update = None, round = False, **kwargs): """Request parent Scene to Interpolate attributes using the internal tweener. Specify sprite's attributes that need changing. `duration` defaults to 0.4 seconds...
Request parent Scene to Interpolate attributes using the internal tweener. Specify sprite's attributes that need changing. `duration` defaults to 0.4 seconds and `easing` to cubic in-out (for others see pytweener.Easing class). Example:: # tween some_sprite to c...
def diff_files(left, right, diff_options=None, formatter=None):
    """Diff the XML contained in two files.

    *left* and *right* may be filenames or open streams; both are parsed
    with ``etree.parse`` before being handed to the generic differ.
    """
    parse = etree.parse
    return _diff(parse, left, right,
                 diff_options=diff_options, formatter=formatter)
Takes two filenames or streams, and diffs the XML in those files
def unicode_decode(data, encoding_list): """ Decode string data with one or more encodings, trying sequentially :param data: bytes: encoded string data :param encoding_list: list[string] or string: encoding names :return: string: decoded string """ assert encoding_list, 'encodings must not b...
Decode string data with one or more encodings, trying sequentially :param data: bytes: encoded string data :param encoding_list: list[string] or string: encoding names :return: string: decoded string
def build_keyjar(key_conf, kid_template="", keyjar=None, owner=''): """ Builds a :py:class:`oidcmsg.key_jar.KeyJar` instance or adds keys to an existing KeyJar based on a key specification. An example of such a specification:: keys = [ {"type": "RSA", "key": "cp_keys/key.pem", ...
Builds a :py:class:`oidcmsg.key_jar.KeyJar` instance or adds keys to an existing KeyJar based on a key specification. An example of such a specification:: keys = [ {"type": "RSA", "key": "cp_keys/key.pem", "use": ["enc", "sig"]}, {"type": "EC", "crv": "P-256", "use": ["sig"...
def from_plugin_classname(plugin_classname, exclude_lines_regex=None, **kwargs): """Initializes a plugin class, given a classname and kwargs. :type plugin_classname: str :param plugin_classname: subclass of BasePlugin. :type exclude_lines_regex: str|None :param exclude_lines_regex: optional regex ...
Initializes a plugin class, given a classname and kwargs. :type plugin_classname: str :param plugin_classname: subclass of BasePlugin. :type exclude_lines_regex: str|None :param exclude_lines_regex: optional regex for ignored lines.
def find_objects(config=None, config_path=None, regex=None, saltenv='base'): ''' Return all the line objects that match the expression in the ``regex`` argument. .. warning:: This function is mostly valuable when invoked from other Salt components (i.e., execution modules, states, templ...
Return all the line objects that match the expression in the ``regex`` argument. .. warning:: This function is mostly valuable when invoked from other Salt components (i.e., execution modules, states, templates etc.). For CLI usage, please consider using :py:func:`ciscoconfparse...
def register(self, cmd: Type[Command]) -> None:
    """Register a new IMAP command.

    Args:
        cmd: The new command type.
    """
    # The command class itself declares the name it is dispatched under.
    key = cmd.command
    self.commands[key] = cmd
Register a new IMAP command. Args: cmd: The new command type.
def get(url, params={}): """Invoke an HTTP GET request on a url Args: url (string): URL endpoint to request params (dict): Dictionary of url parameters Returns: dict: JSON response as a dictionary """ request_url = url if len(params):...
Invoke an HTTP GET request on a url Args: url (string): URL endpoint to request params (dict): Dictionary of url parameters Returns: dict: JSON response as a dictionary
def remove_all(self, item):
    # type: (Any) -> None
    """Remove every occurrence of the parameter.

    :param item: Value to delete from the WeakList.
    """
    target = self.ref(item)
    # Use the ``list`` methods directly so any overridden ones are bypassed;
    # keep removing until ``list.remove`` reports no match left.
    while True:
        try:
            list.remove(self, target)
        except ValueError:
            break
Remove all occurrences of the parameter. :param item: Value to delete from the WeakList.
def gtlike_spectrum_to_vectors(spectrum): """ Convert a pyLikelihood object to a python dictionary which can be easily saved to a file.""" parameters = pyLike.ParameterVector() spectrum.getParams(parameters) npar = max(parameters.size(), 10) o = {'param_names': np.zeros(npar, dtype='S32')...
Convert a pyLikelihood object to a python dictionary which can be easily saved to a file.