code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def post(self, request, *args, **kwargs): """ Checks for expect event types before continuing """ serializer = EventSerializer(data=request.data) if not serializer.is_valid(): return Response( {"accepted": False, "reason": serializer.errors}, status=4...
Checks for expect event types before continuing
def catalogFactory(name, **kwargs): """ Factory for various catalogs. """ fn = lambda member: inspect.isclass(member) and member.__module__==__name__ catalogs = odict(inspect.getmembers(sys.modules[__name__], fn)) if name not in list(catalogs.keys()): msg = "%s not found in catalogs:\n ...
Factory for various catalogs.
def _Login(): """Login to retrieve bearer token and set default accoutn and location aliases.""" if not clc.v2.V2_API_USERNAME or not clc.v2.V2_API_PASSWD: clc.v1.output.Status('ERROR',3,'V2 API username and password not provided') raise(clc.APIV2NotEnabled) session = clc._REQUESTS_SESSION session.header...
Login to retrieve bearer token and set default accoutn and location aliases.
def cycles(cls, **kwargs): """ Classmethod for convienence in returning both the sunrise and sunset based on a location and date. Always calculates the sunrise and sunset on the given date, no matter the time passed into the function in the datetime object. Parameters: ...
Classmethod for convienence in returning both the sunrise and sunset based on a location and date. Always calculates the sunrise and sunset on the given date, no matter the time passed into the function in the datetime object. Parameters: loc = Location4D (object) O...
def min(self): """ Returns the minimum value of the domain. :rtype: `float` or `np.inf` """ return int(self._min) if not np.isinf(self._min) else self._min
Returns the minimum value of the domain. :rtype: `float` or `np.inf`
async def _execute( self, transforms: List["OutputTransform"], *args: bytes, **kwargs: bytes ) -> None: """Executes this request with the given output transforms.""" self._transforms = transforms try: if self.request.method not in self.SUPPORTED_METHODS: r...
Executes this request with the given output transforms.
def len(self,resolution=1.0,units=None,conversion_function=convert_time, end_at_end=True): """ Calculates the length of the Label Dimension from its minimum, maximum and wether it is discrete. `resolution`: `units`: output units `conversion_function`: ...
Calculates the length of the Label Dimension from its minimum, maximum and wether it is discrete. `resolution`: `units`: output units `conversion_function`: `end_at_end`: additional switch for continuous behaviour
def upsert(self, name, value=None, seq=None): """Add one name/value entry to the main context of the rolne, but only if an entry with that name does not already exist. If the an entry with name exists, then the first entry found has it's value changed. NOTE: the upsert only upd...
Add one name/value entry to the main context of the rolne, but only if an entry with that name does not already exist. If the an entry with name exists, then the first entry found has it's value changed. NOTE: the upsert only updates the FIRST entry with the name found. The me...
def run_forever(self): """ This method is used to run the websocket app continuously. It will execute callbacks as defined and try to stay connected with the provided APIs """ cnt = 0 while True: cnt += 1 self.url = next(self.urls) ...
This method is used to run the websocket app continuously. It will execute callbacks as defined and try to stay connected with the provided APIs
def __populate_repositories_of_interest(self, username): """Method to populate repositories which will be used to suggest repositories for the user. For this purpose we use two kinds of repositories. 1. Repositories starred by user him/herself. 2. Repositories starred by the use...
Method to populate repositories which will be used to suggest repositories for the user. For this purpose we use two kinds of repositories. 1. Repositories starred by user him/herself. 2. Repositories starred by the users followed by the user. :param username: Username for the ...
def get_indexes(self, default_indexes=None): """Returns the list of indexes to act on based on ES_INDEXES setting """ doctype = self.type.get_mapping_type_name() indexes = (settings.ES_INDEXES.get(doctype) or settings.ES_INDEXES['default']) if isinstance(index...
Returns the list of indexes to act on based on ES_INDEXES setting
def create_dialog(self): """ Create the dialog.""" box0 = QGroupBox('Info') self.name = FormStr() self.name.setText('sw') self.idx_group.activated.connect(self.update_channels) form = QFormLayout(box0) form.addRow('Event name', self.n...
Create the dialog.
def set_distribute_compositions(self, distribute_comps=None): """Sets the distribution rights. This sets distribute verbatim to ``true``. :param distribute_comps: right to distribute modifications :type distribute_comps: ``boolean`` :raise: ``InvalidArgument`` -- ``distribute_c...
Sets the distribution rights. This sets distribute verbatim to ``true``. :param distribute_comps: right to distribute modifications :type distribute_comps: ``boolean`` :raise: ``InvalidArgument`` -- ``distribute_comps`` is invalid :raise: ``NoAccess`` -- authorization failure ...
def p_field_id(self, p): '''field_id : INTCONSTANT ':' | ''' if len(p) == 3: if p[1] == 0: # Prevent users from ever using field ID 0. It's reserved for # internal use only. raise ThriftParserError( 'Lin...
field_id : INTCONSTANT ':' |
def list(declared, undeclared): """List configured queues.""" queues = current_queues.queues.values() if declared: queues = filter(lambda queue: queue.exists, queues) elif undeclared: queues = filter(lambda queue: not queue.exists, queues) queue_names = [queue.routing_key for queue i...
List configured queues.
def Uninstall(self, package_name, keep_data=False, timeout_ms=None): """Removes a package from the device. Args: package_name: Package name of target package. keep_data: whether to keep the data and cache directories timeout_ms: Expected timeout for pushing and installing....
Removes a package from the device. Args: package_name: Package name of target package. keep_data: whether to keep the data and cache directories timeout_ms: Expected timeout for pushing and installing. Returns: The pm uninstall output.
def rpc_get_completions(self, filename, source, offset): """Get a list of completion candidates for the symbol at offset. """ results = self._call_backend("rpc_get_completions", [], filename, get_source(source), offset) # Uniquify by name res...
Get a list of completion candidates for the symbol at offset.
def _get_parsing_plan_for_multifile_children(self, obj_on_fs: PersistedObject, desired_type: Type[Any], logger: Logger) -> Dict[str, Any]: """ Implementation of AnyParser API """ raise Exception('This should never happen, since this parser...
Implementation of AnyParser API
def _committors(sources, sinks, tprob): """ Get the forward committors of the reaction sources -> sinks. Parameters ---------- sources : array_like, int The set of unfolded/reactant states. sinks : array_like, int The set of folded/product states. tprob : np.ndarray ...
Get the forward committors of the reaction sources -> sinks. Parameters ---------- sources : array_like, int The set of unfolded/reactant states. sinks : array_like, int The set of folded/product states. tprob : np.ndarray Transition matrix Returns ------- forwa...
async def shuffle(self): """The shuffle command""" self.logger.debug("shuffle command") if not self.state == 'ready': return self.statuslog.debug("Shuffling") random.shuffle(self.queue) self.update_queue() self.statuslog.debug("Shuffled")
The shuffle command
async def prepare_container(self, size, container, elem_type=None): """ Prepares container for serialization :param size: :param container: :return: """ if not self.writing: if container is None: return gen_elem_array(size, elem_type) ...
Prepares container for serialization :param size: :param container: :return:
def select_peaks(data, events, limit): """Check whether event satisfies amplitude limit. Parameters ---------- data : ndarray (dtype='float') vector with data events : ndarray (dtype='int') N x 2+ matrix with peak/trough in second position limit : float low and high limi...
Check whether event satisfies amplitude limit. Parameters ---------- data : ndarray (dtype='float') vector with data events : ndarray (dtype='int') N x 2+ matrix with peak/trough in second position limit : float low and high limit for spindle duration Returns ------...
def update_installed_files(self, installed_files): """ Track the files installed by a package so pip knows how to remove the package. This method is used by :func:`install_binary_dist()` (which collects the list of installed files for :func:`update_installed_files()`). :param i...
Track the files installed by a package so pip knows how to remove the package. This method is used by :func:`install_binary_dist()` (which collects the list of installed files for :func:`update_installed_files()`). :param installed_files: A list of absolute pathnames (strings) with the ...
def debugDumpAttr(self, output, depth): """Dumps debug information for the attribute """ libxml2mod.xmlDebugDumpAttr(output, self._o, depth)
Dumps debug information for the attribute
def profile(self, tile=None): """ Create a metadata dictionary for rasterio. Parameters ---------- tile : ``BufferedTile`` Returns ------- metadata : dictionary output profile dictionary used for rasterio. """ dst_metadata = d...
Create a metadata dictionary for rasterio. Parameters ---------- tile : ``BufferedTile`` Returns ------- metadata : dictionary output profile dictionary used for rasterio.
def _render_templates(files, filetable, written_files, force, open_mode='w'): """Write template contents from filetable into files. Using filetable for the rendered templates, and the list of files, render all the templates into actual files on disk, forcing to overwrite the file as appropriate, and us...
Write template contents from filetable into files. Using filetable for the rendered templates, and the list of files, render all the templates into actual files on disk, forcing to overwrite the file as appropriate, and using the given open mode for the file.
def make_logger(scraper): """ Create two log handlers, one to output info-level ouput to the console, the other to store all logging in a JSON file which will later be used to generate reports. """ logger = logging.getLogger('') logger.setLevel(logging.DEBUG) requests_log = logging.getLogger("...
Create two log handlers, one to output info-level ouput to the console, the other to store all logging in a JSON file which will later be used to generate reports.
def do_format(value, *args, **kwargs): """ Apply python string formatting on an object: .. sourcecode:: jinja {{ "%s - %s"|format("Hello?", "Foo!") }} -> Hello? - Foo! """ if args and kwargs: raise FilterArgumentError('can\'t handle positional and keyword ' ...
Apply python string formatting on an object: .. sourcecode:: jinja {{ "%s - %s"|format("Hello?", "Foo!") }} -> Hello? - Foo!
def checkInputParameter(method, parameters, validParameters, requiredParameters=None): """ Helper function to check input by using before sending to the server :param method: Name of the API :type method: str :param validParameters: Allow parameters for the API call :type validParameters: list ...
Helper function to check input by using before sending to the server :param method: Name of the API :type method: str :param validParameters: Allow parameters for the API call :type validParameters: list :param requiredParameters: Required parameters for the API call (Default: None) :type requi...
def construct_rest_of_world(self, excluded, name=None, fp=None, geom=True): """Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).""" for location in excluded...
Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).
def gen3d_conformer(self): """ A combined method to first generate 3D structures from 0D or 2D structures and then find the minimum energy conformer: 1. Use OBBuilder to create a 3D structure using rules and ring templates 2. Do 250 steps of a steepest descent geometry optimizati...
A combined method to first generate 3D structures from 0D or 2D structures and then find the minimum energy conformer: 1. Use OBBuilder to create a 3D structure using rules and ring templates 2. Do 250 steps of a steepest descent geometry optimization with the MMFF94 forcefield ...
def fig_to_svg(fig): """Helper function to convert matplotlib figure to SVG string Returns: str: figure as SVG string """ buf = io.StringIO() fig.savefig(buf, format='svg') buf.seek(0) return buf.getvalue()
Helper function to convert matplotlib figure to SVG string Returns: str: figure as SVG string
def restart(self): """ Restart the console This is needed when we switch projects to update PYTHONPATH and the selected interpreter """ self.master_clients = 0 self.create_new_client_if_empty = False for i in range(len(self.clients)): ...
Restart the console This is needed when we switch projects to update PYTHONPATH and the selected interpreter
def _handle_browse(self, relpath, params): """Handle requests to browse the filesystem under the build root.""" abspath = os.path.normpath(os.path.join(self._root, relpath)) if not abspath.startswith(self._root): raise ValueError # Prevent using .. to get files from anywhere other than root. if o...
Handle requests to browse the filesystem under the build root.
def rebin(self, *factors, **kwargs): """Return a new histogram that is 'rebinned' (zoomed) by factors (tuple of floats) along each dimensions factors: tuple with zoom factors along each axis. e.g. 2 = double number of bins, 0.5 = halve them. order: Order for spline interpolation in scipy.ndi...
Return a new histogram that is 'rebinned' (zoomed) by factors (tuple of floats) along each dimensions factors: tuple with zoom factors along each axis. e.g. 2 = double number of bins, 0.5 = halve them. order: Order for spline interpolation in scipy.ndimage.zoom. Defaults to linear interpolation (or...
def _simple_dispatch(self, name, params): """ Dispatch method """ try: # Internal method func = self.funcs[name] except KeyError: # Other method pass else: # Internal method found if isinstance(params...
Dispatch method
def expand_labels(labels, subtopic=False): '''Expand a set of labels that define a connected component. ``labels`` must define a *positive* connected component: it is all of the edges that make up the *single* connected component in the :class:`LabelStore`. expand will ignore subtopic assignments, and ...
Expand a set of labels that define a connected component. ``labels`` must define a *positive* connected component: it is all of the edges that make up the *single* connected component in the :class:`LabelStore`. expand will ignore subtopic assignments, and annotator_id will be an arbitrary one selected...
def path(self, which=None): """Extend ``nailgun.entity_mixins.Entity.path``. The format of the returned path depends on the value of ``which``: bulk_resume /foreman_tasks/api/tasks/bulk_resume bulk_search /foreman_tasks/api/tasks/bulk_search summary ...
Extend ``nailgun.entity_mixins.Entity.path``. The format of the returned path depends on the value of ``which``: bulk_resume /foreman_tasks/api/tasks/bulk_resume bulk_search /foreman_tasks/api/tasks/bulk_search summary /foreman_tasks/api/tasks/summar...
def concatenate_matrices(*matrices): """Return concatenation of series of transformation matrices. >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5 >>> numpy.allclose(M, concatenate_matrices(M)) True >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T)) True """ M = nu...
Return concatenation of series of transformation matrices. >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5 >>> numpy.allclose(M, concatenate_matrices(M)) True >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T)) True
def _augment_text_w_syntactic_info( self, text, text_layer ): ''' Augments given Text object with the syntactic information from the *text_layer*. More specifically, adds information about SYNTAX_LABEL, SYNTAX_HEAD and DEPREL to each token in the Text object; ...
Augments given Text object with the syntactic information from the *text_layer*. More specifically, adds information about SYNTAX_LABEL, SYNTAX_HEAD and DEPREL to each token in the Text object; (!) Note: this method is added to provide some initial ...
def auctionWS(symbols=None, on_data=None): '''https://iextrading.com/developer/docs/#auction''' symbols = _strToList(symbols) sendinit = ({'symbols': symbols, 'channels': ['auction']},) return _stream(_wsURL('deep'), sendinit, on_data)
https://iextrading.com/developer/docs/#auction
def read(address, length): """ Prepares an i2c read transaction. :param address: Slave address. :type: address: int :param length: Number of bytes to read. :type: length: int :return: New :py:class:`i2c_msg` instance for read operation. :rtype: :py:class:...
Prepares an i2c read transaction. :param address: Slave address. :type: address: int :param length: Number of bytes to read. :type: length: int :return: New :py:class:`i2c_msg` instance for read operation. :rtype: :py:class:`i2c_msg`
def insert(self, loc, column, value, allow_duplicates=False): """ Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int...
Insert column into DataFrame at specified location. Raises a ValueError if `column` is already contained in the DataFrame, unless `allow_duplicates` is set to True. Parameters ---------- loc : int Insertion index. Must verify 0 <= loc <= len(columns) column ...
def _set_binner(self): """ Setup our binners. Cache these as we are an immutable object """ if self.binner is None: self.binner, self.grouper = self._get_binner()
Setup our binners. Cache these as we are an immutable object
def display_data_item(self, data_item: DataItem, source_display_panel=None, source_data_item=None): """Display a new data item and gives it keyboard focus. Uses existing display if it is already displayed. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes """ fo...
Display a new data item and gives it keyboard focus. Uses existing display if it is already displayed. .. versionadded:: 1.0 Status: Provisional Scriptable: Yes
def run_script(pycode): """Run the Python in `pycode`, and return a dict of the resulting globals.""" # Fix up the whitespace in pycode. if pycode[0] == "\n": pycode = pycode[1:] pycode.rstrip() pycode = textwrap.dedent(pycode) # execute it. globs = {} six.exec_(pycode, globs, g...
Run the Python in `pycode`, and return a dict of the resulting globals.
def _check_status_errors(proto, content, error_traps=None): """Raises HTTPErrors based on error statuses sent from validator. Checks for common statuses and runs route specific error traps. """ if content.status == proto.OK: return try: if content.status ...
Raises HTTPErrors based on error statuses sent from validator. Checks for common statuses and runs route specific error traps.
def registerAtomType(self, parameters): """Register a new atom type. """ name = parameters['name'] if name in self._atomTypes: raise ValueError('Found multiple definitions for atom type: ' + name) atom_class = parameters['class'] mass = _convertParameterToNumber(param...
Register a new atom type.
def _lazy_migration(self, patch=None, meta=None, toa=None): """ Handle when a revision scheduling is turned onto a collection that was previously not scheduleable. This method will create the first revision for each object before its every used in the context of scheduling. :param dict ...
Handle when a revision scheduling is turned onto a collection that was previously not scheduleable. This method will create the first revision for each object before its every used in the context of scheduling. :param dict patch: The patch that should be used :param dict meta: Meta data for thi...
def variational_expectations(self, Y, m, v, gh_points=None, Y_metadata=None): """ Use Gauss-Hermite Quadrature to compute E_p(f) [ log p(y|f) ] d/dm E_p(f) [ log p(y|f) ] d/dv E_p(f) [ log p(y|f) ] where p(f) is a Gaussian with mean m and variance v. The shapes...
Use Gauss-Hermite Quadrature to compute E_p(f) [ log p(y|f) ] d/dm E_p(f) [ log p(y|f) ] d/dv E_p(f) [ log p(y|f) ] where p(f) is a Gaussian with mean m and variance v. The shapes of Y, m and v should match. if no gh_points are passed, we construct them using defualt ...
def CleanseRawStrings(raw_lines): """Removes C++11 raw strings from lines. Before: static const char kData[] = R"( multi-line string )"; After: static const char kData[] = "" (replaced by blank line) ""; Args: raw_lines: list of raw lines. Return...
Removes C++11 raw strings from lines. Before: static const char kData[] = R"( multi-line string )"; After: static const char kData[] = "" (replaced by blank line) ""; Args: raw_lines: list of raw lines. Returns: list of lines with C++11 raw str...
def set_redis(self, redis_url, redis_timeout=10): """ Realiza a autenticação no servidor Redis utilizando a URL informada. Args: redis_url (str): URL para conectar ao servidor Redis, exemplo: redis://user:password@localhost:6379/2. redis_timeout (int): O timeout padrão (em segun...
Realiza a autenticação no servidor Redis utilizando a URL informada. Args: redis_url (str): URL para conectar ao servidor Redis, exemplo: redis://user:password@localhost:6379/2. redis_timeout (int): O timeout padrão (em segundos). kwargs (dict): Raises: ...
def authenticated_userid(request): """Helper function that can be used in ``db_key`` to support `self` as a collection key. """ user = getattr(request, 'user', None) key = user.pk_field() return getattr(user, key)
Helper function that can be used in ``db_key`` to support `self` as a collection key.
def git(self, *arguments): """ Return (exit code, output) from git. """ process = subprocess.Popen(['git'] + list(arguments), stdout=subprocess.PIPE, cwd=self.cwd) out = process.communicate()[0].decode('UTF-8')...
Return (exit code, output) from git.
def create_arrow(rows, cols, radius=0.1, length=1.0, cone_radius=None, cone_length=None): """Create a 3D arrow using a cylinder plus cone Parameters ---------- rows : int Number of rows. cols : int Number of columns. radius : float Base cylinder radius. ...
Create a 3D arrow using a cylinder plus cone Parameters ---------- rows : int Number of rows. cols : int Number of columns. radius : float Base cylinder radius. length : float Length of the arrow. cone_radius : float Radius of the cone base. ...
def get_version(module='spyder_notebook'): """Get version.""" with open(os.path.join(HERE, module, '_version.py'), 'r') as f: data = f.read() lines = data.split('\n') for line in lines: if line.startswith('VERSION_INFO'): version_tuple = ast.literal_eval(line.split('=')[-1].s...
Get version.
def remove(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs): ''' .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. T...
.. versionchanged:: 2015.8.12,2016.3.3,2016.11.0 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd from killing any emerge commands spawned by Sa...
def register_writer(klass): """ Add engine to the excel writer registry.io.excel. You must use this method to integrate with ``to_excel``. Parameters ---------- klass : ExcelWriter """ if not callable(klass): raise ValueError("Can only register callables as engines") engine...
Add engine to the excel writer registry.io.excel. You must use this method to integrate with ``to_excel``. Parameters ---------- klass : ExcelWriter
def get_random_label(): """ Get a random label string to use when clustering jobs. """ return ''.join(random.choice(string.ascii_uppercase + string.digits) \ for _ in range(15))
Get a random label string to use when clustering jobs.
def _reset_bbox(self): """This function should only be called internally. It resets the viewers bounding box based on changes to pan or scale. """ scale_x, scale_y = self.get_scale_xy() pan_x, pan_y = self.get_pan(coord='data')[:2] win_wd, win_ht = self.get_window_size()...
This function should only be called internally. It resets the viewers bounding box based on changes to pan or scale.
def set_verify_depth(self, depth): """ Set the maximum depth for the certificate chain verification that shall be allowed for this Context object. :param depth: An integer specifying the verify depth :return: None """ if not isinstance(depth, integer_types): ...
Set the maximum depth for the certificate chain verification that shall be allowed for this Context object. :param depth: An integer specifying the verify depth :return: None
def apply_noise_model(prog, noise_model): """ Apply a noise model to a program and generated a 'noisy-fied' version of the program. :param Program prog: A Quil Program object. :param NoiseModel noise_model: A NoiseModel, either generated from an ISA or from a simple decoherence model. :retu...
Apply a noise model to a program and generated a 'noisy-fied' version of the program. :param Program prog: A Quil Program object. :param NoiseModel noise_model: A NoiseModel, either generated from an ISA or from a simple decoherence model. :return: A new program translated to a noisy gateset and wi...
def drawing_update(self): '''update line drawing''' from MAVProxy.modules.mavproxy_map import mp_slipmap if self.draw_callback is None: return self.draw_line.append(self.click_position) if len(self.draw_line) > 1: self.mpstate.map.add_object(mp_slipmap.Sli...
update line drawing
def register_multi_flags_validator(flag_names, multi_flags_checker, message='Flags validation failed', flag_values=FLAGS): """Adds a constraint to multiple flags. The constraint is validated when flags are init...
Adds a constraint to multiple flags. The constraint is validated when flags are initially parsed, and after each change of the corresponding flag's value. Args: flag_names: [str], a list of the flag names to be checked. multi_flags_checker: callable, a function to validate the flag. input - dict...
def profile(*args, **kwargs): """ http endpoint decorator """ if _is_initialized(): def wrapper(f): return wrapHttpEndpoint(f) return wrapper raise Exception( "before measuring anything, you need to call init_app()")
http endpoint decorator
def __intermediate_bridge(self, interface, i): """ converts NetJSON bridge to UCI intermediate data structure """ # ensure type "bridge" is only given to one logical interface if interface['type'] == 'bridge' and i < 2: bridge_members = ' '.join(interface.pop(...
converts NetJSON bridge to UCI intermediate data structure
def get_bank_form(self, *args, **kwargs): """Pass through to provider BankAdminSession.get_bank_form_for_update""" # Implemented from kitosid template for - # osid.resource.BinAdminSession.get_bin_form_for_update_template # This method might be a bit sketchy. Time will tell. if i...
Pass through to provider BankAdminSession.get_bank_form_for_update
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ...
Generates the shell command required to execute this task instance. :param dag_id: DAG ID :type dag_id: unicode :param task_id: Task ID :type task_id: unicode :param execution_date: Execution date for the task :type execution_date: datetime :param mark_success: W...
def read(self, src): """ Download GeoJSON file of US counties from url (S3 bucket) """ geojson = None if not self.is_valid_src(src): error = "File < {0} > does not exists or does start with 'http'." raise ValueError(error.format(src)) if not self.is_url(src): ...
Download GeoJSON file of US counties from url (S3 bucket)
def update_user_ns(self, result): """Update user_ns with various things like _, __, _1, etc.""" # Avoid recursive reference when displaying _oh/Out if result is not self.shell.user_ns['_oh']: if len(self.shell.user_ns['_oh']) >= self.cache_size and self.do_full_cache: ...
Update user_ns with various things like _, __, _1, etc.
def CheckCronJobAccess(self, username, cron_job_id): """Checks whether a given user can access given cron job.""" self._CheckAccess( username, str(cron_job_id), rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CRON_JOB)
Checks whether a given user can access given cron job.
def x_11paths_authorization(app_id, secret, context, utc=None): """ Calculate the authentication headers to be sent with a request to the API. :param app_id: :param secret: :param context :param utc: :return: array a map with the Authorization and Date headers needed to sign a Latch API req...
Calculate the authentication headers to be sent with a request to the API. :param app_id: :param secret: :param context :param utc: :return: array a map with the Authorization and Date headers needed to sign a Latch API request
def locked_context(self, key=None, default=dict): """ Executor context is a shared memory object. All workers share this. It needs a lock. Its used like this: with executor.context() as context: visited = context['visited'] visited.append(state.cpu.PC) ...
Executor context is a shared memory object. All workers share this. It needs a lock. Its used like this: with executor.context() as context: visited = context['visited'] visited.append(state.cpu.PC) context['visited'] = visited
def download_and_calibrate_parallel(list_of_ids, n=None): """Download and calibrate in parallel. Parameters ---------- list_of_ids : list, optional container with img_ids to process n : int Number of cores for the parallel processing. Default: n_cores_system//2 """ setup_clu...
Download and calibrate in parallel. Parameters ---------- list_of_ids : list, optional container with img_ids to process n : int Number of cores for the parallel processing. Default: n_cores_system//2
def _tp_relfq_name(tp, tp_name=None, assumed_globals=None, update_assumed_globals=None, implicit_globals=None): # _type: (type, Optional[Union[Set[Union[type, types.ModuleType]], Mapping[Union[type, types.ModuleType], str]]], Optional[bool]) -> str """Provides the fully qualified name of a type rela...
Provides the fully qualified name of a type relative to a set of modules and types that is assumed as globally available. If assumed_globals is None this always returns the fully qualified name. If update_assumed_globals is True, this will return the plain type name, but will add the type to assumed_glo...
def extract_operations(self, migrations): """ Extract SQL operations from the given migrations """ operations = [] for migration in migrations: for operation in migration.operations: if isinstance(operation, RunSQL): statements = s...
Extract SQL operations from the given migrations
def geo_field(queryset): """Returns the GeometryField for a django or spillway GeoQuerySet.""" for field in queryset.model._meta.fields: if isinstance(field, models.GeometryField): return field raise exceptions.FieldDoesNotExist('No GeometryField found')
Returns the GeometryField for a django or spillway GeoQuerySet.
def transform(self, flip_x, flip_y, swap_xy): """Transform view of the image. .. note:: Transforming the image is generally faster than rotating, if rotating in 90 degree increments. Also see :meth:`rotate`. Parameters ---------- flipx, flipy : bool ...
Transform view of the image. .. note:: Transforming the image is generally faster than rotating, if rotating in 90 degree increments. Also see :meth:`rotate`. Parameters ---------- flipx, flipy : bool If `True`, flip the image in the X and Y axes, r...
def resize_old(self, block_size, order=0, mode='constant', cval=False): ''' geo.resize(new_shape, order=0, mode='constant', cval=np.nan, preserve_range=True) Returns resized georaster ''' if not cval: cval = np.nan if (self.raster.dtype.name.find('float') != ...
geo.resize(new_shape, order=0, mode='constant', cval=np.nan, preserve_range=True) Returns resized georaster
def get_listed_projects(): """Find the projects listed in the Home Documentation's index.md file Returns: set(str): projects' names, with the '/' in their beginings """ index_path = Path().resolve() / "docs" / "index.md" with open(index_path, "r") as index_file: lines = index_fi...
Find the projects listed in the Home Documentation's index.md file Returns: set(str): projects' names, with the '/' in their beginnings
def fetch_wallet_balances(wallets, fiat, **modes): """ Wallets must be list of two item lists. First item is crypto, second item is the address. example: [ ['btc', '1PZ3Ps9RvCmUW1s1rHE25FeR8vtKUrhEai'], ['ltc', 'Lb78JDGxMcih1gs3AirMeRW6jaG5V9hwFZ'] ] """ price_fetch = set([x...
Wallets must be list of two item lists. First item is crypto, second item is the address. example: [ ['btc', '1PZ3Ps9RvCmUW1s1rHE25FeR8vtKUrhEai'], ['ltc', 'Lb78JDGxMcih1gs3AirMeRW6jaG5V9hwFZ'] ]
def real_ip(self):
    """The actual public IP of this host.

    Lazily resolved: the first access queries the ICANHAZIP service and
    caches the answer in ``self._real_ip``; later accesses return the
    cached value without any network traffic.
    """
    # Guard clause: serve the cached value when we already have one.
    if self._real_ip is not None:
        return self._real_ip
    self._real_ip = self._get_response_text(get(ICANHAZIP))
    return self._real_ip
The actual public IP of this host.
def get_supported_currency_choices(api_key): """ Pull a stripe account's supported currencies and returns a choices tuple of those supported currencies. :param api_key: The api key associated with the account from which to pull data. :type api_key: str """ import stripe stripe.api_key = api_key account = str...
Pull a stripe account's supported currencies and returns a choices tuple of those supported currencies. :param api_key: The api key associated with the account from which to pull data. :type api_key: str
def add_fields( layer, absolute_values, static_fields, dynamic_structure): """Function to add fields needed in the output layer. :param layer: The vector layer. :type layer: QgsVectorLayer :param absolute_values: The absolute value structure. :type absolute_values: dict :param static_...
Function to add fields needed in the output layer. :param layer: The vector layer. :type layer: QgsVectorLayer :param absolute_values: The absolute value structure. :type absolute_values: dict :param static_fields: The list of static fields to add. :type static_fields: list :param dynami...
def __setWildcardSymbol(self, value):
    """Setter for ``self.__wildcardSymbol``.

    Accepts any non-empty, non-whitespace string (a single char or a
    longer symbol). Invalid input leaves the attribute unchanged and is
    reported through ``view.Tli.showErrors``.

    :param value: the wildcard symbol to store; must be a non-blank str.
    """
    errors = []
    # BUG FIX: the original tested `not value is str`, an identity
    # comparison against the *type* `str`, which is True for every real
    # string and so never validated the type; worse, non-string input
    # (e.g. an int) crashed with AttributeError on `.split()`.
    # `isinstance` plus short-circuit `or` fixes both: `.split()` is
    # only reached for actual strings, and non-strings now produce the
    # intended error message instead of a crash.
    if not isinstance(value, str) or not value.split():
        errors.append('wildcardSymbol_ERROR : Symbol : must be char or string!')
    else:
        self.__wildcardSymbol = value
    if errors:
        view.Tli.showErrors('SymbolError', errors)
self.__wildcardSymbol variable setter
def ignore_missing_email_protection_eku_cb(ok, ctx): """ For verifying PKCS7 signature, m2Crypto uses OpenSSL's PKCS7_verify(). The latter requires that ExtendedKeyUsage extension, if present, contains 'emailProtection' OID. (Is it because S/MIME is/was the primary use case for PKCS7?) We do not...
For verifying PKCS7 signature, m2Crypto uses OpenSSL's PKCS7_verify(). The latter requires that ExtendedKeyUsage extension, if present, contains 'emailProtection' OID. (Is it because S/MIME is/was the primary use case for PKCS7?) We do not want to fail the verification in this case. At present, M2Cr...
def interactive(): """Interactive classifier.""" global n if request.method == 'GET' and request.args.get('heartbeat', '') != "": return request.args.get('heartbeat', '') if request.method == 'POST': logging.warning('POST to /interactive is deprecated. ' 'Use /wor...
Interactive classifier.
def can_edit(self, user=None, request=None): """ Define if a user can edit or not the instance, according to his account or the request. """ can = False if request and not self.owner: if (getattr(settings, "LEAFLET_STORAGE_ALLOW_ANONYMOUS", False) ...
Define if a user can edit or not the instance, according to his account or the request.
def cache_cluster_exists(name, conn=None, region=None, key=None, keyid=None, profile=None): ''' Check to see if a cache cluster exists. Example: .. code-block:: bash salt myminion boto3_elasticache.cache_cluster_exists myelasticache ''' return bool(describe_cache_clusters(name=name, c...
Check to see if a cache cluster exists. Example: .. code-block:: bash salt myminion boto3_elasticache.cache_cluster_exists myelasticache
def tidy_all_the_variables(host, inventory_mgr): ''' removes all overridden and inherited variables from hosts and groups ''' global _vars _vars = dict() _vars[host] = inventory_mgr.inventory.get_host_vars(host) for group in host.get_groups(): remove_inherited_and_overridden_vars(_va...
removes all overridden and inherited variables from hosts and groups
def put_comments(self, resource, comment, timeout=None): """ Post a comment on a file or URL. The initial idea of VirusTotal Community was that users should be able to make comments on files and URLs, the comments may be malware analyses, false positive flags, disinfection instructions, etc. ...
Post a comment on a file or URL. The initial idea of VirusTotal Community was that users should be able to make comments on files and URLs, the comments may be malware analyses, false positive flags, disinfection instructions, etc. Imagine you have some automatic setup that can produce interes...
def getdrawings():
    """Return a JSON response containing every stored drawing.

    Each ``Info`` row's ``contents`` column holds a JSON-encoded sketch;
    all of them are decoded and shipped back under the ``drawings`` key.
    """
    records = Info.query.all()
    return jsonify(drawings=[json.loads(record.contents) for record in records])
Get all the drawings.
def to_python(self, value: Union[Dict[str, int], int, None]) -> LocalizedIntegerValue:
    """Converts the value from a database value into a Python value."""
    # Let the parent class deserialize the raw database value, then wrap
    # the result in the localized integer container in a single step.
    return self._convert_localized_value(super().to_python(value))
Converts the value from a database value into a Python value.
def path(self, category = None, image = None, feature = None): """ Constructs the path to categories, images and features. This path function assumes that the following storage scheme is used on the hard disk to access categories, images and features: - categories: /impath/categor...
Constructs the path to categories, images and features. This path function assumes that the following storage scheme is used on the hard disk to access categories, images and features: - categories: /impath/category - images: /impath/category/category_image.png -...
def run(command, parser, cl_args, unknown_args): """ run command """ location = cl_args['cluster/[role]/[env]'].split('/') if len(location) == 1: return show_cluster(cl_args, *location) elif len(location) == 2: return show_cluster_role(cl_args, *location) elif len(location) == 3: return show_clust...
run command
def Dependencies(lTOC, xtrapath=None, manifest=None): """ Expand LTOC to include all the closure of binary dependencies. LTOC is a logical table of contents, ie, a seq of tuples (name, path). Return LTOC expanded by all the binary dependencies of the entries in LTOC, except those listed in the modu...
Expand LTOC to include all the closure of binary dependencies. LTOC is a logical table of contents, ie, a seq of tuples (name, path). Return LTOC expanded by all the binary dependencies of the entries in LTOC, except those listed in the module global EXCLUDES manifest should be a winmanifest.Manifest ...
def bench_serpy(): """Beanchmark for 1000 objects with 2 fields. """ class FooSerializer(serpy.DictSerializer): """The serializer schema definition.""" # Use a Field subclass like IntField if you need more validation. attr_2 = serpy.IntField() attr_1 = serpy.StrField() ...
Benchmark for 1000 objects with 2 fields.
def to_float_with_default(value, default_value): """ Converts value into float or returns default when conversion is not possible. :param value: the value to convert. :param default_value: the default value. :return: float value or default value when conversion is not supporte...
Converts value into float or returns default when conversion is not possible. :param value: the value to convert. :param default_value: the default value. :return: float value or default value when conversion is not supported.
def send_stats(self, start, environ, response_interception, exception=None): """Send the actual timing stats. :param start: start time in seconds since the epoch as a floating point number :type start: float :param environ: wsgi environment :type environ: dict :param res...
Send the actual timing stats. :param start: start time in seconds since the epoch as a floating point number :type start: float :param environ: wsgi environment :type environ: dict :param response_interception: dictionary in form {'status': '<response status>', 'resp...
def read(self): """Read a wire format DNS message and build a dns.message.Message object.""" l = len(self.wire) if l < 12: raise ShortHeader (self.message.id, self.message.flags, qcount, ancount, aucount, adcount) = struct.unpack('!HHHHHH', self.wire[:12]) ...
Read a wire format DNS message and build a dns.message.Message object.