Dataset schema (column name, type, observed value or length range):

id                int32   (0 to 252k)
repo              string  (length 7 to 55)
path              string  (length 4 to 127)
func_name         string  (length 1 to 88)
original_string   string  (length 75 to 19.8k)
language          string  (1 class)
code              string  (length 75 to 19.8k)
code_tokens       list
docstring         string  (length 3 to 17.3k)
docstring_tokens  list
sha               string  (length 40)
url               string  (length 87 to 242)
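The rows below pair a function's source (`code`) with its natural-language `docstring`, together with tokenized forms and provenance (repo, path, sha, url). As a minimal sketch of how rows with this schema could be read, assuming the preview comes from a dataset hosted in the Hugging Face `datasets` format; the repo id used here is a placeholder, not the actual identifier of this dataset:

from datasets import load_dataset

# "user/code-search-dataset" is a placeholder repo id; substitute the real
# dataset identifier this preview was taken from.
ds = load_dataset("user/code-search-dataset", split="train")

row = ds[7900]  # each row is a dict keyed by the columns in the schema
print(row["repo"], row["path"], row["func_name"])
print(row["docstring"])           # natural-language summary of the function
print(row["code"][:200])          # source of the function (may be long)
print(row["code_tokens"][:10])    # token list parallel to `code`

The records that follow list their fields in the same order as the schema: id, repo, path, func_name, original_string, language, code, code_tokens, docstring, docstring_tokens, sha, url.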
7,900
robmarkcole/HASS-data-detective
detective/config.py
_stub_tag
def _stub_tag(constructor, node): """Stub a constructor with a dictionary.""" seen = getattr(constructor, "_stub_seen", None) if seen is None: seen = constructor._stub_seen = set() if node.tag not in seen: print("YAML tag {} is not supported".format(node.tag)) seen.add(node.tag...
python
def _stub_tag(constructor, node): """Stub a constructor with a dictionary.""" seen = getattr(constructor, "_stub_seen", None) if seen is None: seen = constructor._stub_seen = set() if node.tag not in seen: print("YAML tag {} is not supported".format(node.tag)) seen.add(node.tag...
[ "def", "_stub_tag", "(", "constructor", ",", "node", ")", ":", "seen", "=", "getattr", "(", "constructor", ",", "\"_stub_seen\"", ",", "None", ")", "if", "seen", "is", "None", ":", "seen", "=", "constructor", ".", "_stub_seen", "=", "set", "(", ")", "i...
Stub a constructor with a dictionary.
[ "Stub", "a", "constructor", "with", "a", "dictionary", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/config.py#L61-L72
7,901
robmarkcole/HASS-data-detective
detective/config.py
load_yaml
def load_yaml(fname): """Load a YAML file.""" yaml = YAML(typ="safe") # Compat with HASS yaml.allow_duplicate_keys = True # Stub HASS constructors HassSafeConstructor.name = fname yaml.Constructor = HassSafeConstructor with open(fname, encoding="utf-8") as conf_file: # If config...
python
def load_yaml(fname): """Load a YAML file.""" yaml = YAML(typ="safe") # Compat with HASS yaml.allow_duplicate_keys = True # Stub HASS constructors HassSafeConstructor.name = fname yaml.Constructor = HassSafeConstructor with open(fname, encoding="utf-8") as conf_file: # If config...
[ "def", "load_yaml", "(", "fname", ")", ":", "yaml", "=", "YAML", "(", "typ", "=", "\"safe\"", ")", "# Compat with HASS", "yaml", ".", "allow_duplicate_keys", "=", "True", "# Stub HASS constructors", "HassSafeConstructor", ".", "name", "=", "fname", "yaml", ".", ...
Load a YAML file.
[ "Load", "a", "YAML", "file", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/config.py#L89-L101
7,902
robmarkcole/HASS-data-detective
detective/config.py
db_url_from_hass_config
def db_url_from_hass_config(path): """Find the recorder database url from a HASS config dir.""" config = load_hass_config(path) default_path = os.path.join(path, "home-assistant_v2.db") default_url = "sqlite:///{}".format(default_path) recorder = config.get("recorder") if recorder: db_...
python
def db_url_from_hass_config(path): """Find the recorder database url from a HASS config dir.""" config = load_hass_config(path) default_path = os.path.join(path, "home-assistant_v2.db") default_url = "sqlite:///{}".format(default_path) recorder = config.get("recorder") if recorder: db_...
[ "def", "db_url_from_hass_config", "(", "path", ")", ":", "config", "=", "load_hass_config", "(", "path", ")", "default_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "\"home-assistant_v2.db\"", ")", "default_url", "=", "\"sqlite:///{}\"", ".", "...
Find the recorder database url from a HASS config dir.
[ "Find", "the", "recorder", "database", "url", "from", "a", "HASS", "config", "dir", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/config.py#L104-L122
7,903
robmarkcole/HASS-data-detective
detective/time.py
localize
def localize(dt): """Localize a datetime object to local time.""" if dt.tzinfo is UTC: return (dt + LOCAL_UTC_OFFSET).replace(tzinfo=None) # No TZ info so not going to assume anything, return as-is. return dt
python
def localize(dt): """Localize a datetime object to local time.""" if dt.tzinfo is UTC: return (dt + LOCAL_UTC_OFFSET).replace(tzinfo=None) # No TZ info so not going to assume anything, return as-is. return dt
[ "def", "localize", "(", "dt", ")", ":", "if", "dt", ".", "tzinfo", "is", "UTC", ":", "return", "(", "dt", "+", "LOCAL_UTC_OFFSET", ")", ".", "replace", "(", "tzinfo", "=", "None", ")", "# No TZ info so not going to assume anything, return as-is.", "return", "d...
Localize a datetime object to local time.
[ "Localize", "a", "datetime", "object", "to", "local", "time", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/time.py#L19-L24
7,904
robmarkcole/HASS-data-detective
detective/time.py
sqlalch_datetime
def sqlalch_datetime(dt): """Convert a SQLAlchemy datetime string to a datetime object.""" if isinstance(dt, str): return datetime.strptime(dt, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=UTC) if dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None: return dt.astimezone(UTC) return d...
python
def sqlalch_datetime(dt): """Convert a SQLAlchemy datetime string to a datetime object.""" if isinstance(dt, str): return datetime.strptime(dt, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=UTC) if dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None: return dt.astimezone(UTC) return d...
[ "def", "sqlalch_datetime", "(", "dt", ")", ":", "if", "isinstance", "(", "dt", ",", "str", ")", ":", "return", "datetime", ".", "strptime", "(", "dt", ",", "\"%Y-%m-%d %H:%M:%S.%f\"", ")", ".", "replace", "(", "tzinfo", "=", "UTC", ")", "if", "dt", "."...
Convert a SQLAlchemy datetime string to a datetime object.
[ "Convert", "a", "SQLAlchemy", "datetime", "string", "to", "a", "datetime", "object", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/time.py#L44-L50
7,905
robmarkcole/HASS-data-detective
detective/core.py
db_from_hass_config
def db_from_hass_config(path=None, **kwargs): """Initialize a database from HASS config.""" if path is None: path = config.find_hass_config() url = config.db_url_from_hass_config(path) return HassDatabase(url, **kwargs)
python
def db_from_hass_config(path=None, **kwargs): """Initialize a database from HASS config.""" if path is None: path = config.find_hass_config() url = config.db_url_from_hass_config(path) return HassDatabase(url, **kwargs)
[ "def", "db_from_hass_config", "(", "path", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "path", "is", "None", ":", "path", "=", "config", ".", "find_hass_config", "(", ")", "url", "=", "config", ".", "db_url_from_hass_config", "(", "path", ")", ...
Initialize a database from HASS config.
[ "Initialize", "a", "database", "from", "HASS", "config", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L14-L20
7,906
robmarkcole/HASS-data-detective
detective/core.py
stripped_db_url
def stripped_db_url(url): """Return a version of the DB url with the password stripped out.""" parsed = urlparse(url) if parsed.password is None: return url return parsed._replace( netloc="{}:***@{}".format(parsed.username, parsed.hostname) ).geturl()
python
def stripped_db_url(url): """Return a version of the DB url with the password stripped out.""" parsed = urlparse(url) if parsed.password is None: return url return parsed._replace( netloc="{}:***@{}".format(parsed.username, parsed.hostname) ).geturl()
[ "def", "stripped_db_url", "(", "url", ")", ":", "parsed", "=", "urlparse", "(", "url", ")", "if", "parsed", ".", "password", "is", "None", ":", "return", "url", "return", "parsed", ".", "_replace", "(", "netloc", "=", "\"{}:***@{}\"", ".", "format", "(",...
Return a version of the DB url with the password stripped out.
[ "Return", "a", "version", "of", "the", "DB", "url", "with", "the", "password", "stripped", "out", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L27-L36
7,907
robmarkcole/HASS-data-detective
detective/core.py
HassDatabase.perform_query
def perform_query(self, query, **params): """Perform a query, where query is a string.""" try: return self.engine.execute(query, params) except: print("Error with query: {}".format(query)) raise
python
def perform_query(self, query, **params): """Perform a query, where query is a string.""" try: return self.engine.execute(query, params) except: print("Error with query: {}".format(query)) raise
[ "def", "perform_query", "(", "self", ",", "query", ",", "*", "*", "params", ")", ":", "try", ":", "return", "self", ".", "engine", ".", "execute", "(", "query", ",", "params", ")", "except", ":", "print", "(", "\"Error with query: {}\"", ".", "format", ...
Perform a query, where query is a string.
[ "Perform", "a", "query", "where", "query", "is", "a", "string", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L74-L80
7,908
robmarkcole/HASS-data-detective
detective/core.py
HassDatabase.fetch_entities
def fetch_entities(self): """Fetch entities for which we have data.""" query = text( """ SELECT entity_id FROM states GROUP BY entity_id """ ) response = self.perform_query(query) # Parse the domains from the entities. ...
python
def fetch_entities(self): """Fetch entities for which we have data.""" query = text( """ SELECT entity_id FROM states GROUP BY entity_id """ ) response = self.perform_query(query) # Parse the domains from the entities. ...
[ "def", "fetch_entities", "(", "self", ")", ":", "query", "=", "text", "(", "\"\"\"\n SELECT entity_id\n FROM states\n GROUP BY entity_id\n \"\"\"", ")", "response", "=", "self", ".", "perform_query", "(", "query", ")", "# Parse the ...
Fetch entities for which we have data.
[ "Fetch", "entities", "for", "which", "we", "have", "data", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L82-L104
7,909
robmarkcole/HASS-data-detective
detective/core.py
HassDatabase.fetch_all_data
def fetch_all_data(self, limit=50000): """ Fetch data for all entities. """ # Query text query = text( """ SELECT domain, entity_id, state, last_changed FROM states WHERE state NOT IN ('unknown', 'unavailable') ...
python
def fetch_all_data(self, limit=50000): """ Fetch data for all entities. """ # Query text query = text( """ SELECT domain, entity_id, state, last_changed FROM states WHERE state NOT IN ('unknown', 'unavailable') ...
[ "def", "fetch_all_data", "(", "self", ",", "limit", "=", "50000", ")", ":", "# Query text", "query", "=", "text", "(", "\"\"\"\n SELECT domain, entity_id, state, last_changed\n FROM states\n WHERE\n state NOT IN ('unknown', 'unavailable')...
Fetch data for all entities.
[ "Fetch", "data", "for", "all", "entities", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L155-L179
7,910
robmarkcole/HASS-data-detective
detective/core.py
HassDatabase.parse_all_data
def parse_all_data(self): """Parses the master df.""" self._master_df.columns = ["domain", "entity", "state", "last_changed"] # Check if state is float and store in numericals category. self._master_df["numerical"] = self._master_df["state"].apply( lambda x: functions.isfloa...
python
def parse_all_data(self): """Parses the master df.""" self._master_df.columns = ["domain", "entity", "state", "last_changed"] # Check if state is float and store in numericals category. self._master_df["numerical"] = self._master_df["state"].apply( lambda x: functions.isfloa...
[ "def", "parse_all_data", "(", "self", ")", ":", "self", ".", "_master_df", ".", "columns", "=", "[", "\"domain\"", ",", "\"entity\"", ",", "\"state\"", ",", "\"last_changed\"", "]", "# Check if state is float and store in numericals category.", "self", ".", "_master_d...
Parses the master df.
[ "Parses", "the", "master", "df", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L181-L193
7,911
robmarkcole/HASS-data-detective
detective/core.py
NumericalSensors.correlations
def correlations(self): """ Calculate the correlation coefficients. """ corr_df = self._sensors_num_df.corr() corr_names = [] corrs = [] for i in range(len(corr_df.index)): for j in range(len(corr_df.index)): c_name = corr_df.index[i] ...
python
def correlations(self): """ Calculate the correlation coefficients. """ corr_df = self._sensors_num_df.corr() corr_names = [] corrs = [] for i in range(len(corr_df.index)): for j in range(len(corr_df.index)): c_name = corr_df.index[i] ...
[ "def", "correlations", "(", "self", ")", ":", "corr_df", "=", "self", ".", "_sensors_num_df", ".", "corr", "(", ")", "corr_names", "=", "[", "]", "corrs", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "corr_df", ".", "index", ")", ")",...
Calculate the correlation coefficients.
[ "Calculate", "the", "correlation", "coefficients", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L248-L272
7,912
robmarkcole/HASS-data-detective
detective/core.py
NumericalSensors.plot
def plot(self, entities: List[str]): """ Basic plot of a numerical sensor data. Parameters ---------- entities : a list of entities """ ax = self._sensors_num_df[entities].plot(figsize=[12, 6]) ax.legend(loc="center left", bbox_to_anchor=(1, 0.5)) ...
python
def plot(self, entities: List[str]): """ Basic plot of a numerical sensor data. Parameters ---------- entities : a list of entities """ ax = self._sensors_num_df[entities].plot(figsize=[12, 6]) ax.legend(loc="center left", bbox_to_anchor=(1, 0.5)) ...
[ "def", "plot", "(", "self", ",", "entities", ":", "List", "[", "str", "]", ")", ":", "ax", "=", "self", ".", "_sensors_num_df", "[", "entities", "]", ".", "plot", "(", "figsize", "=", "[", "12", ",", "6", "]", ")", "ax", ".", "legend", "(", "lo...
Basic plot of a numerical sensor data. Parameters ---------- entities : a list of entities
[ "Basic", "plot", "of", "a", "numerical", "sensor", "data", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L292-L305
7,913
robmarkcole/HASS-data-detective
detective/core.py
BinarySensors.plot
def plot(self, entity): """ Basic plot of a single binary sensor data. Parameters ---------- entity : string The entity to plot """ df = self._binary_df[[entity]] resampled = df.resample("s").ffill() # Sample at seconds and ffill resa...
python
def plot(self, entity): """ Basic plot of a single binary sensor data. Parameters ---------- entity : string The entity to plot """ df = self._binary_df[[entity]] resampled = df.resample("s").ffill() # Sample at seconds and ffill resa...
[ "def", "plot", "(", "self", ",", "entity", ")", ":", "df", "=", "self", ".", "_binary_df", "[", "[", "entity", "]", "]", "resampled", "=", "df", ".", "resample", "(", "\"s\"", ")", ".", "ffill", "(", ")", "# Sample at seconds and ffill", "resampled", "...
Basic plot of a single binary sensor data. Parameters ---------- entity : string The entity to plot
[ "Basic", "plot", "of", "a", "single", "binary", "sensor", "data", "." ]
f67dfde9dd63a3af411944d1857b0835632617c5
https://github.com/robmarkcole/HASS-data-detective/blob/f67dfde9dd63a3af411944d1857b0835632617c5/detective/core.py#L353-L381
7,914
django-salesforce/django-salesforce
salesforce/router.py
is_sf_database
def is_sf_database(db, model=None): """The alias is a Salesforce database.""" from django.db import connections if db is None: return getattr(model, '_salesforce_object', False) engine = connections[db].settings_dict['ENGINE'] return engine == 'salesforce.backend' or connections[db].vendor =...
python
def is_sf_database(db, model=None): """The alias is a Salesforce database.""" from django.db import connections if db is None: return getattr(model, '_salesforce_object', False) engine = connections[db].settings_dict['ENGINE'] return engine == 'salesforce.backend' or connections[db].vendor =...
[ "def", "is_sf_database", "(", "db", ",", "model", "=", "None", ")", ":", "from", "django", ".", "db", "import", "connections", "if", "db", "is", "None", ":", "return", "getattr", "(", "model", ",", "'_salesforce_object'", ",", "False", ")", "engine", "="...
The alias is a Salesforce database.
[ "The", "alias", "is", "a", "Salesforce", "database", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/router.py#L16-L22
7,915
django-salesforce/django-salesforce
salesforce/router.py
ModelRouter.allow_migrate
def allow_migrate(self, db, app_label, model_name=None, **hints): """ Don't attempt to sync SF models to non SF databases and vice versa. """ if model_name: model = apps.get_model(app_label, model_name) else: # hints are used with less priority, because ma...
python
def allow_migrate(self, db, app_label, model_name=None, **hints): """ Don't attempt to sync SF models to non SF databases and vice versa. """ if model_name: model = apps.get_model(app_label, model_name) else: # hints are used with less priority, because ma...
[ "def", "allow_migrate", "(", "self", ",", "db", ",", "app_label", ",", "model_name", "=", "None", ",", "*", "*", "hints", ")", ":", "if", "model_name", ":", "model", "=", "apps", ".", "get_model", "(", "app_label", ",", "model_name", ")", "else", ":", ...
Don't attempt to sync SF models to non SF databases and vice versa.
[ "Don", "t", "attempt", "to", "sync", "SF", "models", "to", "non", "SF", "databases", "and", "vice", "versa", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/router.py#L60-L85
7,916
django-salesforce/django-salesforce
salesforce/backend/indep.py
LazyField.update
def update(self, **kwargs): """Customize the lazy field""" assert not self.called self.kw.update(kwargs) return self
python
def update(self, **kwargs): """Customize the lazy field""" assert not self.called self.kw.update(kwargs) return self
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "assert", "not", "self", ".", "called", "self", ".", "kw", ".", "update", "(", "kwargs", ")", "return", "self" ]
Customize the lazy field
[ "Customize", "the", "lazy", "field" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/indep.py#L32-L36
7,917
django-salesforce/django-salesforce
salesforce/backend/indep.py
LazyField.create
def create(self): """Create a normal field from the lazy field""" assert not self.called return self.klass(*self.args, **self.kw)
python
def create(self): """Create a normal field from the lazy field""" assert not self.called return self.klass(*self.args, **self.kw)
[ "def", "create", "(", "self", ")", ":", "assert", "not", "self", ".", "called", "return", "self", ".", "klass", "(", "*", "self", ".", "args", ",", "*", "*", "self", ".", "kw", ")" ]
Create a normal field from the lazy field
[ "Create", "a", "normal", "field", "from", "the", "lazy", "field" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/indep.py#L38-L41
7,918
django-salesforce/django-salesforce
salesforce/backend/manager.py
SalesforceManager.get_queryset
def get_queryset(self): """ Returns a QuerySet which access remote SF objects. """ if router.is_sf_database(self.db): q = models_sql_query.SalesforceQuery(self.model, where=compiler.SalesforceWhereNode) return query.SalesforceQuerySet(self.model, query=q, using=se...
python
def get_queryset(self): """ Returns a QuerySet which access remote SF objects. """ if router.is_sf_database(self.db): q = models_sql_query.SalesforceQuery(self.model, where=compiler.SalesforceWhereNode) return query.SalesforceQuerySet(self.model, query=q, using=se...
[ "def", "get_queryset", "(", "self", ")", ":", "if", "router", ".", "is_sf_database", "(", "self", ".", "db", ")", ":", "q", "=", "models_sql_query", ".", "SalesforceQuery", "(", "self", ".", "model", ",", "where", "=", "compiler", ".", "SalesforceWhereNode...
Returns a QuerySet which access remote SF objects.
[ "Returns", "a", "QuerySet", "which", "access", "remote", "SF", "objects", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/manager.py#L27-L34
7,919
django-salesforce/django-salesforce
salesforce/fields.py
SfField.get_attname_column
def get_attname_column(self): """ Get the database column name automatically in most cases. """ # See "A guide to Field parameters": django/db/models/fields/__init__.py # * attname: The attribute to use on the model object. This is the same as # "name",...
python
def get_attname_column(self): """ Get the database column name automatically in most cases. """ # See "A guide to Field parameters": django/db/models/fields/__init__.py # * attname: The attribute to use on the model object. This is the same as # "name",...
[ "def", "get_attname_column", "(", "self", ")", ":", "# See \"A guide to Field parameters\": django/db/models/fields/__init__.py", "# * attname: The attribute to use on the model object. This is the same as", "# \"name\", except in the case of ForeignKeys, where \"_id\" is", "# ...
Get the database column name automatically in most cases.
[ "Get", "the", "database", "column", "name", "automatically", "in", "most", "cases", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/fields.py#L109-L133
7,920
django-salesforce/django-salesforce
salesforce/backend/utils.py
extract_values
def extract_values(query): """ Extract values from insert or update query. Supports bulk_create """ # pylint if isinstance(query, subqueries.UpdateQuery): row = query.values return extract_values_inner(row, query) if isinstance(query, subqueries.InsertQuery): ret = []...
python
def extract_values(query): """ Extract values from insert or update query. Supports bulk_create """ # pylint if isinstance(query, subqueries.UpdateQuery): row = query.values return extract_values_inner(row, query) if isinstance(query, subqueries.InsertQuery): ret = []...
[ "def", "extract_values", "(", "query", ")", ":", "# pylint", "if", "isinstance", "(", "query", ",", "subqueries", ".", "UpdateQuery", ")", ":", "row", "=", "query", ".", "values", "return", "extract_values_inner", "(", "row", ",", "query", ")", "if", "isin...
Extract values from insert or update query. Supports bulk_create
[ "Extract", "values", "from", "insert", "or", "update", "query", ".", "Supports", "bulk_create" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/utils.py#L94-L108
7,921
django-salesforce/django-salesforce
salesforce/backend/utils.py
CursorWrapper.execute
def execute(self, q, args=()): """ Send a query to the Salesforce API. """ # pylint:disable=too-many-branches self.rowcount = None response = None if self.query is None: self.execute_select(q, args) else: response = self.execute_dja...
python
def execute(self, q, args=()): """ Send a query to the Salesforce API. """ # pylint:disable=too-many-branches self.rowcount = None response = None if self.query is None: self.execute_select(q, args) else: response = self.execute_dja...
[ "def", "execute", "(", "self", ",", "q", ",", "args", "=", "(", ")", ")", ":", "# pylint:disable=too-many-branches", "self", ".", "rowcount", "=", "None", "response", "=", "None", "if", "self", ".", "query", "is", "None", ":", "self", ".", "execute_selec...
Send a query to the Salesforce API.
[ "Send", "a", "query", "to", "the", "Salesforce", "API", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/utils.py#L173-L218
7,922
django-salesforce/django-salesforce
salesforce/backend/utils.py
CursorWrapper.execute_django
def execute_django(self, soql, args=()): """ Fixed execute for queries coming from Django query compilers """ response = None sqltype = soql.split(None, 1)[0].upper() if isinstance(self.query, subqueries.InsertQuery): response = self.execute_insert(self.query)...
python
def execute_django(self, soql, args=()): """ Fixed execute for queries coming from Django query compilers """ response = None sqltype = soql.split(None, 1)[0].upper() if isinstance(self.query, subqueries.InsertQuery): response = self.execute_insert(self.query)...
[ "def", "execute_django", "(", "self", ",", "soql", ",", "args", "=", "(", ")", ")", ":", "response", "=", "None", "sqltype", "=", "soql", ".", "split", "(", "None", ",", "1", ")", "[", "0", "]", ".", "upper", "(", ")", "if", "isinstance", "(", ...
Fixed execute for queries coming from Django query compilers
[ "Fixed", "execute", "for", "queries", "coming", "from", "Django", "query", "compilers" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/utils.py#L223-L244
7,923
django-salesforce/django-salesforce
salesforce/backend/utils.py
CursorWrapper.get_pks_from_query
def get_pks_from_query(self, query): """Prepare primary keys for update and delete queries""" where = query.where sql = None if where.connector == 'AND' and not where.negated and len(where.children) == 1: # simple cases are optimized, especially because a suboptimal ...
python
def get_pks_from_query(self, query): """Prepare primary keys for update and delete queries""" where = query.where sql = None if where.connector == 'AND' and not where.negated and len(where.children) == 1: # simple cases are optimized, especially because a suboptimal ...
[ "def", "get_pks_from_query", "(", "self", ",", "query", ")", ":", "where", "=", "query", ".", "where", "sql", "=", "None", "if", "where", ".", "connector", "==", "'AND'", "and", "not", "where", ".", "negated", "and", "len", "(", "where", ".", "children...
Prepare primary keys for update and delete queries
[ "Prepare", "primary", "keys", "for", "update", "and", "delete", "queries" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/utils.py#L286-L320
7,924
django-salesforce/django-salesforce
salesforce/backend/utils.py
CursorWrapper.versions_request
def versions_request(self): """List Available REST API Versions""" ret = self.handle_api_exceptions('GET', '', api_ver='') return [str_dict(x) for x in ret.json()]
python
def versions_request(self): """List Available REST API Versions""" ret = self.handle_api_exceptions('GET', '', api_ver='') return [str_dict(x) for x in ret.json()]
[ "def", "versions_request", "(", "self", ")", ":", "ret", "=", "self", ".", "handle_api_exceptions", "(", "'GET'", ",", "''", ",", "api_ver", "=", "''", ")", "return", "[", "str_dict", "(", "x", ")", "for", "x", "in", "ret", ".", "json", "(", ")", "...
List Available REST API Versions
[ "List", "Available", "REST", "API", "Versions" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/utils.py#L401-L404
7,925
django-salesforce/django-salesforce
salesforce/management/commands/inspectdb.py
fix_international
def fix_international(text): "Fix excaped international characters back to utf-8" class SmartInternational(str): def __new__(cls, text): return str.__new__(cls, text) def endswith(self, string): return super(SmartInternational, self).endswith(str(string)) if PY3: ...
python
def fix_international(text): "Fix excaped international characters back to utf-8" class SmartInternational(str): def __new__(cls, text): return str.__new__(cls, text) def endswith(self, string): return super(SmartInternational, self).endswith(str(string)) if PY3: ...
[ "def", "fix_international", "(", "text", ")", ":", "class", "SmartInternational", "(", "str", ")", ":", "def", "__new__", "(", "cls", ",", "text", ")", ":", "return", "str", ".", "__new__", "(", "cls", ",", "text", ")", "def", "endswith", "(", "self", ...
Fix excaped international characters back to utf-8
[ "Fix", "excaped", "international", "characters", "back", "to", "utf", "-", "8" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/management/commands/inspectdb.py#L46-L65
7,926
django-salesforce/django-salesforce
salesforce/management/commands/inspectdb.py
Command.get_meta
def get_meta(self, table_name, constraints=None, column_to_field_name=None, is_view=False, is_partition=None): """ Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name. """ ...
python
def get_meta(self, table_name, constraints=None, column_to_field_name=None, is_view=False, is_partition=None): """ Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name. """ ...
[ "def", "get_meta", "(", "self", ",", "table_name", ",", "constraints", "=", "None", ",", "column_to_field_name", "=", "None", ",", "is_view", "=", "False", ",", "is_partition", "=", "None", ")", ":", "# pylint:disable=arguments-differ,too-many-arguments,unused-argumen...
Return a sequence comprising the lines of code necessary to construct the inner Meta class for the model corresponding to the given database table name.
[ "Return", "a", "sequence", "comprising", "the", "lines", "of", "code", "necessary", "to", "construct", "the", "inner", "Meta", "class", "for", "the", "model", "corresponding", "to", "the", "given", "database", "table", "name", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/management/commands/inspectdb.py#L141-L154
7,927
django-salesforce/django-salesforce
setup.py
relative_path
def relative_path(path): """ Return the given path relative to this file. """ return os.path.join(os.path.dirname(__file__), path)
python
def relative_path(path): """ Return the given path relative to this file. """ return os.path.join(os.path.dirname(__file__), path)
[ "def", "relative_path", "(", "path", ")", ":", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "path", ")" ]
Return the given path relative to this file.
[ "Return", "the", "given", "path", "relative", "to", "this", "file", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/setup.py#L16-L20
7,928
django-salesforce/django-salesforce
setup.py
get_tagged_version
def get_tagged_version(): """ Determine the current version of this package. Precise long version numbers are used if the Git repository is found. They contain: the Git tag, the commit serial and a short commit id. otherwise a short version number is used if installed from Pypi. """ with op...
python
def get_tagged_version(): """ Determine the current version of this package. Precise long version numbers are used if the Git repository is found. They contain: the Git tag, the commit serial and a short commit id. otherwise a short version number is used if installed from Pypi. """ with op...
[ "def", "get_tagged_version", "(", ")", ":", "with", "open", "(", "relative_path", "(", "'salesforce/__init__.py'", ")", ",", "'r'", ")", "as", "fd", ":", "version", "=", "re", ".", "search", "(", "r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]'", ",", "fd", "....
Determine the current version of this package. Precise long version numbers are used if the Git repository is found. They contain: the Git tag, the commit serial and a short commit id. otherwise a short version number is used if installed from Pypi.
[ "Determine", "the", "current", "version", "of", "this", "package", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/setup.py#L23-L34
7,929
django-salesforce/django-salesforce
salesforce/auth.py
SalesforceAuth.dynamic_start
def dynamic_start(self, access_token, instance_url=None, **kw): """ Set the access token dynamically according to the current user. More parameters can be set. """ self.dynamic = {'access_token': str(access_token), 'instance_url': str(instance_url)} self.dynamic.update(k...
python
def dynamic_start(self, access_token, instance_url=None, **kw): """ Set the access token dynamically according to the current user. More parameters can be set. """ self.dynamic = {'access_token': str(access_token), 'instance_url': str(instance_url)} self.dynamic.update(k...
[ "def", "dynamic_start", "(", "self", ",", "access_token", ",", "instance_url", "=", "None", ",", "*", "*", "kw", ")", ":", "self", ".", "dynamic", "=", "{", "'access_token'", ":", "str", "(", "access_token", ")", ",", "'instance_url'", ":", "str", "(", ...
Set the access token dynamically according to the current user. More parameters can be set.
[ "Set", "the", "access", "token", "dynamically", "according", "to", "the", "current", "user", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/auth.py#L144-L151
7,930
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
mark_quoted_strings
def mark_quoted_strings(sql): """Mark all quoted strings in the SOQL by '@' and get them as params, with respect to all escaped backslashes and quotes. """ # pattern of a string parameter (pm), a char escaped by backslash (bs) # out_pattern: characters valid in SOQL pm_pattern = re.compile(r"'[^...
python
def mark_quoted_strings(sql): """Mark all quoted strings in the SOQL by '@' and get them as params, with respect to all escaped backslashes and quotes. """ # pattern of a string parameter (pm), a char escaped by backslash (bs) # out_pattern: characters valid in SOQL pm_pattern = re.compile(r"'[^...
[ "def", "mark_quoted_strings", "(", "sql", ")", ":", "# pattern of a string parameter (pm), a char escaped by backslash (bs)", "# out_pattern: characters valid in SOQL", "pm_pattern", "=", "re", ".", "compile", "(", "r\"'[^\\\\']*(?:\\\\[\\\\'][^\\\\']*)*'\"", ")", "bs_pattern", "="...
Mark all quoted strings in the SOQL by '@' and get them as params, with respect to all escaped backslashes and quotes.
[ "Mark", "all", "quoted", "strings", "in", "the", "SOQL", "by" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L194-L214
7,931
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
subst_quoted_strings
def subst_quoted_strings(sql, params): """Reverse operation to mark_quoted_strings - substitutes '@' by params. """ parts = sql.split('@') params_dont_match = "number of parameters doesn' match the transformed query" assert len(parts) == len(params) + 1, params_dont_match # would be internal error ...
python
def subst_quoted_strings(sql, params): """Reverse operation to mark_quoted_strings - substitutes '@' by params. """ parts = sql.split('@') params_dont_match = "number of parameters doesn' match the transformed query" assert len(parts) == len(params) + 1, params_dont_match # would be internal error ...
[ "def", "subst_quoted_strings", "(", "sql", ",", "params", ")", ":", "parts", "=", "sql", ".", "split", "(", "'@'", ")", "params_dont_match", "=", "\"number of parameters doesn' match the transformed query\"", "assert", "len", "(", "parts", ")", "==", "len", "(", ...
Reverse operation to mark_quoted_strings - substitutes '@' by params.
[ "Reverse", "operation", "to", "mark_quoted_strings", "-", "substitutes" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L217-L228
7,932
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
find_closing_parenthesis
def find_closing_parenthesis(sql, startpos): """Find the pair of opening and closing parentheses. Starts search at the position startpos. Returns tuple of positions (opening, closing) if search succeeds, otherwise None. """ pattern = re.compile(r'[()]') level = 0 opening = [] for match ...
python
def find_closing_parenthesis(sql, startpos): """Find the pair of opening and closing parentheses. Starts search at the position startpos. Returns tuple of positions (opening, closing) if search succeeds, otherwise None. """ pattern = re.compile(r'[()]') level = 0 opening = [] for match ...
[ "def", "find_closing_parenthesis", "(", "sql", ",", "startpos", ")", ":", "pattern", "=", "re", ".", "compile", "(", "r'[()]'", ")", "level", "=", "0", "opening", "=", "[", "]", "for", "match", "in", "pattern", ".", "finditer", "(", "sql", ",", "startp...
Find the pair of opening and closing parentheses. Starts search at the position startpos. Returns tuple of positions (opening, closing) if search succeeds, otherwise None.
[ "Find", "the", "pair", "of", "opening", "and", "closing", "parentheses", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L231-L251
7,933
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
split_subquery
def split_subquery(sql): """Split on subqueries and replace them by '&'.""" sql, params = mark_quoted_strings(sql) sql = simplify_expression(sql) _ = params # NOQA start = 0 out = [] subqueries = [] pattern = re.compile(r'\(SELECT\b', re.I) match = pattern.search(sql, start) whi...
python
def split_subquery(sql): """Split on subqueries and replace them by '&'.""" sql, params = mark_quoted_strings(sql) sql = simplify_expression(sql) _ = params # NOQA start = 0 out = [] subqueries = [] pattern = re.compile(r'\(SELECT\b', re.I) match = pattern.search(sql, start) whi...
[ "def", "split_subquery", "(", "sql", ")", ":", "sql", ",", "params", "=", "mark_quoted_strings", "(", "sql", ")", "sql", "=", "simplify_expression", "(", "sql", ")", "_", "=", "params", "# NOQA", "start", "=", "0", "out", "=", "[", "]", "subqueries", "...
Split on subqueries and replace them by '&'.
[ "Split", "on", "subqueries", "and", "replace", "them", "by", "&", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L268-L286
7,934
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
simplify_expression
def simplify_expression(txt): """Remove all unecessary whitespace and some very usual space""" minimal = re.sub(r'\s', ' ', re.sub(r'\s(?=\W)', '', re.sub(r'(?<=\W)\s', '', txt.strip()))) # add space before some "(" and afte...
python
def simplify_expression(txt): """Remove all unecessary whitespace and some very usual space""" minimal = re.sub(r'\s', ' ', re.sub(r'\s(?=\W)', '', re.sub(r'(?<=\W)\s', '', txt.strip()))) # add space before some "(" and afte...
[ "def", "simplify_expression", "(", "txt", ")", ":", "minimal", "=", "re", ".", "sub", "(", "r'\\s'", ",", "' '", ",", "re", ".", "sub", "(", "r'\\s(?=\\W)'", ",", "''", ",", "re", ".", "sub", "(", "r'(?<=\\W)\\s'", ",", "''", ",", "txt", ".", "stri...
Remove all unecessary whitespace and some very usual space
[ "Remove", "all", "unecessary", "whitespace", "and", "some", "very", "usual", "space" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L289-L298
7,935
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
QQuery._make_flat
def _make_flat(self, row_dict, path, subroots): """Replace the nested dict objects by a flat dict with keys "object.object.name".""" # can get a cursor parameter, if introspection should be possible on the fly out = {} for k, v in row_dict.items(): klc = k.lower() # "key low...
python
def _make_flat(self, row_dict, path, subroots): """Replace the nested dict objects by a flat dict with keys "object.object.name".""" # can get a cursor parameter, if introspection should be possible on the fly out = {} for k, v in row_dict.items(): klc = k.lower() # "key low...
[ "def", "_make_flat", "(", "self", ",", "row_dict", ",", "path", ",", "subroots", ")", ":", "# can get a cursor parameter, if introspection should be possible on the fly", "out", "=", "{", "}", "for", "k", ",", "v", "in", "row_dict", ".", "items", "(", ")", ":", ...
Replace the nested dict objects by a flat dict with keys "object.object.name".
[ "Replace", "the", "nested", "dict", "objects", "by", "a", "flat", "dict", "with", "keys", "object", ".", "object", ".", "name", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L128-L149
7,936
django-salesforce/django-salesforce
salesforce/dbapi/subselect.py
QQuery.parse_rest_response
def parse_rest_response(self, records, rowcount, row_type=list): """Parse the REST API response to DB API cursor flat response""" if self.is_plain_count: # result of "SELECT COUNT() FROM ... WHERE ..." assert list(records) == [] yield rowcount # originally [resp.json...
python
def parse_rest_response(self, records, rowcount, row_type=list): """Parse the REST API response to DB API cursor flat response""" if self.is_plain_count: # result of "SELECT COUNT() FROM ... WHERE ..." assert list(records) == [] yield rowcount # originally [resp.json...
[ "def", "parse_rest_response", "(", "self", ",", "records", ",", "rowcount", ",", "row_type", "=", "list", ")", ":", "if", "self", ".", "is_plain_count", ":", "# result of \"SELECT COUNT() FROM ... WHERE ...\"", "assert", "list", "(", "records", ")", "==", "[", "...
Parse the REST API response to DB API cursor flat response
[ "Parse", "the", "REST", "API", "response", "to", "DB", "API", "cursor", "flat", "response" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/subselect.py#L151-L174
7,937
django-salesforce/django-salesforce
salesforce/models.py
make_dynamic_fields
def make_dynamic_fields(pattern_module, dynamic_field_patterns, attrs): """Add some Salesforce fields from a pattern_module models.py Parameters: pattern_module: Module where to search additional fields settings. It is an imported module created by introspection (inspectdb), usua...
python
def make_dynamic_fields(pattern_module, dynamic_field_patterns, attrs): """Add some Salesforce fields from a pattern_module models.py Parameters: pattern_module: Module where to search additional fields settings. It is an imported module created by introspection (inspectdb), usua...
[ "def", "make_dynamic_fields", "(", "pattern_module", ",", "dynamic_field_patterns", ",", "attrs", ")", ":", "# pylint:disable=invalid-name,too-many-branches,too-many-locals", "import", "re", "attr_meta", "=", "attrs", "[", "'Meta'", "]", "db_table", "=", "getattr", "(", ...
Add some Salesforce fields from a pattern_module models.py Parameters: pattern_module: Module where to search additional fields settings. It is an imported module created by introspection (inspectdb), usually named `models_template.py`. (You will probably not add it to ver...
[ "Add", "some", "Salesforce", "fields", "from", "a", "pattern_module", "models", ".", "py" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/models.py#L103-L199
7,938
django-salesforce/django-salesforce
salesforce/dbapi/exceptions.py
prepare_exception
def prepare_exception(obj, messages=None, response=None, verbs=None): """Prepare excetion params or only an exception message parameters: messages: list of strings, that will be separated by new line response: response from a request to SFDC REST API verbs: list of options about verbosi...
python
def prepare_exception(obj, messages=None, response=None, verbs=None): """Prepare excetion params or only an exception message parameters: messages: list of strings, that will be separated by new line response: response from a request to SFDC REST API verbs: list of options about verbosi...
[ "def", "prepare_exception", "(", "obj", ",", "messages", "=", "None", ",", "response", "=", "None", ",", "verbs", "=", "None", ")", ":", "# pylint:disable=too-many-branches", "verbs", "=", "set", "(", "verbs", "or", "[", "]", ")", "known_options", "=", "["...
Prepare excetion params or only an exception message parameters: messages: list of strings, that will be separated by new line response: response from a request to SFDC REST API verbs: list of options about verbosity
[ "Prepare", "excetion", "params", "or", "only", "an", "exception", "message" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/exceptions.py#L67-L119
7,939
django-salesforce/django-salesforce
salesforce/dbapi/exceptions.py
warn_sf
def warn_sf(messages, response, verbs=None, klass=SalesforceWarning): """Issue a warning SalesforceWarning, with message combined from message and data from SFDC response""" warnings.warn(klass(messages, response, verbs), stacklevel=2)
python
def warn_sf(messages, response, verbs=None, klass=SalesforceWarning): """Issue a warning SalesforceWarning, with message combined from message and data from SFDC response""" warnings.warn(klass(messages, response, verbs), stacklevel=2)
[ "def", "warn_sf", "(", "messages", ",", "response", ",", "verbs", "=", "None", ",", "klass", "=", "SalesforceWarning", ")", ":", "warnings", ".", "warn", "(", "klass", "(", "messages", ",", "response", ",", "verbs", ")", ",", "stacklevel", "=", "2", ")...
Issue a warning SalesforceWarning, with message combined from message and data from SFDC response
[ "Issue", "a", "warning", "SalesforceWarning", "with", "message", "combined", "from", "message", "and", "data", "from", "SFDC", "response" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/exceptions.py#L122-L124
7,940
django-salesforce/django-salesforce
salesforce/backend/compiler.py
SQLCompiler.get_from_clause
def get_from_clause(self): """ Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries. """ self.query_topology() root_table = self.soql_trans[self.root_alias...
python
def get_from_clause(self): """ Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries. """ self.query_topology() root_table = self.soql_trans[self.root_alias...
[ "def", "get_from_clause", "(", "self", ")", ":", "self", ".", "query_topology", "(", ")", "root_table", "=", "self", ".", "soql_trans", "[", "self", ".", "root_alias", "]", "return", "[", "root_table", "]", ",", "[", "]" ]
Return the FROM clause, converted the SOQL dialect. It should be only the name of base object, even in parent-to-child and child-to-parent relationships queries.
[ "Return", "the", "FROM", "clause", "converted", "the", "SOQL", "dialect", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/compiler.py#L34-L43
7,941
django-salesforce/django-salesforce
salesforce/backend/compiler.py
SQLCompiler.quote_name_unless_alias
def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. Mostly used during the ORDER BY clause. """ r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r
python
def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. Mostly used during the ORDER BY clause. """ r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r
[ "def", "quote_name_unless_alias", "(", "self", ",", "name", ")", ":", "r", "=", "self", ".", "connection", ".", "ops", ".", "quote_name", "(", "name", ")", "self", ".", "quote_cache", "[", "name", "]", "=", "r", "return", "r" ]
A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. Mostly used during the ORDER BY clause.
[ "A", "wrapper", "around", "connection", ".", "ops", ".", "quote_name", "that", "doesn", "t", "quote", "aliases", "for", "table", "names", ".", "Mostly", "used", "during", "the", "ORDER", "BY", "clause", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/backend/compiler.py#L45-L52
7,942
django-salesforce/django-salesforce
salesforce/utils.py
get_soap_client
def get_soap_client(db_alias, client_class=None): """ Create the SOAP client for the current user logged in the db_alias The default created client is "beatbox.PythonClient", but an alternative client is possible. (i.e. other subtype of beatbox.XMLClient) """ if not beatbox: raise Inter...
python
def get_soap_client(db_alias, client_class=None): """ Create the SOAP client for the current user logged in the db_alias The default created client is "beatbox.PythonClient", but an alternative client is possible. (i.e. other subtype of beatbox.XMLClient) """ if not beatbox: raise Inter...
[ "def", "get_soap_client", "(", "db_alias", ",", "client_class", "=", "None", ")", ":", "if", "not", "beatbox", ":", "raise", "InterfaceError", "(", "\"To use SOAP API, you'll need to install the Beatbox package.\"", ")", "if", "client_class", "is", "None", ":", "clien...
Create the SOAP client for the current user logged in the db_alias The default created client is "beatbox.PythonClient", but an alternative client is possible. (i.e. other subtype of beatbox.XMLClient)
[ "Create", "the", "SOAP", "client", "for", "the", "current", "user", "logged", "in", "the", "db_alias" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/utils.py#L20-L46
7,943
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
signalize_extensions
def signalize_extensions(): """DB API 2.0 extension are reported by warnings at run-time.""" warnings.warn("DB-API extension cursor.rownumber used", SalesforceWarning) warnings.warn("DB-API extension connection.<exception> used", SalesforceWarning) # TODO warnings.warn("DB-API extension cursor.connecti...
python
def signalize_extensions(): """DB API 2.0 extension are reported by warnings at run-time.""" warnings.warn("DB-API extension cursor.rownumber used", SalesforceWarning) warnings.warn("DB-API extension connection.<exception> used", SalesforceWarning) # TODO warnings.warn("DB-API extension cursor.connecti...
[ "def", "signalize_extensions", "(", ")", ":", "warnings", ".", "warn", "(", "\"DB-API extension cursor.rownumber used\"", ",", "SalesforceWarning", ")", "warnings", ".", "warn", "(", "\"DB-API extension connection.<exception> used\"", ",", "SalesforceWarning", ")", "# TODO"...
DB API 2.0 extension are reported by warnings at run-time.
[ "DB", "API", "2", ".", "0", "extension", "are", "reported", "by", "warnings", "at", "run", "-", "time", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L670-L681
7,944
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
arg_to_soql
def arg_to_soql(arg): """ Perform necessary SOQL quoting on the arg. """ conversion = sql_conversions.get(type(arg)) if conversion: return conversion(arg) for type_ in subclass_conversions: if isinstance(arg, type_): return sql_conversions[type_](arg) return sql_c...
python
def arg_to_soql(arg): """ Perform necessary SOQL quoting on the arg. """ conversion = sql_conversions.get(type(arg)) if conversion: return conversion(arg) for type_ in subclass_conversions: if isinstance(arg, type_): return sql_conversions[type_](arg) return sql_c...
[ "def", "arg_to_soql", "(", "arg", ")", ":", "conversion", "=", "sql_conversions", ".", "get", "(", "type", "(", "arg", ")", ")", "if", "conversion", ":", "return", "conversion", "(", "arg", ")", "for", "type_", "in", "subclass_conversions", ":", "if", "i...
Perform necessary SOQL quoting on the arg.
[ "Perform", "necessary", "SOQL", "quoting", "on", "the", "arg", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L735-L745
7,945
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
arg_to_json
def arg_to_json(arg): """ Perform necessary JSON conversion on the arg. """ conversion = json_conversions.get(type(arg)) if conversion: return conversion(arg) for type_ in subclass_conversions: if isinstance(arg, type_): return json_conversions[type_](arg) return ...
python
def arg_to_json(arg): """ Perform necessary JSON conversion on the arg. """ conversion = json_conversions.get(type(arg)) if conversion: return conversion(arg) for type_ in subclass_conversions: if isinstance(arg, type_): return json_conversions[type_](arg) return ...
[ "def", "arg_to_json", "(", "arg", ")", ":", "conversion", "=", "json_conversions", ".", "get", "(", "type", "(", "arg", ")", ")", "if", "conversion", ":", "return", "conversion", "(", "arg", ")", "for", "type_", "in", "subclass_conversions", ":", "if", "...
Perform necessary JSON conversion on the arg.
[ "Perform", "necessary", "JSON", "conversion", "on", "the", "arg", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L748-L758
7,946
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
merge_dict
def merge_dict(dict_1, *other, **kw): """Merge two or more dict including kw into result dict.""" tmp = dict_1.copy() for x in other: tmp.update(x) tmp.update(kw) return tmp
python
def merge_dict(dict_1, *other, **kw): """Merge two or more dict including kw into result dict.""" tmp = dict_1.copy() for x in other: tmp.update(x) tmp.update(kw) return tmp
[ "def", "merge_dict", "(", "dict_1", ",", "*", "other", ",", "*", "*", "kw", ")", ":", "tmp", "=", "dict_1", ".", "copy", "(", ")", "for", "x", "in", "other", ":", "tmp", ".", "update", "(", "x", ")", "tmp", ".", "update", "(", "kw", ")", "ret...
Merge two or more dict including kw into result dict.
[ "Merge", "two", "or", "more", "dict", "including", "kw", "into", "result", "dict", "." ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L792-L798
7,947
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
RawConnection.make_session
def make_session(self): """Authenticate and get the name of assigned SFDC data server""" with connect_lock: if self._sf_session is None: sf_session = requests.Session() # TODO configurable class Salesforce***Auth sf_session.auth = SalesforcePas...
python
def make_session(self): """Authenticate and get the name of assigned SFDC data server""" with connect_lock: if self._sf_session is None: sf_session = requests.Session() # TODO configurable class Salesforce***Auth sf_session.auth = SalesforcePas...
[ "def", "make_session", "(", "self", ")", ":", "with", "connect_lock", ":", "if", "self", ".", "_sf_session", "is", "None", ":", "sf_session", "=", "requests", ".", "Session", "(", ")", "# TODO configurable class Salesforce***Auth", "sf_session", ".", "auth", "="...
Authenticate and get the name of assigned SFDC data server
[ "Authenticate", "and", "get", "the", "name", "of", "assigned", "SFDC", "data", "server" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L159-L172
7,948
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
RawConnection.rest_api_url
def rest_api_url(self, *url_parts, **kwargs): """Join the URL of REST_API parameters: upl_parts: strings that are joined to the url by "/". a REST url like https://na1.salesforce.com/services/data/v44.0/ is usually added, but not if the first string starts w...
python
def rest_api_url(self, *url_parts, **kwargs): """Join the URL of REST_API parameters: upl_parts: strings that are joined to the url by "/". a REST url like https://na1.salesforce.com/services/data/v44.0/ is usually added, but not if the first string starts w...
[ "def", "rest_api_url", "(", "self", ",", "*", "url_parts", ",", "*", "*", "kwargs", ")", ":", "url_parts", "=", "list", "(", "url_parts", ")", "if", "url_parts", "and", "re", ".", "match", "(", "r'^(?:https|mock)://'", ",", "url_parts", "[", "0", "]", ...
Join the URL of REST_API. Parameters: url_parts: strings that are joined to the url by "/". A REST url like https://na1.salesforce.com/services/data/v44.0/ is usually added, but not if the first string starts with https:// api_ver: API version that shoul...
[ "Join", "the", "URL", "of", "REST_API" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L174-L216
7,949
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
RawConnection.raise_errors
def raise_errors(self, response): """The innermost part - report errors by exceptions""" # Errors: 400, 403 permissions or REQUEST_LIMIT_EXCEEDED, 404, 405, 415, 500) # TODO extract a case ID for Salesforce support from code 500 messages # TODO disabled 'debug_verbs' temporarily, after ...
python
def raise_errors(self, response): """The innermost part - report errors by exceptions""" # Errors: 400, 403 permissions or REQUEST_LIMIT_EXCEEDED, 404, 405, 415, 500) # TODO extract a case ID for Salesforce support from code 500 messages # TODO disabled 'debug_verbs' temporarily, after ...
[ "def", "raise_errors", "(", "self", ",", "response", ")", ":", "# Errors: 400, 403 permissions or REQUEST_LIMIT_EXCEEDED, 404, 405, 415, 500)", "# TODO extract a case ID for Salesforce support from code 500 messages", "# TODO disabled 'debug_verbs' temporarily, after writing better default messa...
The innermost part - report errors by exceptions
[ "The", "innermost", "part", "-", "report", "errors", "by", "exceptions" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L287-L322
7,950
django-salesforce/django-salesforce
salesforce/dbapi/driver.py
RawConnection.composite_request
def composite_request(self, data): """Call a 'composite' request with subrequests, error handling A fake object for request/response is created for a subrequest in case of error, to be possible to use the same error hanler with a clear message as with an individual request. """ ...
python
def composite_request(self, data): """Call a 'composite' request with subrequests, error handling A fake object for request/response is created for a subrequest in case of error, to be possible to use the same error hanler with a clear message as with an individual request. """ ...
[ "def", "composite_request", "(", "self", ",", "data", ")", ":", "post_data", "=", "{", "'compositeRequest'", ":", "data", ",", "'allOrNone'", ":", "True", "}", "resp", "=", "self", ".", "handle_api_exceptions", "(", "'POST'", ",", "'composite'", ",", "json",...
Call a 'composite' request with subrequests, with error handling. A fake object for request/response is created for a subrequest in case of error, so that the same error handler can be used with a clear message as with an individual request.
[ "Call", "a", "composite", "request", "with", "subrequests", "error", "handling" ]
6fd5643dba69d49c5881de50875cf90204a8f808
https://github.com/django-salesforce/django-salesforce/blob/6fd5643dba69d49c5881de50875cf90204a8f808/salesforce/dbapi/driver.py#L324-L359
7,951
crs4/pydoop
pydoop/avrolib.py
SeekableDataFileReader.align_after
def align_after(self, offset): """ Search for a sync point after offset and align just after that. """ f = self.reader if offset <= 0: # FIXME what is a negative offset?? f.seek(0) self._block_count = 0 self._read_header() # FIXME we can't ex...
python
def align_after(self, offset): """ Search for a sync point after offset and align just after that. """ f = self.reader if offset <= 0: # FIXME what is a negative offset?? f.seek(0) self._block_count = 0 self._read_header() # FIXME we can't ex...
[ "def", "align_after", "(", "self", ",", "offset", ")", ":", "f", "=", "self", ".", "reader", "if", "offset", "<=", "0", ":", "# FIXME what is a negative offset??", "f", ".", "seek", "(", "0", ")", "self", ".", "_block_count", "=", "0", "self", ".", "_r...
Search for a sync point after offset and align just after that.
[ "Search", "for", "a", "sync", "point", "after", "offset", "and", "align", "just", "after", "that", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/avrolib.py#L77-L98
7,952
crs4/pydoop
pydoop/avrolib.py
AvroReader.get_progress
def get_progress(self): """ Give a rough estimate of the progress done. """ pos = self.reader.reader.tell() return min((pos - self.region_start) / float(self.region_end - self.region_start), 1.0)
python
def get_progress(self): """ Give a rough estimate of the progress done. """ pos = self.reader.reader.tell() return min((pos - self.region_start) / float(self.region_end - self.region_start), 1.0)
[ "def", "get_progress", "(", "self", ")", ":", "pos", "=", "self", ".", "reader", ".", "reader", ".", "tell", "(", ")", "return", "min", "(", "(", "pos", "-", "self", ".", "region_start", ")", "/", "float", "(", "self", ".", "region_end", "-", "self...
Give a rough estimate of the progress done.
[ "Give", "a", "rough", "estimate", "of", "the", "progress", "done", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/avrolib.py#L124-L131
7,953
crs4/pydoop
pydoop/hadoop_utils.py
is_exe
def is_exe(fpath): """ Path references an executable file. """ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
python
def is_exe(fpath): """ Path references an executable file. """ return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
[ "def", "is_exe", "(", "fpath", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "os", ".", "access", "(", "fpath", ",", "os", ".", "X_OK", ")" ]
Path references an executable file.
[ "Path", "references", "an", "executable", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadoop_utils.py#L265-L269
7,954
crs4/pydoop
pydoop/hadoop_utils.py
is_readable
def is_readable(fpath): """ Path references a readable file. """ return os.path.isfile(fpath) and os.access(fpath, os.R_OK)
python
def is_readable(fpath): """ Path references a readable file. """ return os.path.isfile(fpath) and os.access(fpath, os.R_OK)
[ "def", "is_readable", "(", "fpath", ")", ":", "return", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "os", ".", "access", "(", "fpath", ",", "os", ".", "R_OK", ")" ]
Path references a readable file.
[ "Path", "references", "a", "readable", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadoop_utils.py#L272-L276
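The two helpers above, is_exe and is_readable, are thin wrappers around os.path.isfile and os.access; a small self-contained sketch with example paths (results depend on the local system):

    import os

    def is_exe(fpath):
        # same body as the pydoop helper shown above
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    print(is_exe("/bin/ls"))        # typically True on a Unix system
    print(is_exe("/etc/hostname"))  # typically False: readable but not executable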
7,955
crs4/pydoop
pydoop/hadoop_utils.py
PathFinder.is_local
def is_local(self, hadoop_conf=None, hadoop_home=None): """\ Is Hadoop configured to run in local mode? By default, it is. [pseudo-]distributed mode must be explicitly configured. """ conf = self.hadoop_params(hadoop_conf, hadoop_home) keys = ('mapreduce.framewor...
python
def is_local(self, hadoop_conf=None, hadoop_home=None): """\ Is Hadoop configured to run in local mode? By default, it is. [pseudo-]distributed mode must be explicitly configured. """ conf = self.hadoop_params(hadoop_conf, hadoop_home) keys = ('mapreduce.framewor...
[ "def", "is_local", "(", "self", ",", "hadoop_conf", "=", "None", ",", "hadoop_home", "=", "None", ")", ":", "conf", "=", "self", ".", "hadoop_params", "(", "hadoop_conf", ",", "hadoop_home", ")", "keys", "=", "(", "'mapreduce.framework.name'", ",", "'mapredu...
\ Is Hadoop configured to run in local mode? By default, it is. [pseudo-]distributed mode must be explicitly configured.
[ "\\", "Is", "Hadoop", "configured", "to", "run", "in", "local", "mode?" ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadoop_utils.py#L562-L576
7,956
crs4/pydoop
pydoop/hdfs/path.py
abspath
def abspath(hdfs_path, user=None, local=False): """ Return an absolute path for ``hdfs_path``. The ``user`` arg is passed to :func:`split`. The ``local`` argument forces ``hdfs_path`` to be interpreted as an ordinary local path: .. code-block:: python >>> import os >>> os.chdir('/tmp'...
python
def abspath(hdfs_path, user=None, local=False): """ Return an absolute path for ``hdfs_path``. The ``user`` arg is passed to :func:`split`. The ``local`` argument forces ``hdfs_path`` to be interpreted as an ordinary local path: .. code-block:: python >>> import os >>> os.chdir('/tmp'...
[ "def", "abspath", "(", "hdfs_path", ",", "user", "=", "None", ",", "local", "=", "False", ")", ":", "if", "local", ":", "return", "'file:%s'", "%", "os", ".", "path", ".", "abspath", "(", "hdfs_path", ")", "if", "isfull", "(", "hdfs_path", ")", ":", ...
Return an absolute path for ``hdfs_path``. The ``user`` arg is passed to :func:`split`. The ``local`` argument forces ``hdfs_path`` to be interpreted as an ordinary local path: .. code-block:: python >>> import os >>> os.chdir('/tmp') >>> import pydoop.hdfs.path as hpath >>> hpath...
[ "Return", "an", "absolute", "path", "for", "hdfs_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/path.py#L242-L278
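The truncated doctest in the abspath docstring suggests usage along these lines (a sketch; the non-local form depends on the configured fs.defaultFS and the current user, so it is only indicated in a comment):

    import os
    import pydoop.hdfs.path as hpath

    os.chdir("/tmp")
    print(hpath.abspath("foo.txt", local=True))   # 'file:/tmp/foo.txt'
    # Without local=True the path would instead be resolved against the
    # default HDFS namenode and the caller's home directory, e.g. something
    # like 'hdfs://<namenode>:<port>/user/<user>/foo.txt'.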
7,957
crs4/pydoop
pydoop/hdfs/path.py
dirname
def dirname(hdfs_path): """ Return the directory component of ``hdfs_path``. """ scheme, netloc, path = parse(hdfs_path) return unparse(scheme, netloc, os.path.dirname(path))
python
def dirname(hdfs_path): """ Return the directory component of ``hdfs_path``. """ scheme, netloc, path = parse(hdfs_path) return unparse(scheme, netloc, os.path.dirname(path))
[ "def", "dirname", "(", "hdfs_path", ")", ":", "scheme", ",", "netloc", ",", "path", "=", "parse", "(", "hdfs_path", ")", "return", "unparse", "(", "scheme", ",", "netloc", ",", "os", ".", "path", ".", "dirname", "(", "path", ")", ")" ]
Return the directory component of ``hdfs_path``.
[ "Return", "the", "directory", "component", "of", "hdfs_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/path.py#L296-L301
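dirname parses the URI, applies os.path.dirname to the path component and reassembles the result, so with a placeholder namenode address:

    import pydoop.hdfs.path as hpath

    hpath.dirname("hdfs://namenode:8020/user/me/data.txt")
    # -> roughly 'hdfs://namenode:8020/user/me'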
7,958
crs4/pydoop
pydoop/hdfs/path.py
expanduser
def expanduser(path): """ Replace initial ``~`` or ``~user`` with the user's home directory. **NOTE:** if the default file system is HDFS, the ``~user`` form is expanded regardless of the user's existence. """ if hdfs_fs.default_is_local(): return os.path.expanduser(path) m = re.mat...
python
def expanduser(path): """ Replace initial ``~`` or ``~user`` with the user's home directory. **NOTE:** if the default file system is HDFS, the ``~user`` form is expanded regardless of the user's existence. """ if hdfs_fs.default_is_local(): return os.path.expanduser(path) m = re.mat...
[ "def", "expanduser", "(", "path", ")", ":", "if", "hdfs_fs", ".", "default_is_local", "(", ")", ":", "return", "os", ".", "path", ".", "expanduser", "(", "path", ")", "m", "=", "re", ".", "match", "(", "r'^~([^/]*)'", ",", "path", ")", "if", "m", "...
Replace initial ``~`` or ``~user`` with the user's home directory. **NOTE:** if the default file system is HDFS, the ``~user`` form is expanded regardless of the user's existence.
[ "Replace", "initial", "~", "or", "~user", "with", "the", "user", "s", "home", "directory", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/path.py#L355-L368
7,959
crs4/pydoop
pydoop/hdfs/path.py
normpath
def normpath(path): """ Normalize ``path``, collapsing redundant separators and up-level refs. """ scheme, netloc, path_ = parse(path) return unparse(scheme, netloc, os.path.normpath(path_))
python
def normpath(path): """ Normalize ``path``, collapsing redundant separators and up-level refs. """ scheme, netloc, path_ = parse(path) return unparse(scheme, netloc, os.path.normpath(path_))
[ "def", "normpath", "(", "path", ")", ":", "scheme", ",", "netloc", ",", "path_", "=", "parse", "(", "path", ")", "return", "unparse", "(", "scheme", ",", "netloc", ",", "os", ".", "path", ".", "normpath", "(", "path_", ")", ")" ]
Normalize ``path``, collapsing redundant separators and up-level refs.
[ "Normalize", "path", "collapsing", "redundant", "separators", "and", "up", "-", "level", "refs", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/path.py#L480-L485
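normpath likewise touches only the path component, leaving scheme and netloc intact (placeholder address again):

    import pydoop.hdfs.path as hpath

    hpath.normpath("hdfs://namenode:8020/a//b/../c")
    # -> roughly 'hdfs://namenode:8020/a/c'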
7,960
crs4/pydoop
pydoop/hdfs/path.py
realpath
def realpath(path): """ Return ``path`` with symlinks resolved. Currently this function returns non-local paths unchanged. """ scheme, netloc, path_ = parse(path) if scheme == 'file' or hdfs_fs.default_is_local(): return unparse(scheme, netloc, os.path.realpath(path_)) return path
python
def realpath(path): """ Return ``path`` with symlinks resolved. Currently this function returns non-local paths unchanged. """ scheme, netloc, path_ = parse(path) if scheme == 'file' or hdfs_fs.default_is_local(): return unparse(scheme, netloc, os.path.realpath(path_)) return path
[ "def", "realpath", "(", "path", ")", ":", "scheme", ",", "netloc", ",", "path_", "=", "parse", "(", "path", ")", "if", "scheme", "==", "'file'", "or", "hdfs_fs", ".", "default_is_local", "(", ")", ":", "return", "unparse", "(", "scheme", ",", "netloc",...
Return ``path`` with symlinks resolved. Currently this function returns non-local paths unchanged.
[ "Return", "path", "with", "symlinks", "resolved", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/path.py#L488-L497
7,961
crs4/pydoop
pydoop/hdfs/fs.py
default_is_local
def default_is_local(hadoop_conf=None, hadoop_home=None): """\ Is Hadoop configured to use the local file system? By default, it is. A DFS must be explicitly configured. """ params = pydoop.hadoop_params(hadoop_conf, hadoop_home) for k in 'fs.defaultFS', 'fs.default.name': if not params...
python
def default_is_local(hadoop_conf=None, hadoop_home=None): """\ Is Hadoop configured to use the local file system? By default, it is. A DFS must be explicitly configured. """ params = pydoop.hadoop_params(hadoop_conf, hadoop_home) for k in 'fs.defaultFS', 'fs.default.name': if not params...
[ "def", "default_is_local", "(", "hadoop_conf", "=", "None", ",", "hadoop_home", "=", "None", ")", ":", "params", "=", "pydoop", ".", "hadoop_params", "(", "hadoop_conf", ",", "hadoop_home", ")", "for", "k", "in", "'fs.defaultFS'", ",", "'fs.default.name'", ":"...
\ Is Hadoop configured to use the local file system? By default, it is. A DFS must be explicitly configured.
[ "\\", "Is", "Hadoop", "configured", "to", "use", "the", "local", "file", "system?" ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L93-L103
7,962
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.open_file
def open_file(self, path, mode="r", buff_size=0, replication=0, blocksize=0, encoding=None, errors=None): """ Open an HDFS file. Supported opening modes are "r", "w", "a". In addition, a ...
python
def open_file(self, path, mode="r", buff_size=0, replication=0, blocksize=0, encoding=None, errors=None): """ Open an HDFS file. Supported opening modes are "r", "w", "a". In addition, a ...
[ "def", "open_file", "(", "self", ",", "path", ",", "mode", "=", "\"r\"", ",", "buff_size", "=", "0", ",", "replication", "=", "0", ",", "blocksize", "=", "0", ",", "encoding", "=", "None", ",", "errors", "=", "None", ")", ":", "_complain_ifclosed", "...
Open an HDFS file. Supported opening modes are "r", "w", "a". In addition, a trailing "t" can be added to specify text mode (e.g., "rt" = open for reading text). Pass 0 as ``buff_size``, ``replication`` or ``blocksize`` if you want to use the "configured" values, i.e., the ones...
[ "Open", "an", "HDFS", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L235-L280
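A hedged sketch of open_file through an hdfs filesystem object; it assumes a reachable (or local) default filesystem, that the no-argument hdfs() constructor connects to it, and a placeholder file name:

    import pydoop.hdfs as hdfs

    fs = hdfs.hdfs()                       # connect to the configured default fs
    f = fs.open_file("example.txt", "wt")  # trailing "t" selects text mode
    f.write("hello\n")
    f.close()
    f = fs.open_file("example.txt", "rt")
    print(f.read())
    f.close()
    fs.close()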
7,963
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.capacity
def capacity(self): """ Return the raw capacity of the filesystem. :rtype: int :return: filesystem capacity """ _complain_ifclosed(self.closed) if not self.__status.host: raise RuntimeError('Capacity is not defined for a local fs') return self...
python
def capacity(self): """ Return the raw capacity of the filesystem. :rtype: int :return: filesystem capacity """ _complain_ifclosed(self.closed) if not self.__status.host: raise RuntimeError('Capacity is not defined for a local fs') return self...
[ "def", "capacity", "(", "self", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "if", "not", "self", ".", "__status", ".", "host", ":", "raise", "RuntimeError", "(", "'Capacity is not defined for a local fs'", ")", "return", "self", ".", "fs",...
Return the raw capacity of the filesystem. :rtype: int :return: filesystem capacity
[ "Return", "the", "raw", "capacity", "of", "the", "filesystem", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L282-L292
7,964
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.copy
def copy(self, from_path, to_hdfs, to_path): """ Copy file from one filesystem to another. :type from_path: str :param from_path: the path of the source file :type to_hdfs: :class:`hdfs` :param to_hdfs: destination filesystem :type to_path: str :param to_...
python
def copy(self, from_path, to_hdfs, to_path): """ Copy file from one filesystem to another. :type from_path: str :param from_path: the path of the source file :type to_hdfs: :class:`hdfs` :param to_hdfs: destination filesystem :type to_path: str :param to_...
[ "def", "copy", "(", "self", ",", "from_path", ",", "to_hdfs", ",", "to_path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "if", "isinstance", "(", "to_hdfs", ",", "self", ".", "__class__", ")", ":", "to_hdfs", "=", "to_hdfs", ".", ...
Copy file from one filesystem to another. :type from_path: str :param from_path: the path of the source file :type to_hdfs: :class:`hdfs` :param to_hdfs: destination filesystem :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~ex...
[ "Copy", "file", "from", "one", "filesystem", "to", "another", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L294-L309
7,965
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.delete
def delete(self, path, recursive=True): """ Delete ``path``. :type path: str :param path: the path of the file or directory :type recursive: bool :param recursive: if ``path`` is a directory, delete it recursively when :obj:`True` :raises: :exc:`~except...
python
def delete(self, path, recursive=True): """ Delete ``path``. :type path: str :param path: the path of the file or directory :type recursive: bool :param recursive: if ``path`` is a directory, delete it recursively when :obj:`True` :raises: :exc:`~except...
[ "def", "delete", "(", "self", ",", "path", ",", "recursive", "=", "True", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "delete", "(", "path", ",", "recursive", ")" ]
Delete ``path``. :type path: str :param path: the path of the file or directory :type recursive: bool :param recursive: if ``path`` is a directory, delete it recursively when :obj:`True` :raises: :exc:`~exceptions.IOError` when ``recursive`` is :obj:`False` a...
[ "Delete", "path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L333-L346
7,966
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.exists
def exists(self, path): """ Check if a given path exists on the filesystem. :type path: str :param path: the path to look for :rtype: bool :return: :obj:`True` if ``path`` exists """ _complain_ifclosed(self.closed) return self.fs.exists(path)
python
def exists(self, path): """ Check if a given path exists on the filesystem. :type path: str :param path: the path to look for :rtype: bool :return: :obj:`True` if ``path`` exists """ _complain_ifclosed(self.closed) return self.fs.exists(path)
[ "def", "exists", "(", "self", ",", "path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "exists", "(", "path", ")" ]
Check if a given path exists on the filesystem. :type path: str :param path: the path to look for :rtype: bool :return: :obj:`True` if ``path`` exists
[ "Check", "if", "a", "given", "path", "exists", "on", "the", "filesystem", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L348-L358
7,967
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.get_path_info
def get_path_info(self, path): """ Get information about ``path`` as a dict of properties. The return value, based upon ``fs.FileStatus`` from the Java API, has the following fields: * ``block_size``: HDFS block size of ``path`` * ``group``: group associated with ``path...
python
def get_path_info(self, path): """ Get information about ``path`` as a dict of properties. The return value, based upon ``fs.FileStatus`` from the Java API, has the following fields: * ``block_size``: HDFS block size of ``path`` * ``group``: group associated with ``path...
[ "def", "get_path_info", "(", "self", ",", "path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "get_path_info", "(", "path", ")" ]
Get information about ``path`` as a dict of properties. The return value, based upon ``fs.FileStatus`` from the Java API, has the following fields: * ``block_size``: HDFS block size of ``path`` * ``group``: group associated with ``path`` * ``kind``: ``'file'`` or ``'directory'`...
[ "Get", "information", "about", "path", "as", "a", "dict", "of", "properties", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L378-L403
7,968
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.list_directory
def list_directory(self, path): r""" Get list of files and directories for ``path``\ . :type path: str :param path: the path of the directory :rtype: list :return: list of files and directories in ``path`` :raises: :exc:`~exceptions.IOError` """ _...
python
def list_directory(self, path): r""" Get list of files and directories for ``path``\ . :type path: str :param path: the path of the directory :rtype: list :return: list of files and directories in ``path`` :raises: :exc:`~exceptions.IOError` """ _...
[ "def", "list_directory", "(", "self", ",", "path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "list_directory", "(", "path", ")" ]
r""" Get list of files and directories for ``path``\ . :type path: str :param path: the path of the directory :rtype: list :return: list of files and directories in ``path`` :raises: :exc:`~exceptions.IOError`
[ "r", "Get", "list", "of", "files", "and", "directories", "for", "path", "\\", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L405-L416
7,969
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.rename
def rename(self, from_path, to_path): """ Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError` """ _complain_ifclose...
python
def rename(self, from_path, to_path): """ Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError` """ _complain_ifclose...
[ "def", "rename", "(", "self", ",", "from_path", ",", "to_path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "rename", "(", "from_path", ",", "to_path", ")" ]
Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError`
[ "Rename", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L435-L446
7,970
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.set_replication
def set_replication(self, path, replication): r""" Set the replication of ``path`` to ``replication``\ . :type path: str :param path: the path of the file :type replication: int :param replication: the replication value :raises: :exc:`~exceptions.IOError` ...
python
def set_replication(self, path, replication): r""" Set the replication of ``path`` to ``replication``\ . :type path: str :param path: the path of the file :type replication: int :param replication: the replication value :raises: :exc:`~exceptions.IOError` ...
[ "def", "set_replication", "(", "self", ",", "path", ",", "replication", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "set_replication", "(", "path", ",", "replication", ")" ]
r""" Set the replication of ``path`` to ``replication``\ . :type path: str :param path: the path of the file :type replication: int :param replication: the replication value :raises: :exc:`~exceptions.IOError`
[ "r", "Set", "the", "replication", "of", "path", "to", "replication", "\\", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L448-L459
7,971
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.set_working_directory
def set_working_directory(self, path): r""" Set the working directory to ``path``\ . All relative paths will be resolved relative to it. :type path: str :param path: the path of the directory :raises: :exc:`~exceptions.IOError` """ _complain_ifclosed(self...
python
def set_working_directory(self, path): r""" Set the working directory to ``path``\ . All relative paths will be resolved relative to it. :type path: str :param path: the path of the directory :raises: :exc:`~exceptions.IOError` """ _complain_ifclosed(self...
[ "def", "set_working_directory", "(", "self", ",", "path", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "set_working_directory", "(", "path", ")" ]
r""" Set the working directory to ``path``\ . All relative paths will be resolved relative to it. :type path: str :param path: the path of the directory :raises: :exc:`~exceptions.IOError`
[ "r", "Set", "the", "working", "directory", "to", "path", "\\", ".", "All", "relative", "paths", "will", "be", "resolved", "relative", "to", "it", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L461-L471
7,972
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.working_directory
def working_directory(self): """ Get the current working directory. :rtype: str :return: current working directory """ _complain_ifclosed(self.closed) wd = self.fs.get_working_directory() return wd
python
def working_directory(self): """ Get the current working directory. :rtype: str :return: current working directory """ _complain_ifclosed(self.closed) wd = self.fs.get_working_directory() return wd
[ "def", "working_directory", "(", "self", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "wd", "=", "self", ".", "fs", ".", "get_working_directory", "(", ")", "return", "wd" ]
Get the current working directory. :rtype: str :return: current working directory
[ "Get", "the", "current", "working", "directory", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L483-L492
7,973
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.__compute_mode_from_string
def __compute_mode_from_string(self, path, mode_string): """ Scan a unix-style mode string and apply it to ``path``. :type mode_string: str :param mode_string: see ``man chmod`` for details. ``X``, ``s`` and ``t`` modes are not supported. The string should match the ...
python
def __compute_mode_from_string(self, path, mode_string): """ Scan a unix-style mode string and apply it to ``path``. :type mode_string: str :param mode_string: see ``man chmod`` for details. ``X``, ``s`` and ``t`` modes are not supported. The string should match the ...
[ "def", "__compute_mode_from_string", "(", "self", ",", "path", ",", "mode_string", ")", ":", "Char_to_perm_byte", "=", "{", "'r'", ":", "4", ",", "'w'", ":", "2", ",", "'x'", ":", "1", "}", "Fields", "=", "(", "(", "'u'", ",", "6", ")", ",", "(", ...
Scan a unix-style mode string and apply it to ``path``. :type mode_string: str :param mode_string: see ``man chmod`` for details. ``X``, ``s`` and ``t`` modes are not supported. The string should match the following regular expression: ``[ugoa]*[-+=]([rwx]*)``. :rtype: int ...
[ "Scan", "a", "unix", "-", "style", "mode", "string", "and", "apply", "it", "to", "path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L515-L576
7,974
crs4/pydoop
pydoop/hdfs/fs.py
hdfs.utime
def utime(self, path, mtime, atime): """ Change file last access and modification times. :type path: str :param path: the path to the file or directory :type mtime: int :param mtime: new modification time in seconds :type atime: int :param atime: new acce...
python
def utime(self, path, mtime, atime): """ Change file last access and modification times. :type path: str :param path: the path to the file or directory :type mtime: int :param mtime: new modification time in seconds :type atime: int :param atime: new acce...
[ "def", "utime", "(", "self", ",", "path", ",", "mtime", ",", "atime", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "fs", ".", "utime", "(", "path", ",", "int", "(", "mtime", ")", ",", "int", "(", "atime", ...
Change file last access and modification times. :type path: str :param path: the path to the file or directory :type mtime: int :param mtime: new modification time in seconds :type atime: int :param atime: new access time in seconds :raises: :exc:`~exceptions.IOE...
[ "Change", "file", "last", "access", "and", "modification", "times", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/fs.py#L595-L608
7,975
crs4/pydoop
setup.py
rm_rf
def rm_rf(path, dry_run=False): """ Remove a file or directory tree. Won't throw an exception, even if the removal fails. """ log.info("removing %s" % path) if dry_run: return try: if os.path.isdir(path) and not os.path.islink(path): shutil.rmtree(path) e...
python
def rm_rf(path, dry_run=False): """ Remove a file or directory tree. Won't throw an exception, even if the removal fails. """ log.info("removing %s" % path) if dry_run: return try: if os.path.isdir(path) and not os.path.islink(path): shutil.rmtree(path) e...
[ "def", "rm_rf", "(", "path", ",", "dry_run", "=", "False", ")", ":", "log", ".", "info", "(", "\"removing %s\"", "%", "path", ")", "if", "dry_run", ":", "return", "try", ":", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", "and", "not", ...
Remove a file or directory tree. Won't throw an exception, even if the removal fails.
[ "Remove", "a", "file", "or", "directory", "tree", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/setup.py#L93-L108
7,976
crs4/pydoop
setup.py
BuildPydoopExt.__finalize_hdfs
def __finalize_hdfs(self, ext): """\ Adds a few bits that depend on the specific environment. Delaying this until the build_ext phase allows non-build commands (e.g., sdist) to be run without java. """ java_home = jvm.get_java_home() jvm_lib_path, _ = jvm.get_jvm...
python
def __finalize_hdfs(self, ext): """\ Adds a few bits that depend on the specific environment. Delaying this until the build_ext phase allows non-build commands (e.g., sdist) to be run without java. """ java_home = jvm.get_java_home() jvm_lib_path, _ = jvm.get_jvm...
[ "def", "__finalize_hdfs", "(", "self", ",", "ext", ")", ":", "java_home", "=", "jvm", ".", "get_java_home", "(", ")", "jvm_lib_path", ",", "_", "=", "jvm", ".", "get_jvm_lib_path_and_name", "(", "java_home", ")", "ext", ".", "include_dirs", "=", "jvm", "."...
\ Adds a few bits that depend on the specific environment. Delaying this until the build_ext phase allows non-build commands (e.g., sdist) to be run without java.
[ "\\", "Adds", "a", "few", "bits", "that", "depend", "on", "the", "specific", "environment", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/setup.py#L286-L306
7,977
crs4/pydoop
pydoop/hadut.py
run_tool_cmd
def run_tool_cmd(tool, cmd, args=None, properties=None, hadoop_conf_dir=None, logger=None, keep_streams=True): """ Run a Hadoop command. If ``keep_streams`` is set to :obj:`True` (the default), the stdout and stderr of the command will be buffered in memory. If the command succeed...
python
def run_tool_cmd(tool, cmd, args=None, properties=None, hadoop_conf_dir=None, logger=None, keep_streams=True): """ Run a Hadoop command. If ``keep_streams`` is set to :obj:`True` (the default), the stdout and stderr of the command will be buffered in memory. If the command succeed...
[ "def", "run_tool_cmd", "(", "tool", ",", "cmd", ",", "args", "=", "None", ",", "properties", "=", "None", ",", "hadoop_conf_dir", "=", "None", ",", "logger", "=", "None", ",", "keep_streams", "=", "True", ")", ":", "if", "logger", "is", "None", ":", ...
Run a Hadoop command. If ``keep_streams`` is set to :obj:`True` (the default), the stdout and stderr of the command will be buffered in memory. If the command succeeds, the former will be returned; if it fails, a ``RunCmdError`` will be raised with the latter as the message. This mode is appropria...
[ "Run", "a", "Hadoop", "command", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L118-L175
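A possible invocation of run_tool_cmd, assuming the hadoop launcher is on PATH and a working installation; the tool and subcommand here are placeholders, not taken from the pydoop docs:

    import pydoop.hadut as hadut

    # keep_streams=True (the default): stdout is returned on success,
    # stderr becomes the RunCmdError message on failure
    out = hadut.run_tool_cmd("hadoop", "version")
    print(out)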
7,978
crs4/pydoop
pydoop/hadut.py
get_task_trackers
def get_task_trackers(properties=None, hadoop_conf_dir=None, offline=False): """ Get the list of task trackers in the Hadoop cluster. Each element in the returned list is in the ``(host, port)`` format. All arguments are passed to :func:`run_class`. If ``offline`` is :obj:`True`, try getting the l...
python
def get_task_trackers(properties=None, hadoop_conf_dir=None, offline=False): """ Get the list of task trackers in the Hadoop cluster. Each element in the returned list is in the ``(host, port)`` format. All arguments are passed to :func:`run_class`. If ``offline`` is :obj:`True`, try getting the l...
[ "def", "get_task_trackers", "(", "properties", "=", "None", ",", "hadoop_conf_dir", "=", "None", ",", "offline", "=", "False", ")", ":", "if", "offline", ":", "if", "not", "hadoop_conf_dir", ":", "hadoop_conf_dir", "=", "pydoop", ".", "hadoop_conf", "(", ")"...
Get the list of task trackers in the Hadoop cluster. Each element in the returned list is in the ``(host, port)`` format. All arguments are passed to :func:`run_class`. If ``offline`` is :obj:`True`, try getting the list of task trackers from the ``slaves`` file in Hadoop's configuration directory (no...
[ "Get", "the", "list", "of", "task", "trackers", "in", "the", "Hadoop", "cluster", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L194-L227
7,979
crs4/pydoop
pydoop/hadut.py
get_num_nodes
def get_num_nodes(properties=None, hadoop_conf_dir=None, offline=False): """ Get the number of task trackers in the Hadoop cluster. All arguments are passed to :func:`get_task_trackers`. """ return len(get_task_trackers(properties, hadoop_conf_dir, offline))
python
def get_num_nodes(properties=None, hadoop_conf_dir=None, offline=False): """ Get the number of task trackers in the Hadoop cluster. All arguments are passed to :func:`get_task_trackers`. """ return len(get_task_trackers(properties, hadoop_conf_dir, offline))
[ "def", "get_num_nodes", "(", "properties", "=", "None", ",", "hadoop_conf_dir", "=", "None", ",", "offline", "=", "False", ")", ":", "return", "len", "(", "get_task_trackers", "(", "properties", ",", "hadoop_conf_dir", ",", "offline", ")", ")" ]
Get the number of task trackers in the Hadoop cluster. All arguments are passed to :func:`get_task_trackers`.
[ "Get", "the", "number", "of", "task", "trackers", "in", "the", "Hadoop", "cluster", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L230-L236
7,980
crs4/pydoop
pydoop/hadut.py
dfs
def dfs(args=None, properties=None, hadoop_conf_dir=None): """ Run the Hadoop file system shell. All arguments are passed to :func:`run_class`. """ # run FsShell directly (avoids "hadoop dfs" deprecation) return run_class( "org.apache.hadoop.fs.FsShell", args, properties, hadoop...
python
def dfs(args=None, properties=None, hadoop_conf_dir=None): """ Run the Hadoop file system shell. All arguments are passed to :func:`run_class`. """ # run FsShell directly (avoids "hadoop dfs" deprecation) return run_class( "org.apache.hadoop.fs.FsShell", args, properties, hadoop...
[ "def", "dfs", "(", "args", "=", "None", ",", "properties", "=", "None", ",", "hadoop_conf_dir", "=", "None", ")", ":", "# run FsShell directly (avoids \"hadoop dfs\" deprecation)", "return", "run_class", "(", "\"org.apache.hadoop.fs.FsShell\"", ",", "args", ",", "prop...
Run the Hadoop file system shell. All arguments are passed to :func:`run_class`.
[ "Run", "the", "Hadoop", "file", "system", "shell", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L239-L249
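For example, listing an HDFS directory through the FsShell wrapper (placeholder path, running cluster assumed; the return value follows run_class, which is not shown here):

    import pydoop.hadut as hadut

    hadut.dfs(["-ls", "/user"])   # roughly equivalent to: hadoop fs -ls /user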
7,981
crs4/pydoop
pydoop/hadut.py
run_pipes
def run_pipes(executable, input_path, output_path, more_args=None, properties=None, force_pydoop_submitter=False, hadoop_conf_dir=None, logger=None, keep_streams=False): """ Run a pipes command. ``more_args`` (after setting input/output path) and ``properties`` are passed to...
python
def run_pipes(executable, input_path, output_path, more_args=None, properties=None, force_pydoop_submitter=False, hadoop_conf_dir=None, logger=None, keep_streams=False): """ Run a pipes command. ``more_args`` (after setting input/output path) and ``properties`` are passed to...
[ "def", "run_pipes", "(", "executable", ",", "input_path", ",", "output_path", ",", "more_args", "=", "None", ",", "properties", "=", "None", ",", "force_pydoop_submitter", "=", "False", ",", "hadoop_conf_dir", "=", "None", ",", "logger", "=", "None", ",", "k...
Run a pipes command. ``more_args`` (after setting input/output path) and ``properties`` are passed to :func:`run_cmd`. If not specified otherwise, this function sets the properties ``mapreduce.pipes.isjavarecordreader`` and ``mapreduce.pipes.isjavarecordwriter`` to ``"true"``. This function w...
[ "Run", "a", "pipes", "command", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L338-L395
7,982
crs4/pydoop
pydoop/hadut.py
collect_output
def collect_output(mr_out_dir, out_file=None): """ Return all mapreduce output in ``mr_out_dir``. Append the output to ``out_file`` if provided. Otherwise, return the result as a single string (it is the caller's responsibility to ensure that the amount of data retrieved fits into memory). """...
python
def collect_output(mr_out_dir, out_file=None): """ Return all mapreduce output in ``mr_out_dir``. Append the output to ``out_file`` if provided. Otherwise, return the result as a single string (it is the caller's responsibility to ensure that the amount of data retrieved fits into memory). """...
[ "def", "collect_output", "(", "mr_out_dir", ",", "out_file", "=", "None", ")", ":", "if", "out_file", "is", "None", ":", "output", "=", "[", "]", "for", "fn", "in", "iter_mr_out_files", "(", "mr_out_dir", ")", ":", "with", "hdfs", ".", "open", "(", "fn...
Return all mapreduce output in ``mr_out_dir``. Append the output to ``out_file`` if provided. Otherwise, return the result as a single string (it is the caller's responsibility to ensure that the amount of data retrieved fits into memory).
[ "Return", "all", "mapreduce", "output", "in", "mr_out_dir", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L425-L447
7,983
crs4/pydoop
pydoop/hadut.py
PipesRunner.set_output
def set_output(self, output): """ Set the output path for the job. Optional if the runner has been instantiated with a prefix. """ self.output = output self.logger.info("assigning output to %s", self.output)
python
def set_output(self, output): """ Set the output path for the job. Optional if the runner has been instantiated with a prefix. """ self.output = output self.logger.info("assigning output to %s", self.output)
[ "def", "set_output", "(", "self", ",", "output", ")", ":", "self", ".", "output", "=", "output", "self", ".", "logger", ".", "info", "(", "\"assigning output to %s\"", ",", "self", ".", "output", ")" ]
Set the output path for the job. Optional if the runner has been instantiated with a prefix.
[ "Set", "the", "output", "path", "for", "the", "job", ".", "Optional", "if", "the", "runner", "has", "been", "instantiated", "with", "a", "prefix", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L504-L510
7,984
crs4/pydoop
pydoop/hadut.py
PipesRunner.set_exe
def set_exe(self, pipes_code): """ Dump launcher code to the distributed file system. """ if not self.output: raise RuntimeError("no output directory, can't create launcher") parent = hdfs.path.dirname(hdfs.path.abspath(self.output.rstrip("/"))) self.exe = hdf...
python
def set_exe(self, pipes_code): """ Dump launcher code to the distributed file system. """ if not self.output: raise RuntimeError("no output directory, can't create launcher") parent = hdfs.path.dirname(hdfs.path.abspath(self.output.rstrip("/"))) self.exe = hdf...
[ "def", "set_exe", "(", "self", ",", "pipes_code", ")", ":", "if", "not", "self", ".", "output", ":", "raise", "RuntimeError", "(", "\"no output directory, can't create launcher\"", ")", "parent", "=", "hdfs", ".", "path", ".", "dirname", "(", "hdfs", ".", "p...
Dump launcher code to the distributed file system.
[ "Dump", "launcher", "code", "to", "the", "distributed", "file", "system", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hadut.py#L512-L520
7,985
crs4/pydoop
pydoop/hdfs/__init__.py
dump
def dump(data, hdfs_path, **kwargs): """\ Write ``data`` to ``hdfs_path``. Keyword arguments are passed to :func:`open`, except for ``mode``, which is forced to ``"w"`` (or ``"wt"`` for text data). """ kwargs["mode"] = "w" if isinstance(data, bintype) else "wt" with open(hdfs_path, **kwargs...
python
def dump(data, hdfs_path, **kwargs): """\ Write ``data`` to ``hdfs_path``. Keyword arguments are passed to :func:`open`, except for ``mode``, which is forced to ``"w"`` (or ``"wt"`` for text data). """ kwargs["mode"] = "w" if isinstance(data, bintype) else "wt" with open(hdfs_path, **kwargs...
[ "def", "dump", "(", "data", ",", "hdfs_path", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "\"mode\"", "]", "=", "\"w\"", "if", "isinstance", "(", "data", ",", "bintype", ")", "else", "\"wt\"", "with", "open", "(", "hdfs_path", ",", "*", "*", ...
\ Write ``data`` to ``hdfs_path``. Keyword arguments are passed to :func:`open`, except for ``mode``, which is forced to ``"w"`` (or ``"wt"`` for text data).
[ "\\", "Write", "data", "to", "hdfs_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L129-L143
7,986
crs4/pydoop
pydoop/hdfs/__init__.py
load
def load(hdfs_path, **kwargs): """\ Read the content of ``hdfs_path`` and return it. Keyword arguments are passed to :func:`open`. The `"mode"` kwarg must be readonly. """ m, _ = common.parse_mode(kwargs.get("mode", "r")) if m != "r": raise ValueError("opening mode must be readonly"...
python
def load(hdfs_path, **kwargs): """\ Read the content of ``hdfs_path`` and return it. Keyword arguments are passed to :func:`open`. The `"mode"` kwarg must be readonly. """ m, _ = common.parse_mode(kwargs.get("mode", "r")) if m != "r": raise ValueError("opening mode must be readonly"...
[ "def", "load", "(", "hdfs_path", ",", "*", "*", "kwargs", ")", ":", "m", ",", "_", "=", "common", ".", "parse_mode", "(", "kwargs", ".", "get", "(", "\"mode\"", ",", "\"r\"", ")", ")", "if", "m", "!=", "\"r\"", ":", "raise", "ValueError", "(", "\...
\ Read the content of ``hdfs_path`` and return it. Keyword arguments are passed to :func:`open`. The `"mode"` kwarg must be readonly.
[ "\\", "Read", "the", "content", "of", "hdfs_path", "and", "return", "it", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L146-L159
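dump and load make a convenient round trip for small files that fit in memory; a sketch with a placeholder path:

    import pydoop.hdfs as hdfs

    hdfs.dump("hello, hdfs\n", "demo.txt")    # str data -> opened with mode "wt"
    print(hdfs.load("demo.txt", mode="rt"))   # 'hello, hdfs\n'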
7,987
crs4/pydoop
pydoop/hdfs/__init__.py
cp
def cp(src_hdfs_path, dest_hdfs_path, **kwargs): """\ Copy the contents of ``src_hdfs_path`` to ``dest_hdfs_path``. If ``src_hdfs_path`` is a directory, its contents will be copied recursively. Source file(s) are opened for reading and copies are opened for writing. Additional keyword arguments, if...
python
def cp(src_hdfs_path, dest_hdfs_path, **kwargs): """\ Copy the contents of ``src_hdfs_path`` to ``dest_hdfs_path``. If ``src_hdfs_path`` is a directory, its contents will be copied recursively. Source file(s) are opened for reading and copies are opened for writing. Additional keyword arguments, if...
[ "def", "cp", "(", "src_hdfs_path", ",", "dest_hdfs_path", ",", "*", "*", "kwargs", ")", ":", "src", ",", "dest", "=", "{", "}", ",", "{", "}", "try", ":", "for", "d", ",", "p", "in", "(", "(", "src", ",", "src_hdfs_path", ")", ",", "(", "dest",...
\ Copy the contents of ``src_hdfs_path`` to ``dest_hdfs_path``. If ``src_hdfs_path`` is a directory, its contents will be copied recursively. Source file(s) are opened for reading and copies are opened for writing. Additional keyword arguments, if any, are handled like in :func:`open`.
[ "\\", "Copy", "the", "contents", "of", "src_hdfs_path", "to", "dest_hdfs_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L177-L230
7,988
crs4/pydoop
pydoop/hdfs/__init__.py
put
def put(src_path, dest_hdfs_path, **kwargs): """\ Copy the contents of ``src_path`` to ``dest_hdfs_path``. ``src_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword arg...
python
def put(src_path, dest_hdfs_path, **kwargs): """\ Copy the contents of ``src_path`` to ``dest_hdfs_path``. ``src_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword arg...
[ "def", "put", "(", "src_path", ",", "dest_hdfs_path", ",", "*", "*", "kwargs", ")", ":", "cp", "(", "path", ".", "abspath", "(", "src_path", ",", "local", "=", "True", ")", ",", "dest_hdfs_path", ",", "*", "*", "kwargs", ")" ]
\ Copy the contents of ``src_path`` to ``dest_hdfs_path``. ``src_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword arguments, if any, are handled like in :func:`open`...
[ "\\", "Copy", "the", "contents", "of", "src_path", "to", "dest_hdfs_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L233-L242
7,989
crs4/pydoop
pydoop/hdfs/__init__.py
get
def get(src_hdfs_path, dest_path, **kwargs): """\ Copy the contents of ``src_hdfs_path`` to ``dest_path``. ``dest_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword ...
python
def get(src_hdfs_path, dest_path, **kwargs): """\ Copy the contents of ``src_hdfs_path`` to ``dest_path``. ``dest_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword ...
[ "def", "get", "(", "src_hdfs_path", ",", "dest_path", ",", "*", "*", "kwargs", ")", ":", "cp", "(", "src_hdfs_path", ",", "path", ".", "abspath", "(", "dest_path", ",", "local", "=", "True", ")", ",", "*", "*", "kwargs", ")" ]
\ Copy the contents of ``src_hdfs_path`` to ``dest_path``. ``dest_path`` is forced to be interpreted as an ordinary local path (see :func:`~path.abspath`). The source file is opened for reading and the copy is opened for writing. Additional keyword arguments, if any, are handled like in :func:`open...
[ "\\", "Copy", "the", "contents", "of", "src_hdfs_path", "to", "dest_path", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L245-L254
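put and get are the local-to-HDFS and HDFS-to-local counterparts of cp; an illustrative transfer with placeholder paths:

    import pydoop.hdfs as hdfs

    hdfs.put("local_data.csv", "data.csv")        # local file -> HDFS
    hdfs.get("data.csv", "/tmp/data_copy.csv")    # HDFS -> local copy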
7,990
crs4/pydoop
pydoop/hdfs/__init__.py
mkdir
def mkdir(hdfs_path, user=None): """ Create a directory and its parents as needed. """ host, port, path_ = path.split(hdfs_path, user) fs = hdfs(host, port, user) retval = fs.create_directory(path_) fs.close() return retval
python
def mkdir(hdfs_path, user=None): """ Create a directory and its parents as needed. """ host, port, path_ = path.split(hdfs_path, user) fs = hdfs(host, port, user) retval = fs.create_directory(path_) fs.close() return retval
[ "def", "mkdir", "(", "hdfs_path", ",", "user", "=", "None", ")", ":", "host", ",", "port", ",", "path_", "=", "path", ".", "split", "(", "hdfs_path", ",", "user", ")", "fs", "=", "hdfs", "(", "host", ",", "port", ",", "user", ")", "retval", "=", ...
Create a directory and its parents as needed.
[ "Create", "a", "directory", "and", "its", "parents", "as", "needed", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L257-L265
7,991
crs4/pydoop
pydoop/hdfs/__init__.py
lsl
def lsl(hdfs_path, user=None, recursive=False): """ Return a list of dictionaries of file properties. If ``hdfs_path`` is a file, there is only one item corresponding to the file itself; if it is a directory and ``recursive`` is :obj:`False`, each list item corresponds to a file or directory co...
python
def lsl(hdfs_path, user=None, recursive=False): """ Return a list of dictionaries of file properties. If ``hdfs_path`` is a file, there is only one item corresponding to the file itself; if it is a directory and ``recursive`` is :obj:`False`, each list item corresponds to a file or directory co...
[ "def", "lsl", "(", "hdfs_path", ",", "user", "=", "None", ",", "recursive", "=", "False", ")", ":", "host", ",", "port", ",", "path_", "=", "path", ".", "split", "(", "hdfs_path", ",", "user", ")", "fs", "=", "hdfs", "(", "host", ",", "port", ","...
Return a list of dictionaries of file properties. If ``hdfs_path`` is a file, there is only one item corresponding to the file itself; if it is a directory and ``recursive`` is :obj:`False`, each list item corresponds to a file or directory contained by it; if it is a directory and ``recursive`` is ...
[ "Return", "a", "list", "of", "dictionaries", "of", "file", "properties", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L287-L310
7,992
crs4/pydoop
pydoop/hdfs/__init__.py
ls
def ls(hdfs_path, user=None, recursive=False): """ Return a list of hdfs paths. Works in the same way as :func:`lsl`, except for the fact that list items are hdfs paths instead of dictionaries of properties. """ dir_list = lsl(hdfs_path, user, recursive) return [d["name"] for d in dir_list]
python
def ls(hdfs_path, user=None, recursive=False): """ Return a list of hdfs paths. Works in the same way as :func:`lsl`, except for the fact that list items are hdfs paths instead of dictionaries of properties. """ dir_list = lsl(hdfs_path, user, recursive) return [d["name"] for d in dir_list]
[ "def", "ls", "(", "hdfs_path", ",", "user", "=", "None", ",", "recursive", "=", "False", ")", ":", "dir_list", "=", "lsl", "(", "hdfs_path", ",", "user", ",", "recursive", ")", "return", "[", "d", "[", "\"name\"", "]", "for", "d", "in", "dir_list", ...
Return a list of hdfs paths. Works in the same way as :func:`lsl`, except for the fact that list items are hdfs paths instead of dictionaries of properties.
[ "Return", "a", "list", "of", "hdfs", "paths", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L313-L321
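Combining mkdir, lsl and ls from the records above (placeholder paths; the exact prefix of the returned names depends on the configured default filesystem):

    import pydoop.hdfs as hdfs

    hdfs.mkdir("demo_dir/sub")              # parents are created as needed
    print(hdfs.ls("demo_dir"))              # e.g. ['hdfs://.../demo_dir/sub']
    print(hdfs.lsl("demo_dir")[0]["name"])  # same entry as a property dict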
7,993
crs4/pydoop
pydoop/hdfs/__init__.py
move
def move(src, dest, user=None): """ Move or rename src to dest. """ src_host, src_port, src_path = path.split(src, user) dest_host, dest_port, dest_path = path.split(dest, user) src_fs = hdfs(src_host, src_port, user) dest_fs = hdfs(dest_host, dest_port, user) try: retval = src_f...
python
def move(src, dest, user=None): """ Move or rename src to dest. """ src_host, src_port, src_path = path.split(src, user) dest_host, dest_port, dest_path = path.split(dest, user) src_fs = hdfs(src_host, src_port, user) dest_fs = hdfs(dest_host, dest_port, user) try: retval = src_f...
[ "def", "move", "(", "src", ",", "dest", ",", "user", "=", "None", ")", ":", "src_host", ",", "src_port", ",", "src_path", "=", "path", ".", "split", "(", "src", ",", "user", ")", "dest_host", ",", "dest_port", ",", "dest_path", "=", "path", ".", "s...
Move or rename src to dest.
[ "Move", "or", "rename", "src", "to", "dest", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L340-L353
7,994
crs4/pydoop
pydoop/hdfs/__init__.py
renames
def renames(from_path, to_path, user=None): """ Rename ``from_path`` to ``to_path``, creating parents as needed. """ to_dir = path.dirname(to_path) if to_dir: mkdir(to_dir, user=user) rename(from_path, to_path, user=user)
python
def renames(from_path, to_path, user=None): """ Rename ``from_path`` to ``to_path``, creating parents as needed. """ to_dir = path.dirname(to_path) if to_dir: mkdir(to_dir, user=user) rename(from_path, to_path, user=user)
[ "def", "renames", "(", "from_path", ",", "to_path", ",", "user", "=", "None", ")", ":", "to_dir", "=", "path", ".", "dirname", "(", "to_path", ")", "if", "to_dir", ":", "mkdir", "(", "to_dir", ",", "user", "=", "user", ")", "rename", "(", "from_path"...
Rename ``from_path`` to ``to_path``, creating parents as needed.
[ "Rename", "from_path", "to", "to_path", "creating", "parents", "as", "needed", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L381-L388
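Usage of ``renames`` follows directly from the code above; the paths are placeholders.

import pydoop.hdfs as hdfs

# Rename, creating any missing parent directories of the destination first.
hdfs.renames("/user/someuser/tmp/part-00000",
             "/user/someuser/results/2020/part-00000")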
7,995
crs4/pydoop
pydoop/hdfs/file.py
FileIO.readline
def readline(self): """ Read and return a line of text. :rtype: str :return: the next line of text in the file, including the newline character """ _complain_ifclosed(self.closed) line = self.f.readline() if self.__encoding: return l...
python
def readline(self): """ Read and return a line of text. :rtype: str :return: the next line of text in the file, including the newline character """ _complain_ifclosed(self.closed) line = self.f.readline() if self.__encoding: return l...
[ "def", "readline", "(", "self", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "line", "=", "self", ".", "f", ".", "readline", "(", ")", "if", "self", ".", "__encoding", ":", "return", "line", ".", "decode", "(", "self", ".", "__enc...
Read and return a line of text. :rtype: str :return: the next line of text in the file, including the newline character
[ "Read", "and", "return", "a", "line", "of", "text", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/file.py#L107-L120
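A sketch of ``FileIO.readline``. It assumes the module-level ``pydoop.hdfs.open`` helper (not part of this record) returns such a ``FileIO`` object, and the "rt" mode string and path are assumptions.

import pydoop.hdfs as hdfs

# Print a text file line by line; an empty string at end of file is assumed,
# as with standard Python file objects.
with hdfs.open("/user/someuser/notes.txt", "rt") as f:
    line = f.readline()
    while line:
        print(line.rstrip("\n"))
        line = f.readline()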
7,996
crs4/pydoop
pydoop/hdfs/file.py
FileIO.pread
def pread(self, position, length): r""" Read ``length`` bytes of data from the file, starting from ``position``\ . :type position: int :param position: position from which to read :type length: int :param length: the number of bytes to read :rtype: string...
python
def pread(self, position, length): r""" Read ``length`` bytes of data from the file, starting from ``position``\ . :type position: int :param position: position from which to read :type length: int :param length: the number of bytes to read :rtype: string...
[ "def", "pread", "(", "self", ",", "position", ",", "length", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "if", "position", ">", "self", ".", "size", ":", "raise", "IOError", "(", "\"position cannot be past EOF\"", ")", "if", "length", ...
r""" Read ``length`` bytes of data from the file, starting from ``position``\ . :type position: int :param position: position from which to read :type length: int :param length: the number of bytes to read :rtype: string :return: the chunk of data read fr...
[ "r", "Read", "length", "bytes", "of", "data", "from", "the", "file", "starting", "from", "position", "\\", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/file.py#L165-L186
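A sketch of ``pread``, again assuming ``pydoop.hdfs.open`` in binary mode; the offset and length are illustrative and only make sense if the file is at least that large, since the tokenized code raises ``IOError`` for a position past EOF.

import pydoop.hdfs as hdfs

# Read 16 bytes starting at byte 128 without disturbing the current offset.
with hdfs.open("/user/someuser/data.bin", "rb") as f:
    chunk = f.pread(128, 16)
    print(len(chunk))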
7,997
crs4/pydoop
pydoop/hdfs/file.py
FileIO.read
def read(self, length=-1): """ Read ``length`` bytes from the file. If ``length`` is negative or omitted, read all data until EOF. :type length: int :param length: the number of bytes to read :rtype: string :return: the chunk of data read from the file "...
python
def read(self, length=-1): """ Read ``length`` bytes from the file. If ``length`` is negative or omitted, read all data until EOF. :type length: int :param length: the number of bytes to read :rtype: string :return: the chunk of data read from the file "...
[ "def", "read", "(", "self", ",", "length", "=", "-", "1", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "# NOTE: libhdfs read stops at block boundaries: it is *essential*", "# to ensure that we actually read the required number of bytes.", "if", "length", ...
Read ``length`` bytes from the file. If ``length`` is negative or omitted, read all data until EOF. :type length: int :param length: the number of bytes to read :rtype: string :return: the chunk of data read from the file
[ "Read", "length", "bytes", "from", "the", "file", ".", "If", "length", "is", "negative", "or", "omitted", "read", "all", "data", "until", "EOF", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/file.py#L188-L216
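A sketch of ``read``; the NOTE in the tokenized code says the implementation keeps reading past HDFS block boundaries, so a positive length yields exactly that many bytes unless EOF is reached first. Path and mode are placeholders.

import pydoop.hdfs as hdfs

with hdfs.open("/user/someuser/data.bin", "rb") as f:
    header = f.read(4)   # exactly 4 bytes, even across a block boundary (fewer only at EOF)
    rest = f.read()      # omitted or negative length reads everything up to EOF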
7,998
crs4/pydoop
pydoop/hdfs/file.py
FileIO.seek
def seek(self, position, whence=os.SEEK_SET): """ Seek to ``position`` in file. :type position: int :param position: offset in bytes to seek to :type whence: int :param whence: defaults to ``os.SEEK_SET`` (absolute); other values are ``os.SEEK_CUR`` (relative t...
python
def seek(self, position, whence=os.SEEK_SET): """ Seek to ``position`` in file. :type position: int :param position: offset in bytes to seek to :type whence: int :param whence: defaults to ``os.SEEK_SET`` (absolute); other values are ``os.SEEK_CUR`` (relative t...
[ "def", "seek", "(", "self", ",", "position", ",", "whence", "=", "os", ".", "SEEK_SET", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "return", "self", ".", "f", ".", "seek", "(", "position", ",", "whence", ")" ]
Seek to ``position`` in file. :type position: int :param position: offset in bytes to seek to :type whence: int :param whence: defaults to ``os.SEEK_SET`` (absolute); other values are ``os.SEEK_CUR`` (relative to the current position) and ``os.SEEK_END`` (relative to...
[ "Seek", "to", "position", "in", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/file.py#L218-L230
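A sketch of ``seek`` on a file opened for reading; ``pydoop.hdfs.open``, the path, and the mode string are assumptions not taken from this record.

import os
import pydoop.hdfs as hdfs

with hdfs.open("/user/someuser/data.bin", "rb") as f:
    first = f.read(8)
    f.seek(0)                  # rewind; os.SEEK_SET (absolute) is the default whence
    assert f.read(8) == first  # re-reading from the start yields the same bytes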
7,999
crs4/pydoop
pydoop/hdfs/file.py
FileIO.write
def write(self, data): """ Write ``data`` to the file. :type data: bytes :param data: the data to be written to the file :rtype: int :return: the number of bytes written """ _complain_ifclosed(self.closed) if self.__encoding: self.f.wr...
python
def write(self, data): """ Write ``data`` to the file. :type data: bytes :param data: the data to be written to the file :rtype: int :return: the number of bytes written """ _complain_ifclosed(self.closed) if self.__encoding: self.f.wr...
[ "def", "write", "(", "self", ",", "data", ")", ":", "_complain_ifclosed", "(", "self", ".", "closed", ")", "if", "self", ".", "__encoding", ":", "self", ".", "f", ".", "write", "(", "data", ".", "encode", "(", "self", ".", "__encoding", ",", "self", ...
Write ``data`` to the file. :type data: bytes :param data: the data to be written to the file :rtype: int :return: the number of bytes written
[ "Write", "data", "to", "the", "file", "." ]
f375be2a06f9c67eaae3ce6f605195dbca143b2b
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/file.py#L242-L256
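Finally, a sketch of ``write`` on a file opened for writing; the "wb" mode string and the path are assumptions not taken from this record.

import pydoop.hdfs as hdfs

# write() returns the number of bytes written, per the docstring above.
with hdfs.open("/user/someuser/out.bin", "wb") as f:
    n = f.write(b"hello hdfs\n")
    print(n)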